diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3acbc370..05a89405 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,7 +16,7 @@ jobs: lint: timeout-minutes: 10 name: lint - runs-on: ${{ github.repository == 'stainless-sdks/gradientai-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + runs-on: ${{ github.repository == 'stainless-sdks/gradient-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - uses: actions/checkout@v4 @@ -36,7 +36,7 @@ jobs: run: ./scripts/lint build: - if: github.repository == 'stainless-sdks/gradientai-python' && (github.event_name == 'push' || github.event.pull_request.head.repo.fork) + if: github.repository == 'stainless-sdks/gradient-python' && (github.event_name == 'push' || github.event.pull_request.head.repo.fork) timeout-minutes: 10 name: build permissions: @@ -76,7 +76,7 @@ jobs: test: timeout-minutes: 10 name: test - runs-on: ${{ github.repository == 'stainless-sdks/gradientai-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + runs-on: ${{ github.repository == 'stainless-sdks/gradient-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} if: github.event_name == 'push' || github.event.pull_request.head.repo.fork steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index 3dcd6c42..79ee5b7d 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -1,6 +1,6 @@ # This workflow is triggered when a GitHub release is created. # It can also be run manually to re-publish to PyPI in case it failed for some reason. -# You can run this workflow by navigating to https://www.github.com/digitalocean/gradientai-python/actions/workflows/publish-pypi.yml +# You can run this workflow by navigating to https://www.github.com/digitalocean/gradient-python/actions/workflows/publish-pypi.yml name: Publish PyPI on: workflow_dispatch: @@ -28,4 +28,4 @@ jobs: run: | bash ./bin/publish-pypi env: - PYPI_TOKEN: ${{ secrets.GRADIENT_AI_PYPI_TOKEN || secrets.PYPI_TOKEN }} + PYPI_TOKEN: ${{ secrets.GRADIENT_PYPI_TOKEN || secrets.PYPI_TOKEN }} diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml index d49e26c2..9c8912bc 100644 --- a/.github/workflows/release-doctor.yml +++ b/.github/workflows/release-doctor.yml @@ -9,7 +9,7 @@ jobs: release_doctor: name: release doctor runs-on: ubuntu-latest - if: github.repository == 'digitalocean/gradientai-python' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next') + if: github.repository == 'digitalocean/gradient-python' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next') steps: - uses: actions/checkout@v4 @@ -18,4 +18,4 @@ jobs: run: | bash ./bin/check-release-environment env: - PYPI_TOKEN: ${{ secrets.GRADIENT_AI_PYPI_TOKEN || secrets.PYPI_TOKEN }} + PYPI_TOKEN: ${{ secrets.GRADIENT_PYPI_TOKEN || secrets.PYPI_TOKEN }} diff --git a/.release-please-manifest.json b/.release-please-manifest.json index ce692f94..ae66b92e 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.1.0-beta.4" + ".": "3.0.0-beta.1" } \ No newline at end of file diff --git a/.stats.yml b/.stats.yml index 9a868058..8c069c9b 100644 --- a/.stats.yml +++ 
b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 170 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradientai-015417b36365dfcb32166e67379c38de8bf5127c33dff646097a819a7b4dc588.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradient-015417b36365dfcb32166e67379c38de8bf5127c33dff646097a819a7b4dc588.yml openapi_spec_hash: d7d811c13cc79f15d82fe680cf425859 -config_hash: 3ad1734779befb065101197f2f35568c +config_hash: 77ddef130940a6ad8ea6c6f66aee8757 diff --git a/CHANGELOG.md b/CHANGELOG.md index 2a4a3c4e..c1c37c72 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,25 @@ # Changelog +## 3.0.0-beta.1 (2025-07-31) + +Full Changelog: [v0.1.0-beta.4...v3.0.0-beta.1](https://github.com/digitalocean/gradient-python/compare/v0.1.0-beta.4...v3.0.0-beta.1) + +### Features + +* **api:** remove GRADIENTAI env vars ([43d5c5a](https://github.com/digitalocean/gradient-python/commit/43d5c5a6f22e108e1727e6abae9199c1ba2481da)) +* **api:** update to package gradient ([9dcd1d6](https://github.com/digitalocean/gradient-python/commit/9dcd1d6c53d31e7da58a7828a0864fc7f633b22c)) +* **api:** update to package gradient ([3099c15](https://github.com/digitalocean/gradient-python/commit/3099c154ab5fc3fd104349ce9069cdd18485104d)) +* **client:** support file upload requests ([90a77c9](https://github.com/digitalocean/gradient-python/commit/90a77c93c1a0b4a565fbb78f37e69ed6709df223)) + + +### Chores + +* update SDK settings ([b7d59f7](https://github.com/digitalocean/gradient-python/commit/b7d59f71d0d511e2ec9bdbf5e548d5e5bf946832)) +* update SDK settings ([3b18c48](https://github.com/digitalocean/gradient-python/commit/3b18c48f0c5dbb3f70e73b9a2654d820c8f6a882)) +* update SDK settings ([df18f3a](https://github.com/digitalocean/gradient-python/commit/df18f3a44bdc859e78130aa229e7fd0bfc0af906)) +* update SDK settings ([33893b0](https://github.com/digitalocean/gradient-python/commit/33893b0a60acc7746e7a60b5066e332547210c38)) +* whitespace cleanup ([dd13d32](https://github.com/digitalocean/gradient-python/commit/dd13d321f46cf779fcb841c12068216875f551e0)) + ## 0.1.0-beta.4 (2025-07-29) Full Changelog: [v0.1.0-beta.3...v0.1.0-beta.4](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-beta.3...v0.1.0-beta.4) @@ -13,334 +33,334 @@ Full Changelog: [v0.1.0-beta.3...v0.1.0-beta.4](https://github.com/digitalocean/ ## 0.1.0-beta.3 (2025-07-25) -Full Changelog: [v0.1.0-beta.2...v0.1.0-beta.3](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-beta.2...v0.1.0-beta.3) +Full Changelog: [v0.1.0-beta.2...v0.1.0-beta.3](https://github.com/digitalocean/gradient-python/compare/v0.1.0-beta.2...v0.1.0-beta.3) ### Bug Fixes -* **parsing:** parse extra field types ([93bea71](https://github.com/digitalocean/gradientai-python/commit/93bea71735195fa3f32de6b64bbc0aaac60a6d6c)) +* **parsing:** parse extra field types ([93bea71](https://github.com/digitalocean/gradient-python/commit/93bea71735195fa3f32de6b64bbc0aaac60a6d6c)) ### Chores -* **project:** add settings file for vscode ([3b597aa](https://github.com/digitalocean/gradientai-python/commit/3b597aa96e1f588506de47d782444992383f5522)) -* update README with new gradient name ([03157fb](https://github.com/digitalocean/gradientai-python/commit/03157fb38616c68568024ab7e426b45d414bf432)) +* **project:** add settings file for vscode ([3b597aa](https://github.com/digitalocean/gradient-python/commit/3b597aa96e1f588506de47d782444992383f5522)) +* update README with new gradient name 
([03157fb](https://github.com/digitalocean/gradient-python/commit/03157fb38616c68568024ab7e426b45d414bf432)) ## 0.1.0-beta.2 (2025-07-22) -Full Changelog: [v0.1.0-beta.1...v0.1.0-beta.2](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-beta.1...v0.1.0-beta.2) +Full Changelog: [v0.1.0-beta.1...v0.1.0-beta.2](https://github.com/digitalocean/gradient-python/compare/v0.1.0-beta.1...v0.1.0-beta.2) ### Bug Fixes -* **parsing:** ignore empty metadata ([cee9728](https://github.com/digitalocean/gradientai-python/commit/cee9728fd727cd600d2ac47ead9206ca937f7757)) +* **parsing:** ignore empty metadata ([cee9728](https://github.com/digitalocean/gradient-python/commit/cee9728fd727cd600d2ac47ead9206ca937f7757)) ### Chores -* **internal:** version bump ([e13ccb0](https://github.com/digitalocean/gradientai-python/commit/e13ccb069743fc6ebc56e0bb0463ff11864ad944)) -* **internal:** version bump ([00ee94d](https://github.com/digitalocean/gradientai-python/commit/00ee94d848ae5c5fc4604160c822e4757c4e6de8)) -* **types:** rebuild Pydantic models after all types are defined ([db7d61c](https://github.com/digitalocean/gradientai-python/commit/db7d61c02df9f86af9170d38539257e9cbf3eff9)) +* **internal:** version bump ([e13ccb0](https://github.com/digitalocean/gradient-python/commit/e13ccb069743fc6ebc56e0bb0463ff11864ad944)) +* **internal:** version bump ([00ee94d](https://github.com/digitalocean/gradient-python/commit/00ee94d848ae5c5fc4604160c822e4757c4e6de8)) +* **types:** rebuild Pydantic models after all types are defined ([db7d61c](https://github.com/digitalocean/gradient-python/commit/db7d61c02df9f86af9170d38539257e9cbf3eff9)) ## 0.1.0-beta.1 (2025-07-21) -Full Changelog: [v0.1.0-alpha.19...v0.1.0-beta.1](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.19...v0.1.0-beta.1) +Full Changelog: [v0.1.0-alpha.19...v0.1.0-beta.1](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.19...v0.1.0-beta.1) ### Features -* **api:** manual updates ([fda6270](https://github.com/digitalocean/gradientai-python/commit/fda62708a8f4d4fd66187edd54b39336b88a7e1c)) -* **api:** manual updates ([7548648](https://github.com/digitalocean/gradientai-python/commit/75486489df49297376fe0bcff70f1e527764b64d)) +* **api:** manual updates ([fda6270](https://github.com/digitalocean/gradient-python/commit/fda62708a8f4d4fd66187edd54b39336b88a7e1c)) +* **api:** manual updates ([7548648](https://github.com/digitalocean/gradient-python/commit/75486489df49297376fe0bcff70f1e527764b64d)) ### Chores -* **internal:** version bump ([be22c3d](https://github.com/digitalocean/gradientai-python/commit/be22c3d8c9835b45643d5e91db093108cb03f893)) -* **internal:** version bump ([2774d54](https://github.com/digitalocean/gradientai-python/commit/2774d540184f8ca7d401c77eaa69a52f62e8514b)) -* **internal:** version bump ([44abb37](https://github.com/digitalocean/gradientai-python/commit/44abb37d897dc77c1fda511b195cc9297fd324ac)) -* **internal:** version bump ([981ba17](https://github.com/digitalocean/gradientai-python/commit/981ba17925e46a9f87a141a481645711fbb6bb6e)) +* **internal:** version bump ([be22c3d](https://github.com/digitalocean/gradient-python/commit/be22c3d8c9835b45643d5e91db093108cb03f893)) +* **internal:** version bump ([2774d54](https://github.com/digitalocean/gradient-python/commit/2774d540184f8ca7d401c77eaa69a52f62e8514b)) +* **internal:** version bump ([44abb37](https://github.com/digitalocean/gradient-python/commit/44abb37d897dc77c1fda511b195cc9297fd324ac)) +* **internal:** version bump 
([981ba17](https://github.com/digitalocean/gradient-python/commit/981ba17925e46a9f87a141a481645711fbb6bb6e)) ## 0.1.0-alpha.19 (2025-07-19) -Full Changelog: [v0.1.0-alpha.18...v0.1.0-alpha.19](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.18...v0.1.0-alpha.19) +Full Changelog: [v0.1.0-alpha.18...v0.1.0-alpha.19](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.18...v0.1.0-alpha.19) ### Features -* **api:** manual updates ([2c36a8b](https://github.com/digitalocean/gradientai-python/commit/2c36a8be83bb24025adf921c24acba3d666bf25d)) +* **api:** manual updates ([2c36a8b](https://github.com/digitalocean/gradient-python/commit/2c36a8be83bb24025adf921c24acba3d666bf25d)) ### Chores -* **internal:** version bump ([2864090](https://github.com/digitalocean/gradientai-python/commit/2864090c0af4858e4bee35aef2113e6983cfdca4)) +* **internal:** version bump ([2864090](https://github.com/digitalocean/gradient-python/commit/2864090c0af4858e4bee35aef2113e6983cfdca4)) ## 0.1.0-alpha.18 (2025-07-19) -Full Changelog: [v0.1.0-alpha.17...v0.1.0-alpha.18](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.17...v0.1.0-alpha.18) +Full Changelog: [v0.1.0-alpha.17...v0.1.0-alpha.18](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.17...v0.1.0-alpha.18) ### Features -* **api:** manual updates ([92d54ed](https://github.com/digitalocean/gradientai-python/commit/92d54edfff94931f10fb8dac822764edf6fca6bd)) -* **api:** manual updates ([688982c](https://github.com/digitalocean/gradientai-python/commit/688982c143e0ebca62f6ac39c1e074a2fd4083fc)) +* **api:** manual updates ([92d54ed](https://github.com/digitalocean/gradient-python/commit/92d54edfff94931f10fb8dac822764edf6fca6bd)) +* **api:** manual updates ([688982c](https://github.com/digitalocean/gradient-python/commit/688982c143e0ebca62f6ac39c1e074a2fd4083fc)) ### Chores -* **internal:** version bump ([ecb4bae](https://github.com/digitalocean/gradientai-python/commit/ecb4baedce933efc4ae99e0ef47100a02a68c9cd)) -* **internal:** version bump ([feb32ce](https://github.com/digitalocean/gradientai-python/commit/feb32ce78b107e9414be87e8c34d8c3274105cb4)) -* update pypi package name ([656dfe0](https://github.com/digitalocean/gradientai-python/commit/656dfe01d8e301dd1f93b3fa447434e6a5b41270)) +* **internal:** version bump ([ecb4bae](https://github.com/digitalocean/gradient-python/commit/ecb4baedce933efc4ae99e0ef47100a02a68c9cd)) +* **internal:** version bump ([feb32ce](https://github.com/digitalocean/gradient-python/commit/feb32ce78b107e9414be87e8c34d8c3274105cb4)) +* update pypi package name ([656dfe0](https://github.com/digitalocean/gradient-python/commit/656dfe01d8e301dd1f93b3fa447434e6a5b41270)) ## 0.1.0-alpha.17 (2025-07-19) -Full Changelog: [v0.1.0-alpha.16...v0.1.0-alpha.17](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.16...v0.1.0-alpha.17) +Full Changelog: [v0.1.0-alpha.16...v0.1.0-alpha.17](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.16...v0.1.0-alpha.17) ### Chores -* **internal:** version bump ([bc0b77b](https://github.com/digitalocean/gradientai-python/commit/bc0b77b663dc5837a2e341b70b1cda31224a6d9d)) -* **internal:** version bump ([503666f](https://github.com/digitalocean/gradientai-python/commit/503666fa61c23e584a22273371850f520100984a)) -* **internal:** version bump ([394991e](https://github.com/digitalocean/gradientai-python/commit/394991e1f436ac2fa3581a3e1bab02e8a95f94b9)) -* **internal:** version bump 
([7ae18a1](https://github.com/digitalocean/gradientai-python/commit/7ae18a15cc889c8b0ffe5879824745e964cdd637)) +* **internal:** version bump ([bc0b77b](https://github.com/digitalocean/gradient-python/commit/bc0b77b663dc5837a2e341b70b1cda31224a6d9d)) +* **internal:** version bump ([503666f](https://github.com/digitalocean/gradient-python/commit/503666fa61c23e584a22273371850f520100984a)) +* **internal:** version bump ([394991e](https://github.com/digitalocean/gradient-python/commit/394991e1f436ac2fa3581a3e1bab02e8a95f94b9)) +* **internal:** version bump ([7ae18a1](https://github.com/digitalocean/gradient-python/commit/7ae18a15cc889c8b0ffe5879824745e964cdd637)) ## 0.1.0-alpha.16 (2025-07-18) -Full Changelog: [v0.1.0-alpha.15...v0.1.0-alpha.16](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.15...v0.1.0-alpha.16) +Full Changelog: [v0.1.0-alpha.15...v0.1.0-alpha.16](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.15...v0.1.0-alpha.16) ### Chores -* **internal:** version bump ([02f1f68](https://github.com/digitalocean/gradientai-python/commit/02f1f686505028155ee2a4cf670794117ce7981a)) +* **internal:** version bump ([02f1f68](https://github.com/digitalocean/gradient-python/commit/02f1f686505028155ee2a4cf670794117ce7981a)) ## 0.1.0-alpha.15 (2025-07-18) -Full Changelog: [v0.1.0-alpha.14...v0.1.0-alpha.15](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.14...v0.1.0-alpha.15) +Full Changelog: [v0.1.0-alpha.14...v0.1.0-alpha.15](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.14...v0.1.0-alpha.15) ### Features -* **api:** add gpu droplets ([b207e9a](https://github.com/digitalocean/gradientai-python/commit/b207e9a69ddf821522f5d9e9f10502850220585f)) -* **api:** add gpu droplets ([b9e317b](https://github.com/digitalocean/gradientai-python/commit/b9e317bac2c541a7eafcfb59a4b19c81e1145075)) +* **api:** add gpu droplets ([b207e9a](https://github.com/digitalocean/gradient-python/commit/b207e9a69ddf821522f5d9e9f10502850220585f)) +* **api:** add gpu droplets ([b9e317b](https://github.com/digitalocean/gradient-python/commit/b9e317bac2c541a7eafcfb59a4b19c81e1145075)) ### Chores -* format ([d940e66](https://github.com/digitalocean/gradientai-python/commit/d940e66107e00f351853c0bc667ca6ed3cf98605)) -* **internal:** version bump ([1a66126](https://github.com/digitalocean/gradientai-python/commit/1a661264f68580dff74c3f7d4891ab2661fde190)) -* **internal:** version bump ([9c546a1](https://github.com/digitalocean/gradientai-python/commit/9c546a1f97241bb448430e1e43f4e20589e243c1)) -* **internal:** version bump ([8814098](https://github.com/digitalocean/gradientai-python/commit/881409847161671b798baf2c89f37ae29e195f29)) -* **internal:** version bump ([bb3ad60](https://github.com/digitalocean/gradientai-python/commit/bb3ad60d02fe01b937eaced64682fd66d95a9aec)) -* **internal:** version bump ([2022024](https://github.com/digitalocean/gradientai-python/commit/20220246634accf95c4a53df200db5ace7107c55)) -* **internal:** version bump ([52e2c23](https://github.com/digitalocean/gradientai-python/commit/52e2c23c23d4dc27c176ebf4783c8fbd86a4c07b)) -* **internal:** version bump ([8ac0f2a](https://github.com/digitalocean/gradientai-python/commit/8ac0f2a6d4862907243ba78b132373289e2c3543)) -* **internal:** version bump ([d83fe97](https://github.com/digitalocean/gradientai-python/commit/d83fe97aa2f77c84c3c7f4bf40b9fb94c5c28aca)) -* **internal:** version bump 
([9d20399](https://github.com/digitalocean/gradientai-python/commit/9d2039919e1d9c9e6d153edfb03bccff18b56686)) -* **internal:** version bump ([44a045a](https://github.com/digitalocean/gradientai-python/commit/44a045a9c0ce0f0769cce66bc7421a9d81cbc645)) -* **internal:** version bump ([95d1dd2](https://github.com/digitalocean/gradientai-python/commit/95d1dd24d290d7d5f23328e4c45c439dca5df748)) -* **internal:** version bump ([7416147](https://github.com/digitalocean/gradientai-python/commit/74161477f98e3a76b7227b07d942e1f26a4612b3)) -* **internal:** version bump ([06d7f19](https://github.com/digitalocean/gradientai-python/commit/06d7f19cd42a6bc578b39709fe6efed8741a24bc)) +* format ([d940e66](https://github.com/digitalocean/gradient-python/commit/d940e66107e00f351853c0bc667ca6ed3cf98605)) +* **internal:** version bump ([1a66126](https://github.com/digitalocean/gradient-python/commit/1a661264f68580dff74c3f7d4891ab2661fde190)) +* **internal:** version bump ([9c546a1](https://github.com/digitalocean/gradient-python/commit/9c546a1f97241bb448430e1e43f4e20589e243c1)) +* **internal:** version bump ([8814098](https://github.com/digitalocean/gradient-python/commit/881409847161671b798baf2c89f37ae29e195f29)) +* **internal:** version bump ([bb3ad60](https://github.com/digitalocean/gradient-python/commit/bb3ad60d02fe01b937eaced64682fd66d95a9aec)) +* **internal:** version bump ([2022024](https://github.com/digitalocean/gradient-python/commit/20220246634accf95c4a53df200db5ace7107c55)) +* **internal:** version bump ([52e2c23](https://github.com/digitalocean/gradient-python/commit/52e2c23c23d4dc27c176ebf4783c8fbd86a4c07b)) +* **internal:** version bump ([8ac0f2a](https://github.com/digitalocean/gradient-python/commit/8ac0f2a6d4862907243ba78b132373289e2c3543)) +* **internal:** version bump ([d83fe97](https://github.com/digitalocean/gradient-python/commit/d83fe97aa2f77c84c3c7f4bf40b9fb94c5c28aca)) +* **internal:** version bump ([9d20399](https://github.com/digitalocean/gradient-python/commit/9d2039919e1d9c9e6d153edfb03bccff18b56686)) +* **internal:** version bump ([44a045a](https://github.com/digitalocean/gradient-python/commit/44a045a9c0ce0f0769cce66bc7421a9d81cbc645)) +* **internal:** version bump ([95d1dd2](https://github.com/digitalocean/gradient-python/commit/95d1dd24d290d7d5f23328e4c45c439dca5df748)) +* **internal:** version bump ([7416147](https://github.com/digitalocean/gradient-python/commit/74161477f98e3a76b7227b07d942e1f26a4612b3)) +* **internal:** version bump ([06d7f19](https://github.com/digitalocean/gradient-python/commit/06d7f19cd42a6bc578b39709fe6efed8741a24bc)) ## 0.1.0-alpha.14 (2025-07-17) -Full Changelog: [v0.1.0-alpha.13...v0.1.0-alpha.14](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.13...v0.1.0-alpha.14) +Full Changelog: [v0.1.0-alpha.13...v0.1.0-alpha.14](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.13...v0.1.0-alpha.14) ### Features -* **api:** update via SDK Studio ([6cdcc6a](https://github.com/digitalocean/gradientai-python/commit/6cdcc6a36b9dde2117295ee7bcb9a3bc15571779)) -* **api:** update via SDK Studio ([5475a94](https://github.com/digitalocean/gradientai-python/commit/5475a9460676d1c48e99e0d1e75e50de7caecf3a)) -* dynamically build domain for agents.chat.completions.create() ([dee4ef0](https://github.com/digitalocean/gradientai-python/commit/dee4ef07ebb3367abc7f05c15271d43ab57e2081)) -* dynamically build domain for agents.chat.completions.create() 
([3dbd194](https://github.com/digitalocean/gradientai-python/commit/3dbd194643e31907a78ab7e222e95e7508378ada)) +* **api:** update via SDK Studio ([6cdcc6a](https://github.com/digitalocean/gradient-python/commit/6cdcc6a36b9dde2117295ee7bcb9a3bc15571779)) +* **api:** update via SDK Studio ([5475a94](https://github.com/digitalocean/gradient-python/commit/5475a9460676d1c48e99e0d1e75e50de7caecf3a)) +* dynamically build domain for agents.chat.completions.create() ([dee4ef0](https://github.com/digitalocean/gradient-python/commit/dee4ef07ebb3367abc7f05c15271d43ab57e2081)) +* dynamically build domain for agents.chat.completions.create() ([3dbd194](https://github.com/digitalocean/gradient-python/commit/3dbd194643e31907a78ab7e222e95e7508378ada)) ### Bug Fixes -* add /api prefix for agent routes ([00c62b3](https://github.com/digitalocean/gradientai-python/commit/00c62b35f3a29ea8b6e7c96b2e755e6b5199ae55)) -* add /api prefix for agent routes ([72a59db](https://github.com/digitalocean/gradientai-python/commit/72a59db98ebeccdf0c4498f6cce37ffe1cb198dd)) -* fix validation for inference_key and agent_key auth ([d27046d](https://github.com/digitalocean/gradientai-python/commit/d27046d0c1e8214dd09ab5508e4fcb11fa549dfe)) +* add /api prefix for agent routes ([00c62b3](https://github.com/digitalocean/gradient-python/commit/00c62b35f3a29ea8b6e7c96b2e755e6b5199ae55)) +* add /api prefix for agent routes ([72a59db](https://github.com/digitalocean/gradient-python/commit/72a59db98ebeccdf0c4498f6cce37ffe1cb198dd)) +* fix validation for inference_key and agent_key auth ([d27046d](https://github.com/digitalocean/gradient-python/commit/d27046d0c1e8214dd09ab5508e4fcb11fa549dfe)) ### Chores -* **internal:** version bump ([f3629f1](https://github.com/digitalocean/gradientai-python/commit/f3629f169267f240aeb2c4d400606761a649dff7)) +* **internal:** version bump ([f3629f1](https://github.com/digitalocean/gradient-python/commit/f3629f169267f240aeb2c4d400606761a649dff7)) ## 0.1.0-alpha.13 (2025-07-15) -Full Changelog: [v0.1.0-alpha.12...v0.1.0-alpha.13](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.12...v0.1.0-alpha.13) +Full Changelog: [v0.1.0-alpha.12...v0.1.0-alpha.13](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.12...v0.1.0-alpha.13) ### Features -* **api:** manual updates ([bd6fecc](https://github.com/digitalocean/gradientai-python/commit/bd6feccf97fa5877085783419f11dad04c57d700)) -* **api:** manual updates ([c2b96ce](https://github.com/digitalocean/gradientai-python/commit/c2b96ce3d95cc9b74bffd8d6a499927eefd23b14)) -* **api:** share chat completion chunk model between chat and agent.chat ([d67371f](https://github.com/digitalocean/gradientai-python/commit/d67371f9f4d0761ea03097820bc3e77654b4d2bf)) -* clean up environment call outs ([64ee5b4](https://github.com/digitalocean/gradientai-python/commit/64ee5b449c0195288d0a1dc55d2725e8cdd6afcf)) +* **api:** manual updates ([bd6fecc](https://github.com/digitalocean/gradient-python/commit/bd6feccf97fa5877085783419f11dad04c57d700)) +* **api:** manual updates ([c2b96ce](https://github.com/digitalocean/gradient-python/commit/c2b96ce3d95cc9b74bffd8d6a499927eefd23b14)) +* **api:** share chat completion chunk model between chat and agent.chat ([d67371f](https://github.com/digitalocean/gradient-python/commit/d67371f9f4d0761ea03097820bc3e77654b4d2bf)) +* clean up environment call outs ([64ee5b4](https://github.com/digitalocean/gradient-python/commit/64ee5b449c0195288d0a1dc55d2725e8cdd6afcf)) ### Bug Fixes -* **client:** don't send Content-Type 
header on GET requests ([507a342](https://github.com/digitalocean/gradientai-python/commit/507a342fbcc7c801ba36708e56ea2d2a28a1a392)) -* **parsing:** correctly handle nested discriminated unions ([569e473](https://github.com/digitalocean/gradientai-python/commit/569e473d422928597ccf762133d5e52ac9a8665a)) +* **client:** don't send Content-Type header on GET requests ([507a342](https://github.com/digitalocean/gradient-python/commit/507a342fbcc7c801ba36708e56ea2d2a28a1a392)) +* **parsing:** correctly handle nested discriminated unions ([569e473](https://github.com/digitalocean/gradient-python/commit/569e473d422928597ccf762133d5e52ac9a8665a)) ### Chores -* **internal:** bump pinned h11 dep ([6f4e960](https://github.com/digitalocean/gradientai-python/commit/6f4e960b6cb838cbf5e50301375fcb4b60a2cfb3)) -* **internal:** codegen related update ([1df657d](https://github.com/digitalocean/gradientai-python/commit/1df657d9b384cb85d27fe839c0dab212a7773f8f)) -* **package:** mark python 3.13 as supported ([1a899b6](https://github.com/digitalocean/gradientai-python/commit/1a899b66a484986672a380e405f09b1ae94b6310)) -* **readme:** fix version rendering on pypi ([6fbe83b](https://github.com/digitalocean/gradientai-python/commit/6fbe83b11a9e3dbb40cf7f9f627abbbd086ee24a)) +* **internal:** bump pinned h11 dep ([6f4e960](https://github.com/digitalocean/gradient-python/commit/6f4e960b6cb838cbf5e50301375fcb4b60a2cfb3)) +* **internal:** codegen related update ([1df657d](https://github.com/digitalocean/gradient-python/commit/1df657d9b384cb85d27fe839c0dab212a7773f8f)) +* **package:** mark python 3.13 as supported ([1a899b6](https://github.com/digitalocean/gradient-python/commit/1a899b66a484986672a380e405f09b1ae94b6310)) +* **readme:** fix version rendering on pypi ([6fbe83b](https://github.com/digitalocean/gradient-python/commit/6fbe83b11a9e3dbb40cf7f9f627abbbd086ee24a)) ## 0.1.0-alpha.12 (2025-07-02) -Full Changelog: [v0.1.0-alpha.11...v0.1.0-alpha.12](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.11...v0.1.0-alpha.12) +Full Changelog: [v0.1.0-alpha.11...v0.1.0-alpha.12](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.11...v0.1.0-alpha.12) ### Bug Fixes -* **ci:** correct conditional ([646b4c6](https://github.com/digitalocean/gradientai-python/commit/646b4c62044c9bb5211c50e008ef30c777715acb)) +* **ci:** correct conditional ([646b4c6](https://github.com/digitalocean/gradient-python/commit/646b4c62044c9bb5211c50e008ef30c777715acb)) ### Chores -* **ci:** change upload type ([7449413](https://github.com/digitalocean/gradientai-python/commit/7449413efc16c58bc484f5f5793aa9cd36c3f405)) -* **internal:** codegen related update ([434929c](https://github.com/digitalocean/gradientai-python/commit/434929ce29b314182dec1542a3093c98ca0bb24a)) +* **ci:** change upload type ([7449413](https://github.com/digitalocean/gradient-python/commit/7449413efc16c58bc484f5f5793aa9cd36c3f405)) +* **internal:** codegen related update ([434929c](https://github.com/digitalocean/gradient-python/commit/434929ce29b314182dec1542a3093c98ca0bb24a)) ## 0.1.0-alpha.11 (2025-06-28) -Full Changelog: [v0.1.0-alpha.10...v0.1.0-alpha.11](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.10...v0.1.0-alpha.11) +Full Changelog: [v0.1.0-alpha.10...v0.1.0-alpha.11](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.10...v0.1.0-alpha.11) ### Features -* **api:** manual updates ([8d918dc](https://github.com/digitalocean/gradientai-python/commit/8d918dcc45f03d799b3aed4e94276086e2d7ea9b)) 
+* **api:** manual updates ([8d918dc](https://github.com/digitalocean/gradient-python/commit/8d918dcc45f03d799b3aed4e94276086e2d7ea9b)) ### Chores -* **ci:** only run for pushes and fork pull requests ([adfb5b5](https://github.com/digitalocean/gradientai-python/commit/adfb5b51149f667bf9a0b4b4c4c6418e91f843d8)) -* Move model providers ([8d918dc](https://github.com/digitalocean/gradientai-python/commit/8d918dcc45f03d799b3aed4e94276086e2d7ea9b)) +* **ci:** only run for pushes and fork pull requests ([adfb5b5](https://github.com/digitalocean/gradient-python/commit/adfb5b51149f667bf9a0b4b4c4c6418e91f843d8)) +* Move model providers ([8d918dc](https://github.com/digitalocean/gradient-python/commit/8d918dcc45f03d799b3aed4e94276086e2d7ea9b)) ## 0.1.0-alpha.10 (2025-06-28) -Full Changelog: [v0.1.0-alpha.9...v0.1.0-alpha.10](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.9...v0.1.0-alpha.10) +Full Changelog: [v0.1.0-alpha.9...v0.1.0-alpha.10](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.9...v0.1.0-alpha.10) ### Features -* **api:** manual updates ([0e5effc](https://github.com/digitalocean/gradientai-python/commit/0e5effc727cebe88ea38f0ec4c3fcb45ffeb4924)) -* **api:** manual updates ([d510ae0](https://github.com/digitalocean/gradientai-python/commit/d510ae03f13669af7f47093af06a00609e9b7c07)) -* **api:** manual updates ([c5bc3ca](https://github.com/digitalocean/gradientai-python/commit/c5bc3caa477945dc19bbf90661ffeea86370189d)) +* **api:** manual updates ([0e5effc](https://github.com/digitalocean/gradient-python/commit/0e5effc727cebe88ea38f0ec4c3fcb45ffeb4924)) +* **api:** manual updates ([d510ae0](https://github.com/digitalocean/gradient-python/commit/d510ae03f13669af7f47093af06a00609e9b7c07)) +* **api:** manual updates ([c5bc3ca](https://github.com/digitalocean/gradient-python/commit/c5bc3caa477945dc19bbf90661ffeea86370189d)) ## 0.1.0-alpha.9 (2025-06-28) -Full Changelog: [v0.1.0-alpha.8...v0.1.0-alpha.9](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.8...v0.1.0-alpha.9) +Full Changelog: [v0.1.0-alpha.8...v0.1.0-alpha.9](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.8...v0.1.0-alpha.9) ### Features -* **api:** manual updates ([e0c210a](https://github.com/digitalocean/gradientai-python/commit/e0c210a0ffde24bd2c5877689f8ab222288cc597)) +* **api:** manual updates ([e0c210a](https://github.com/digitalocean/gradient-python/commit/e0c210a0ffde24bd2c5877689f8ab222288cc597)) ## 0.1.0-alpha.8 (2025-06-27) -Full Changelog: [v0.1.0-alpha.7...v0.1.0-alpha.8](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.7...v0.1.0-alpha.8) +Full Changelog: [v0.1.0-alpha.7...v0.1.0-alpha.8](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.7...v0.1.0-alpha.8) ### Features -* **client:** setup streaming ([3fd6e57](https://github.com/digitalocean/gradientai-python/commit/3fd6e575f6f5952860e42d8c1fa22ccb0b10c623)) +* **client:** setup streaming ([3fd6e57](https://github.com/digitalocean/gradient-python/commit/3fd6e575f6f5952860e42d8c1fa22ccb0b10c623)) ## 0.1.0-alpha.7 (2025-06-27) -Full Changelog: [v0.1.0-alpha.6...v0.1.0-alpha.7](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.6...v0.1.0-alpha.7) +Full Changelog: [v0.1.0-alpha.6...v0.1.0-alpha.7](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.6...v0.1.0-alpha.7) ### Features -* **api:** manual updates 
([63b9ec0](https://github.com/digitalocean/gradientai-python/commit/63b9ec02a646dad258afbd048db8db1af8d4401b)) -* **api:** manual updates ([5247aee](https://github.com/digitalocean/gradientai-python/commit/5247aee6d6052f6380fbe892d7c2bd9a8d0a32c0)) -* **api:** manual updates ([aa9e2c7](https://github.com/digitalocean/gradientai-python/commit/aa9e2c78956162f6195fdbaa1c95754ee4af207e)) -* **client:** add agent_domain option ([b4b6260](https://github.com/digitalocean/gradientai-python/commit/b4b62609a12a1dfa0b505e9ec54334b776fb0515)) +* **api:** manual updates ([63b9ec0](https://github.com/digitalocean/gradient-python/commit/63b9ec02a646dad258afbd048db8db1af8d4401b)) +* **api:** manual updates ([5247aee](https://github.com/digitalocean/gradient-python/commit/5247aee6d6052f6380fbe892d7c2bd9a8d0a32c0)) +* **api:** manual updates ([aa9e2c7](https://github.com/digitalocean/gradient-python/commit/aa9e2c78956162f6195fdbaa1c95754ee4af207e)) +* **client:** add agent_domain option ([b4b6260](https://github.com/digitalocean/gradient-python/commit/b4b62609a12a1dfa0b505e9ec54334b776fb0515)) ## 0.1.0-alpha.6 (2025-06-27) -Full Changelog: [v0.1.0-alpha.5...v0.1.0-alpha.6](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.5...v0.1.0-alpha.6) +Full Changelog: [v0.1.0-alpha.5...v0.1.0-alpha.6](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.5...v0.1.0-alpha.6) ### Features -* **api:** manual updates ([04eb1be](https://github.com/digitalocean/gradientai-python/commit/04eb1be35de7db04e1f0d4e1da8719b54a353bb5)) +* **api:** manual updates ([04eb1be](https://github.com/digitalocean/gradient-python/commit/04eb1be35de7db04e1f0d4e1da8719b54a353bb5)) ## 0.1.0-alpha.5 (2025-06-27) -Full Changelog: [v0.1.0-alpha.4...v0.1.0-alpha.5](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.4...v0.1.0-alpha.5) +Full Changelog: [v0.1.0-alpha.4...v0.1.0-alpha.5](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.4...v0.1.0-alpha.5) ### Features -* **api:** define api links and meta as shared models ([8d87001](https://github.com/digitalocean/gradientai-python/commit/8d87001b51de17dd1a36419c0e926cef119f20b8)) -* **api:** update OpenAI spec and add endpoint/smodels ([e92c54b](https://github.com/digitalocean/gradientai-python/commit/e92c54b05f1025b6173945524724143fdafc7728)) -* **api:** update via SDK Studio ([1ae76f7](https://github.com/digitalocean/gradientai-python/commit/1ae76f78ce9e74f8fd555e3497299127e9aa6889)) -* **api:** update via SDK Studio ([98424f4](https://github.com/digitalocean/gradientai-python/commit/98424f4a2c7e00138fb5eecf94ca72e2ffcc1212)) -* **api:** update via SDK Studio ([299fd1b](https://github.com/digitalocean/gradientai-python/commit/299fd1b29b42f6f2581150e52dcf65fc73270862)) -* **api:** update via SDK Studio ([9a45427](https://github.com/digitalocean/gradientai-python/commit/9a45427678644c34afe9792a2561f394718e64ff)) -* **api:** update via SDK Studio ([abe573f](https://github.com/digitalocean/gradientai-python/commit/abe573fcc2233c7d71f0a925eea8fa9dd4d0fb91)) -* **api:** update via SDK Studio ([e5ce590](https://github.com/digitalocean/gradientai-python/commit/e5ce59057792968892317215078ac2c11e811812)) -* **api:** update via SDK Studio ([1daa3f5](https://github.com/digitalocean/gradientai-python/commit/1daa3f55a49b5411d1b378fce30aea3ccbccb6d7)) -* **api:** update via SDK Studio ([1c702b3](https://github.com/digitalocean/gradientai-python/commit/1c702b340e4fd723393c0f02df2a87d03ca8c9bb)) -* **api:** update via SDK Studio 
([891d6b3](https://github.com/digitalocean/gradientai-python/commit/891d6b32e5bdb07d23abf898cec17a60ee64f99d)) -* **api:** update via SDK Studio ([dcbe442](https://github.com/digitalocean/gradientai-python/commit/dcbe442efc67554e60b3b28360a4d9f7dcbb313a)) -* use inference key for chat.completions.create() ([5d38e2e](https://github.com/digitalocean/gradientai-python/commit/5d38e2eb8604a0a4065d146ba71aa4a5a0e93d85)) +* **api:** define api links and meta as shared models ([8d87001](https://github.com/digitalocean/gradient-python/commit/8d87001b51de17dd1a36419c0e926cef119f20b8)) +* **api:** update OpenAI spec and add endpoint/smodels ([e92c54b](https://github.com/digitalocean/gradient-python/commit/e92c54b05f1025b6173945524724143fdafc7728)) +* **api:** update via SDK Studio ([1ae76f7](https://github.com/digitalocean/gradient-python/commit/1ae76f78ce9e74f8fd555e3497299127e9aa6889)) +* **api:** update via SDK Studio ([98424f4](https://github.com/digitalocean/gradient-python/commit/98424f4a2c7e00138fb5eecf94ca72e2ffcc1212)) +* **api:** update via SDK Studio ([299fd1b](https://github.com/digitalocean/gradient-python/commit/299fd1b29b42f6f2581150e52dcf65fc73270862)) +* **api:** update via SDK Studio ([9a45427](https://github.com/digitalocean/gradient-python/commit/9a45427678644c34afe9792a2561f394718e64ff)) +* **api:** update via SDK Studio ([abe573f](https://github.com/digitalocean/gradient-python/commit/abe573fcc2233c7d71f0a925eea8fa9dd4d0fb91)) +* **api:** update via SDK Studio ([e5ce590](https://github.com/digitalocean/gradient-python/commit/e5ce59057792968892317215078ac2c11e811812)) +* **api:** update via SDK Studio ([1daa3f5](https://github.com/digitalocean/gradient-python/commit/1daa3f55a49b5411d1b378fce30aea3ccbccb6d7)) +* **api:** update via SDK Studio ([1c702b3](https://github.com/digitalocean/gradient-python/commit/1c702b340e4fd723393c0f02df2a87d03ca8c9bb)) +* **api:** update via SDK Studio ([891d6b3](https://github.com/digitalocean/gradient-python/commit/891d6b32e5bdb07d23abf898cec17a60ee64f99d)) +* **api:** update via SDK Studio ([dcbe442](https://github.com/digitalocean/gradient-python/commit/dcbe442efc67554e60b3b28360a4d9f7dcbb313a)) +* use inference key for chat.completions.create() ([5d38e2e](https://github.com/digitalocean/gradient-python/commit/5d38e2eb8604a0a4065d146ba71aa4a5a0e93d85)) ### Bug Fixes -* **ci:** release-doctor — report correct token name ([4d2b3dc](https://github.com/digitalocean/gradientai-python/commit/4d2b3dcefdefc3830d631c5ac27b58778a299983)) +* **ci:** release-doctor — report correct token name ([4d2b3dc](https://github.com/digitalocean/gradient-python/commit/4d2b3dcefdefc3830d631c5ac27b58778a299983)) ### Chores -* clean up pyproject ([78637e9](https://github.com/digitalocean/gradientai-python/commit/78637e99816d459c27b4f2fd2f6d79c8d32ecfbe)) -* **internal:** codegen related update ([58d7319](https://github.com/digitalocean/gradientai-python/commit/58d7319ce68c639c2151a3e96a5d522ec06ff96f)) +* clean up pyproject ([78637e9](https://github.com/digitalocean/gradient-python/commit/78637e99816d459c27b4f2fd2f6d79c8d32ecfbe)) +* **internal:** codegen related update ([58d7319](https://github.com/digitalocean/gradient-python/commit/58d7319ce68c639c2151a3e96a5d522ec06ff96f)) ## 0.1.0-alpha.4 (2025-06-25) -Full Changelog: [v0.1.0-alpha.3...v0.1.0-alpha.4](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.3...v0.1.0-alpha.4) +Full Changelog: 
[v0.1.0-alpha.3...v0.1.0-alpha.4](https://github.com/digitalocean/gradient-python/compare/v0.1.0-alpha.3...v0.1.0-alpha.4) ### Features -* **api:** update via SDK Studio ([d1ea884](https://github.com/digitalocean/gradientai-python/commit/d1ea884c9be72b3f8804c5ba91bf4f77a3284a6c)) -* **api:** update via SDK Studio ([584f9f1](https://github.com/digitalocean/gradientai-python/commit/584f9f1304b3612eb25f1438041d287592463438)) -* **api:** update via SDK Studio ([7aee6e5](https://github.com/digitalocean/gradientai-python/commit/7aee6e55a0574fc1b6ab73a1777c92e4f3a940ea)) -* **api:** update via SDK Studio ([4212f62](https://github.com/digitalocean/gradientai-python/commit/4212f62b19c44bcb12c02fe396e8c51dd89d3868)) -* **api:** update via SDK Studio ([b16cceb](https://github.com/digitalocean/gradientai-python/commit/b16cceb63edb4253084036b693834bde5da10943)) -* **api:** update via SDK Studio ([34382c0](https://github.com/digitalocean/gradientai-python/commit/34382c06c5d61ac97572cb4977d020e1ede9d4ff)) -* **api:** update via SDK Studio ([c33920a](https://github.com/digitalocean/gradientai-python/commit/c33920aba0dc1f3b8f4f890ce706c86fd452dd6b)) -* **api:** update via SDK Studio ([359c8d8](https://github.com/digitalocean/gradientai-python/commit/359c8d88cec1d60f0beb810b5a0139443d0a3348)) -* **api:** update via SDK Studio ([f27643e](https://github.com/digitalocean/gradientai-python/commit/f27643e1e00f606029be919a7117801facc6e5b7)) -* **api:** update via SDK Studio ([e59144c](https://github.com/digitalocean/gradientai-python/commit/e59144c2d474a4003fd28b8eded08814ffa8d2f3)) -* **api:** update via SDK Studio ([97e1768](https://github.com/digitalocean/gradientai-python/commit/97e17687a348b8ef218c23a06729b6edb1ac5ea9)) -* **api:** update via SDK Studio ([eac41f1](https://github.com/digitalocean/gradientai-python/commit/eac41f12912b8d32ffa23d225f4ca56fa5c72505)) -* **api:** update via SDK Studio ([1fa7ebb](https://github.com/digitalocean/gradientai-python/commit/1fa7ebb0080db9087b82d29e7197e44dfbb1ebed)) -* **api:** update via SDK Studio ([aa2610a](https://github.com/digitalocean/gradientai-python/commit/aa2610afe7da79429e05bff64b4796de7f525681)) -* **api:** update via SDK Studio ([e5c8d76](https://github.com/digitalocean/gradientai-python/commit/e5c8d768388b16c06fcc2abee71a53dcc8b3e8c5)) -* **api:** update via SDK Studio ([5f700dc](https://github.com/digitalocean/gradientai-python/commit/5f700dc7a4e757015d3bd6f2e82a311114b82d77)) -* **api:** update via SDK Studio ([c042496](https://github.com/digitalocean/gradientai-python/commit/c04249614917198b1eb2324438605d99b719a1cf)) -* **api:** update via SDK Studio ([5ebec81](https://github.com/digitalocean/gradientai-python/commit/5ebec81604a206eba5e75a7e8990bd7711ba8f47)) -* **api:** update via SDK Studio ([cac54a8](https://github.com/digitalocean/gradientai-python/commit/cac54a81a3f22d34b2de0ebfac3c68a982178cad)) -* **api:** update via SDK Studio ([6d62ab0](https://github.com/digitalocean/gradientai-python/commit/6d62ab00594d70df0458a0a401f866af15a9298e)) -* **api:** update via SDK Studio ([0ccc62c](https://github.com/digitalocean/gradientai-python/commit/0ccc62cb8ef387e0aaf6784db25d5f99a587e5da)) -* **api:** update via SDK Studio ([e75adfb](https://github.com/digitalocean/gradientai-python/commit/e75adfbd2d035e57ae110a1d78ea40fb116975e5)) -* **api:** update via SDK Studio ([8bd264b](https://github.com/digitalocean/gradientai-python/commit/8bd264b4b4686ca078bf4eb4b5462f058406df3e)) -* **api:** update via SDK Studio 
([6254ccf](https://github.com/digitalocean/gradientai-python/commit/6254ccf45cbe50ca8191c7149824964f5d00d82f)) -* **api:** update via SDK Studio ([8f5761b](https://github.com/digitalocean/gradientai-python/commit/8f5761b1d18fb48ad7488e6f0ad771c077eb7961)) -* **api:** update via SDK Studio ([f853616](https://github.com/digitalocean/gradientai-python/commit/f8536166320d1d5bacf1d10a5edb2f71691dde8b)) -* **client:** add support for aiohttp ([494afde](https://github.com/digitalocean/gradientai-python/commit/494afde754f735d1ba95011fc83d23d2410fcfdd)) +* **api:** update via SDK Studio ([d1ea884](https://github.com/digitalocean/gradient-python/commit/d1ea884c9be72b3f8804c5ba91bf4f77a3284a6c)) +* **api:** update via SDK Studio ([584f9f1](https://github.com/digitalocean/gradient-python/commit/584f9f1304b3612eb25f1438041d287592463438)) +* **api:** update via SDK Studio ([7aee6e5](https://github.com/digitalocean/gradient-python/commit/7aee6e55a0574fc1b6ab73a1777c92e4f3a940ea)) +* **api:** update via SDK Studio ([4212f62](https://github.com/digitalocean/gradient-python/commit/4212f62b19c44bcb12c02fe396e8c51dd89d3868)) +* **api:** update via SDK Studio ([b16cceb](https://github.com/digitalocean/gradient-python/commit/b16cceb63edb4253084036b693834bde5da10943)) +* **api:** update via SDK Studio ([34382c0](https://github.com/digitalocean/gradient-python/commit/34382c06c5d61ac97572cb4977d020e1ede9d4ff)) +* **api:** update via SDK Studio ([c33920a](https://github.com/digitalocean/gradient-python/commit/c33920aba0dc1f3b8f4f890ce706c86fd452dd6b)) +* **api:** update via SDK Studio ([359c8d8](https://github.com/digitalocean/gradient-python/commit/359c8d88cec1d60f0beb810b5a0139443d0a3348)) +* **api:** update via SDK Studio ([f27643e](https://github.com/digitalocean/gradient-python/commit/f27643e1e00f606029be919a7117801facc6e5b7)) +* **api:** update via SDK Studio ([e59144c](https://github.com/digitalocean/gradient-python/commit/e59144c2d474a4003fd28b8eded08814ffa8d2f3)) +* **api:** update via SDK Studio ([97e1768](https://github.com/digitalocean/gradient-python/commit/97e17687a348b8ef218c23a06729b6edb1ac5ea9)) +* **api:** update via SDK Studio ([eac41f1](https://github.com/digitalocean/gradient-python/commit/eac41f12912b8d32ffa23d225f4ca56fa5c72505)) +* **api:** update via SDK Studio ([1fa7ebb](https://github.com/digitalocean/gradient-python/commit/1fa7ebb0080db9087b82d29e7197e44dfbb1ebed)) +* **api:** update via SDK Studio ([aa2610a](https://github.com/digitalocean/gradient-python/commit/aa2610afe7da79429e05bff64b4796de7f525681)) +* **api:** update via SDK Studio ([e5c8d76](https://github.com/digitalocean/gradient-python/commit/e5c8d768388b16c06fcc2abee71a53dcc8b3e8c5)) +* **api:** update via SDK Studio ([5f700dc](https://github.com/digitalocean/gradient-python/commit/5f700dc7a4e757015d3bd6f2e82a311114b82d77)) +* **api:** update via SDK Studio ([c042496](https://github.com/digitalocean/gradient-python/commit/c04249614917198b1eb2324438605d99b719a1cf)) +* **api:** update via SDK Studio ([5ebec81](https://github.com/digitalocean/gradient-python/commit/5ebec81604a206eba5e75a7e8990bd7711ba8f47)) +* **api:** update via SDK Studio ([cac54a8](https://github.com/digitalocean/gradient-python/commit/cac54a81a3f22d34b2de0ebfac3c68a982178cad)) +* **api:** update via SDK Studio ([6d62ab0](https://github.com/digitalocean/gradient-python/commit/6d62ab00594d70df0458a0a401f866af15a9298e)) +* **api:** update via SDK Studio ([0ccc62c](https://github.com/digitalocean/gradient-python/commit/0ccc62cb8ef387e0aaf6784db25d5f99a587e5da)) 
+* **api:** update via SDK Studio ([e75adfb](https://github.com/digitalocean/gradient-python/commit/e75adfbd2d035e57ae110a1d78ea40fb116975e5)) +* **api:** update via SDK Studio ([8bd264b](https://github.com/digitalocean/gradient-python/commit/8bd264b4b4686ca078bf4eb4b5462f058406df3e)) +* **api:** update via SDK Studio ([6254ccf](https://github.com/digitalocean/gradient-python/commit/6254ccf45cbe50ca8191c7149824964f5d00d82f)) +* **api:** update via SDK Studio ([8f5761b](https://github.com/digitalocean/gradient-python/commit/8f5761b1d18fb48ad7488e6f0ad771c077eb7961)) +* **api:** update via SDK Studio ([f853616](https://github.com/digitalocean/gradient-python/commit/f8536166320d1d5bacf1d10a5edb2f71691dde8b)) +* **client:** add support for aiohttp ([494afde](https://github.com/digitalocean/gradient-python/commit/494afde754f735d1ba95011fc83d23d2410fcfdd)) ### Bug Fixes -* **client:** correctly parse binary response | stream ([abba5be](https://github.com/digitalocean/gradientai-python/commit/abba5be958d03a7e5ce7d1cbf8069c0bcf52ee20)) -* **tests:** fix: tests which call HTTP endpoints directly with the example parameters ([e649dcb](https://github.com/digitalocean/gradientai-python/commit/e649dcb0f9416e9bf568cc9f3480d7e222052391)) +* **client:** correctly parse binary response | stream ([abba5be](https://github.com/digitalocean/gradient-python/commit/abba5be958d03a7e5ce7d1cbf8069c0bcf52ee20)) +* **tests:** fix: tests which call HTTP endpoints directly with the example parameters ([e649dcb](https://github.com/digitalocean/gradient-python/commit/e649dcb0f9416e9bf568cc9f3480d7e222052391)) ### Chores -* **ci:** enable for pull requests ([b6b3f9e](https://github.com/digitalocean/gradientai-python/commit/b6b3f9ea85918cfc6fc7304b2d21c340d82a0083)) -* **internal:** codegen related update ([4126872](https://github.com/digitalocean/gradientai-python/commit/41268721eafd33fcca5688ca5dff7401f25bdeb2)) -* **internal:** codegen related update ([10b79fb](https://github.com/digitalocean/gradientai-python/commit/10b79fb1d51bcff6ed0d18e5ccd18fd1cd75af9f)) -* **internal:** update conftest.py ([12e2103](https://github.com/digitalocean/gradientai-python/commit/12e210389204ff74f504e1ec3aa5ba99f1b4971c)) -* **readme:** update badges ([6e40dc3](https://github.com/digitalocean/gradientai-python/commit/6e40dc3fa4e33082be7b0bbf65d07e9ae9ac6370)) -* **tests:** add tests for httpx client instantiation & proxies ([7ecf66c](https://github.com/digitalocean/gradientai-python/commit/7ecf66c58a124c153a32055967beacbd1a3bbcf3)) -* **tests:** run tests in parallel ([861dd6b](https://github.com/digitalocean/gradientai-python/commit/861dd6b75956f2c12814ad32b05624d8d8537d52)) -* **tests:** skip some failing tests on the latest python versions ([75b4539](https://github.com/digitalocean/gradientai-python/commit/75b45398c18e75be3389be20479f54521c2e474a)) -* update SDK settings ([ed595b0](https://github.com/digitalocean/gradientai-python/commit/ed595b0a23df125ffba733d7339e771997c3f149)) +* **ci:** enable for pull requests ([b6b3f9e](https://github.com/digitalocean/gradient-python/commit/b6b3f9ea85918cfc6fc7304b2d21c340d82a0083)) +* **internal:** codegen related update ([4126872](https://github.com/digitalocean/gradient-python/commit/41268721eafd33fcca5688ca5dff7401f25bdeb2)) +* **internal:** codegen related update ([10b79fb](https://github.com/digitalocean/gradient-python/commit/10b79fb1d51bcff6ed0d18e5ccd18fd1cd75af9f)) +* **internal:** update conftest.py 
([12e2103](https://github.com/digitalocean/gradient-python/commit/12e210389204ff74f504e1ec3aa5ba99f1b4971c)) +* **readme:** update badges ([6e40dc3](https://github.com/digitalocean/gradient-python/commit/6e40dc3fa4e33082be7b0bbf65d07e9ae9ac6370)) +* **tests:** add tests for httpx client instantiation & proxies ([7ecf66c](https://github.com/digitalocean/gradient-python/commit/7ecf66c58a124c153a32055967beacbd1a3bbcf3)) +* **tests:** run tests in parallel ([861dd6b](https://github.com/digitalocean/gradient-python/commit/861dd6b75956f2c12814ad32b05624d8d8537d52)) +* **tests:** skip some failing tests on the latest python versions ([75b4539](https://github.com/digitalocean/gradient-python/commit/75b45398c18e75be3389be20479f54521c2e474a)) +* update SDK settings ([ed595b0](https://github.com/digitalocean/gradient-python/commit/ed595b0a23df125ffba733d7339e771997c3f149)) ### Documentation -* **client:** fix httpx.Timeout documentation reference ([5d452d7](https://github.com/digitalocean/gradientai-python/commit/5d452d7245af6c80f47f8395f1c03493dfb53a52)) +* **client:** fix httpx.Timeout documentation reference ([5d452d7](https://github.com/digitalocean/gradient-python/commit/5d452d7245af6c80f47f8395f1c03493dfb53a52)) ## 0.1.0-alpha.3 (2025-06-12) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4f59c83a..212c4e40 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -36,7 +36,7 @@ $ pip install -r requirements-dev.lock Most of the SDK is generated code. Modifications to code will be persisted between generations, but may result in merge conflicts between manual patches and changes from the generator. The generator will never -modify the contents of the `src/do_gradientai/lib/` and `examples/` directories. +modify the contents of the `src/gradient/lib/` and `examples/` directories. ## Adding and running examples @@ -62,7 +62,7 @@ If you’d like to use the repository from source, you can either install from g To install via git: ```sh -$ pip install git+ssh://git@github.com/digitalocean/gradientai-python.git +$ pip install git+ssh://git@github.com/digitalocean/gradient-python.git ``` Alternatively, you can build from source and install the wheel file: @@ -120,7 +120,7 @@ the changes aren't made through the automated pipeline, you may want to make rel ### Publish with a GitHub workflow -You can release to package managers by using [the `Publish PyPI` GitHub action](https://www.github.com/digitalocean/gradientai-python/actions/workflows/publish-pypi.yml). This requires a setup organization or repository secret to be set up. +You can release to package managers by using [the `Publish PyPI` GitHub action](https://www.github.com/digitalocean/gradient-python/actions/workflows/publish-pypi.yml). This requires an organization or repository secret to be set up. ### Publish manually diff --git a/LICENSE b/LICENSE index 974cb08a..656d8887 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2025 Gradient AI + Copyright 2025 Gradient Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/README.md b/README.md index 18767b69..ca6a00e8 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ > Use with care in production environments and keep an eye on releases for updates or breaking changes.
-[![PyPI version](https://img.shields.io/pypi/v/do_gradientai.svg?label=pypi%20(stable))](https://pypi.org/project/do_gradientai/) +[![PyPI version](https://img.shields.io/pypi/v/gradient.svg?label=pypi%20(stable))](https://pypi.org/project/gradient/) [![Docs](https://img.shields.io/badge/Docs-8A2BE2)](https://gradientai.digitalocean.com/getting-started/overview/) The Gradient Python library provides convenient access to the Gradient REST API from any Python 3.8+ @@ -25,7 +25,7 @@ The full API of this library can be found in [api.md](api.md). ```sh # install from PyPI -pip install --pre do_gradientai +pip install --pre gradient ``` ## Usage @@ -39,18 +39,18 @@ The full API of this library can be found in [api.md](api.md). ```python import os -from do_gradientai import GradientAI +from gradient import Gradient -api_client = GradientAI( - api_key=os.environ.get("GRADIENTAI_API_KEY"), # This is the default and can be omitted +client = Gradient( + api_key=os.environ.get("GRADIENT_API_KEY"), # This is the default and can be omitted ) -inference_client = GradientAI( +inference_client = Gradient( inference_key=os.environ.get( - "GRADIENTAI_INFERENCE_KEY" + "GRADIENT_INFERENCE_KEY" ), # This is the default and can be omitted ) -agent_client = GradientAI( - agent_key=os.environ.get("GRADIENTAI_AGENT_KEY"), # This is the default and can be omitted +agent_client = Gradient( + agent_key=os.environ.get("GRADIENT_AGENT_KEY"), # This is the default and can be omitted agent_endpoint="https://my-agent.agents.do-ai.run", ) @@ -92,20 +92,20 @@ print(agent_response.choices[0].message.content) While you can provide an `api_key`, `inference_key` keyword argument, we recommend using [python-dotenv](https://pypi.org/project/python-dotenv/) -to add `GRADIENTAI_API_KEY="My API Key"`, `GRADIENTAI_INFERENCE_KEY="My INFERENCE Key"` to your `.env` file +to add `GRADIENT_API_KEY="My API Key"`, `GRADIENT_INFERENCE_KEY="My INFERENCE Key"` to your `.env` file so that your keys are not stored in source control. ## Async usage -Simply import `AsyncGradientAI` instead of `GradientAI` and use `await` with each API call: +Simply import `AsyncGradient` instead of `Gradient` and use `await` with each API call: ```python import os import asyncio -from do_gradientai import AsyncGradientAI +from gradient import AsyncGradient -client = AsyncGradientAI( - api_key=os.environ.get("GRADIENTAI_API_KEY"), # This is the default and can be omitted +client = AsyncGradient( + api_key=os.environ.get("GRADIENT_API_KEY"), # This is the default and can be omitted ) @@ -135,19 +135,19 @@ You can enable this by installing `aiohttp`: ```sh # install from PyPI -pip install --pre do_gradientai[aiohttp] +pip install --pre gradient[aiohttp] ``` Then you can enable it by instantiating the client with `http_client=DefaultAioHttpClient()`: ```python import asyncio -from do_gradientai import DefaultAioHttpClient -from do_gradientai import AsyncGradientAI +from gradient import DefaultAioHttpClient +from gradient import AsyncGradient async def main() -> None: - async with AsyncGradientAI( + async with AsyncGradient( api_key="My API Key", http_client=DefaultAioHttpClient(), ) as client: @@ -171,9 +171,9 @@ asyncio.run(main()) We provide support for streaming responses using Server Side Events (SSE). 
```python -from do_gradientai import GradientAI +from gradient import Gradient -client = GradientAI() +client = Gradient() stream = client.chat.completions.create( messages=[ @@ -192,9 +192,9 @@ for completion in stream: The async client uses the exact same interface. ```python -from do_gradientai import AsyncGradientAI +from gradient import AsyncGradient -client = AsyncGradientAI() +client = AsyncGradient() stream = await client.chat.completions.create( messages=[ @@ -224,9 +224,9 @@ Typed requests and responses provide autocomplete and documentation within your Nested parameters are dictionaries, typed using `TypedDict`, for example: ```python -from do_gradientai import GradientAI +from gradient import Gradient -client = GradientAI() +client = Gradient() completion = client.chat.completions.create( messages=[ @@ -243,18 +243,18 @@ print(completion.stream_options) ## Handling errors -When the library is unable to connect to the API (for example, due to network connection problems or a timeout), a subclass of `do_gradientai.APIConnectionError` is raised. +When the library is unable to connect to the API (for example, due to network connection problems or a timeout), a subclass of `gradient.APIConnectionError` is raised. When the API returns a non-success status code (that is, 4xx or 5xx -response), a subclass of `do_gradientai.APIStatusError` is raised, containing `status_code` and `response` properties. +response), a subclass of `gradient.APIStatusError` is raised, containing `status_code` and `response` properties. -All errors inherit from `do_gradientai.APIError`. +All errors inherit from `gradient.APIError`. ```python -import do_gradientai -from do_gradientai import GradientAI +import gradient +from gradient import Gradient -client = GradientAI() +client = Gradient() try: client.chat.completions.create( @@ -266,12 +266,12 @@ try: ], model="llama3.3-70b-instruct", ) -except do_gradientai.APIConnectionError as e: +except gradient.APIConnectionError as e: print("The server could not be reached") print(e.__cause__) # an underlying Exception, likely raised within httpx. -except do_gradientai.RateLimitError as e: +except gradient.RateLimitError as e: print("A 429 status code was received; we should back off a bit.") -except do_gradientai.APIStatusError as e: +except gradient.APIStatusError as e: print("Another non-200-range status code was received") print(e.status_code) print(e.response) @@ -299,10 +299,10 @@ Connection errors (for example, due to a network connectivity problem), 408 Requ You can use the `max_retries` option to configure or disable retry settings: ```python -from do_gradientai import GradientAI +from gradient import Gradient # Configure the default for all requests: -client = GradientAI( +client = Gradient( # default is 2 max_retries=0, ) @@ -325,16 +325,16 @@ By default requests time out after 1 minute. You can configure this with a `time which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object: ```python -from do_gradientai import GradientAI +from gradient import Gradient # Configure the default for all requests: -client = GradientAI( +client = Gradient( # 20 seconds (default is 1 minute) timeout=20.0, ) # More granular control: -client = GradientAI( +client = Gradient( timeout=httpx.Timeout(60.0, read=5.0, write=10.0, connect=2.0), ) @@ -360,10 +360,10 @@ Note that requests that time out are [retried twice by default](#retries). 
We use the standard library [`logging`](https://docs.python.org/3/library/logging.html) module. -You can enable logging by setting the environment variable `GRADIENT_AI_LOG` to `info`. +You can enable logging by setting the environment variable `GRADIENT_LOG` to `info`. ```shell -$ export GRADIENT_AI_LOG=info +$ export GRADIENT_LOG=info ``` Or to `debug` for more verbose logging. @@ -385,9 +385,9 @@ if response.my_field is None: The "raw" Response object can be accessed by prefixing `.with_raw_response.` to any HTTP method call, e.g., ```py -from do_gradientai import GradientAI +from gradient import Gradient -client = GradientAI() +client = Gradient() response = client.chat.completions.with_raw_response.create( messages=[{ "role": "user", @@ -401,9 +401,9 @@ completion = response.parse() # get the object that `chat.completions.create()` print(completion.choices) ``` -These methods return an [`APIResponse`](https://github.com/digitalocean/gradientai-python/tree/main/src/do_gradientai/_response.py) object. +These methods return an [`APIResponse`](https://github.com/digitalocean/gradient-python/tree/main/src/gradient/_response.py) object. -The async client returns an [`AsyncAPIResponse`](https://github.com/digitalocean/gradientai-python/tree/main/src/do_gradientai/_response.py) with the same structure, the only difference being `await`able methods for reading the response content. +The async client returns an [`AsyncAPIResponse`](https://github.com/digitalocean/gradient-python/tree/main/src/gradient/_response.py) with the same structure, the only difference being `await`able methods for reading the response content. #### `.with_streaming_response` @@ -473,10 +473,10 @@ You can directly override the [httpx client](https://www.python-httpx.org/api/#c ```python import httpx -from do_gradientai import GradientAI, DefaultHttpxClient +from gradient import Gradient, DefaultHttpxClient -client = GradientAI( - # Or use the `GRADIENT_AI_BASE_URL` env var +client = Gradient( + # Or use the `GRADIENT_BASE_URL` env var base_url="http://my.test.server.example.com:8083", http_client=DefaultHttpxClient( proxy="http://my.test.proxy.example.com", @@ -496,9 +496,9 @@ client.with_options(http_client=DefaultHttpxClient(...)) By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. ```py -from do_gradientai import GradientAI +from gradient import Gradient -with GradientAI() as client: +with Gradient() as client: # make requests here ... @@ -515,7 +515,7 @@ This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) con We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience. -We are keen for your feedback; please open an [issue](https://www.github.com/digitalocean/gradientai-python/issues) with questions, bugs, or suggestions. +We are keen for your feedback; please open an [issue](https://www.github.com/digitalocean/gradient-python/issues) with questions, bugs, or suggestions. 
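The raw-response and client-lifecycle hunks above likewise change only the import path and class name. A short sketch, under the same assumptions as the previous one (renamed package installed, `GRADIENT_API_KEY` exported), showing how the context manager and `.with_raw_response.` compose after the rename:

```python
from gradient import Gradient

# The context manager closes the underlying HTTP connections on exit,
# matching the behaviour described in the hunk above.
with Gradient() as client:  # api_key is read from GRADIENT_API_KEY
    response = client.chat.completions.with_raw_response.create(
        messages=[{"role": "user", "content": "Say this is a test"}],
        model="llama3.3-70b-instruct",  # placeholder model from the README examples
    )
    completion = response.parse()  # the object `chat.completions.create()` would have returned
    print(completion.choices)
```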
### Determining the installed version @@ -524,8 +524,8 @@ If you've upgraded to the latest version but aren't seeing any new features you You can determine the version that is being used at runtime with: ```py -import do_gradientai -print(do_gradientai.__version__) +import gradient +print(gradient.__version__) ``` ## Requirements diff --git a/SECURITY.md b/SECURITY.md index a7593759..fe1c055c 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -16,7 +16,7 @@ before making any information public. ## Reporting Non-SDK Related Security Issues If you encounter security issues that are not directly related to SDKs but pertain to the services -or products provided by Gradient AI, please follow the respective company's security reporting guidelines. +or products provided by Gradient, please follow the respective company's security reporting guidelines. --- diff --git a/api.md b/api.md index dc52233d..1091e4dc 100644 --- a/api.md +++ b/api.md @@ -1,7 +1,7 @@ # Shared Types ```python -from do_gradientai.types import ( +from gradient.types import ( Action, ActionLink, APILinks, @@ -37,7 +37,7 @@ from do_gradientai.types import ( Types: ```python -from do_gradientai.types import ( +from gradient.types import ( APIAgent, APIAgentAPIKeyInfo, APIAgentModel, @@ -57,19 +57,19 @@ from do_gradientai.types import ( Methods: -- client.agents.create(\*\*params) -> AgentCreateResponse -- client.agents.retrieve(uuid) -> AgentRetrieveResponse -- client.agents.update(path_uuid, \*\*params) -> AgentUpdateResponse -- client.agents.list(\*\*params) -> AgentListResponse -- client.agents.delete(uuid) -> AgentDeleteResponse -- client.agents.update_status(path_uuid, \*\*params) -> AgentUpdateStatusResponse +- client.agents.create(\*\*params) -> AgentCreateResponse +- client.agents.retrieve(uuid) -> AgentRetrieveResponse +- client.agents.update(path_uuid, \*\*params) -> AgentUpdateResponse +- client.agents.list(\*\*params) -> AgentListResponse +- client.agents.delete(uuid) -> AgentDeleteResponse +- client.agents.update_status(path_uuid, \*\*params) -> AgentUpdateStatusResponse ## APIKeys Types: ```python -from do_gradientai.types.agents import ( +from gradient.types.agents import ( APIKeyCreateResponse, APIKeyUpdateResponse, APIKeyListResponse, @@ -80,11 +80,11 @@ from do_gradientai.types.agents import ( Methods: -- client.agents.api_keys.create(path_agent_uuid, \*\*params) -> APIKeyCreateResponse -- client.agents.api_keys.update(path_api_key_uuid, \*, path_agent_uuid, \*\*params) -> APIKeyUpdateResponse -- client.agents.api_keys.list(agent_uuid, \*\*params) -> APIKeyListResponse -- client.agents.api_keys.delete(api_key_uuid, \*, agent_uuid) -> APIKeyDeleteResponse -- client.agents.api_keys.regenerate(api_key_uuid, \*, agent_uuid) -> APIKeyRegenerateResponse +- client.agents.api_keys.create(path_agent_uuid, \*\*params) -> APIKeyCreateResponse +- client.agents.api_keys.update(path_api_key_uuid, \*, path_agent_uuid, \*\*params) -> APIKeyUpdateResponse +- client.agents.api_keys.list(agent_uuid, \*\*params) -> APIKeyListResponse +- client.agents.api_keys.delete(api_key_uuid, \*, agent_uuid) -> APIKeyDeleteResponse +- client.agents.api_keys.regenerate(api_key_uuid, \*, agent_uuid) -> APIKeyRegenerateResponse ## Chat @@ -93,35 +93,32 @@ Methods: Types: ```python -from do_gradientai.types.agents.chat import CompletionCreateResponse +from gradient.types.agents.chat import CompletionCreateResponse ``` Methods: -- client.agents.chat.completions.create(\*\*params) -> CompletionCreateResponse +- 
client.agents.chat.completions.create(\*\*params) -> CompletionCreateResponse ## EvaluationMetrics Types: ```python -from do_gradientai.types.agents import ( - EvaluationMetricListResponse, - EvaluationMetricListRegionsResponse, -) +from gradient.types.agents import EvaluationMetricListResponse, EvaluationMetricListRegionsResponse ``` Methods: -- client.agents.evaluation_metrics.list() -> EvaluationMetricListResponse -- client.agents.evaluation_metrics.list_regions(\*\*params) -> EvaluationMetricListRegionsResponse +- client.agents.evaluation_metrics.list() -> EvaluationMetricListResponse +- client.agents.evaluation_metrics.list_regions(\*\*params) -> EvaluationMetricListRegionsResponse ### Workspaces Types: ```python -from do_gradientai.types.agents.evaluation_metrics import ( +from gradient.types.agents.evaluation_metrics import ( WorkspaceCreateResponse, WorkspaceRetrieveResponse, WorkspaceUpdateResponse, @@ -133,40 +130,37 @@ from do_gradientai.types.agents.evaluation_metrics import ( Methods: -- client.agents.evaluation_metrics.workspaces.create(\*\*params) -> WorkspaceCreateResponse -- client.agents.evaluation_metrics.workspaces.retrieve(workspace_uuid) -> WorkspaceRetrieveResponse -- client.agents.evaluation_metrics.workspaces.update(path_workspace_uuid, \*\*params) -> WorkspaceUpdateResponse -- client.agents.evaluation_metrics.workspaces.list() -> WorkspaceListResponse -- client.agents.evaluation_metrics.workspaces.delete(workspace_uuid) -> WorkspaceDeleteResponse -- client.agents.evaluation_metrics.workspaces.list_evaluation_test_cases(workspace_uuid) -> WorkspaceListEvaluationTestCasesResponse +- client.agents.evaluation_metrics.workspaces.create(\*\*params) -> WorkspaceCreateResponse +- client.agents.evaluation_metrics.workspaces.retrieve(workspace_uuid) -> WorkspaceRetrieveResponse +- client.agents.evaluation_metrics.workspaces.update(path_workspace_uuid, \*\*params) -> WorkspaceUpdateResponse +- client.agents.evaluation_metrics.workspaces.list() -> WorkspaceListResponse +- client.agents.evaluation_metrics.workspaces.delete(workspace_uuid) -> WorkspaceDeleteResponse +- client.agents.evaluation_metrics.workspaces.list_evaluation_test_cases(workspace_uuid) -> WorkspaceListEvaluationTestCasesResponse #### Agents Types: ```python -from do_gradientai.types.agents.evaluation_metrics.workspaces import ( - AgentListResponse, - AgentMoveResponse, -) +from gradient.types.agents.evaluation_metrics.workspaces import AgentListResponse, AgentMoveResponse ``` Methods: -- client.agents.evaluation_metrics.workspaces.agents.list(workspace_uuid, \*\*params) -> AgentListResponse -- client.agents.evaluation_metrics.workspaces.agents.move(path_workspace_uuid, \*\*params) -> AgentMoveResponse +- client.agents.evaluation_metrics.workspaces.agents.list(workspace_uuid, \*\*params) -> AgentListResponse +- client.agents.evaluation_metrics.workspaces.agents.move(path_workspace_uuid, \*\*params) -> AgentMoveResponse ### Models Types: ```python -from do_gradientai.types.agents.evaluation_metrics import ModelListResponse +from gradient.types.agents.evaluation_metrics import ModelListResponse ``` Methods: -- client.agents.evaluation_metrics.models.list(\*\*params) -> ModelListResponse +- client.agents.evaluation_metrics.models.list(\*\*params) -> ModelListResponse ### Anthropic @@ -175,7 +169,7 @@ Methods: Types: ```python -from do_gradientai.types.agents.evaluation_metrics.anthropic import ( +from gradient.types.agents.evaluation_metrics.anthropic import ( KeyCreateResponse, KeyRetrieveResponse, 
KeyUpdateResponse, @@ -187,12 +181,12 @@ from do_gradientai.types.agents.evaluation_metrics.anthropic import ( Methods: -- client.agents.evaluation_metrics.anthropic.keys.create(\*\*params) -> KeyCreateResponse -- client.agents.evaluation_metrics.anthropic.keys.retrieve(api_key_uuid) -> KeyRetrieveResponse -- client.agents.evaluation_metrics.anthropic.keys.update(path_api_key_uuid, \*\*params) -> KeyUpdateResponse -- client.agents.evaluation_metrics.anthropic.keys.list(\*\*params) -> KeyListResponse -- client.agents.evaluation_metrics.anthropic.keys.delete(api_key_uuid) -> KeyDeleteResponse -- client.agents.evaluation_metrics.anthropic.keys.list_agents(uuid, \*\*params) -> KeyListAgentsResponse +- client.agents.evaluation_metrics.anthropic.keys.create(\*\*params) -> KeyCreateResponse +- client.agents.evaluation_metrics.anthropic.keys.retrieve(api_key_uuid) -> KeyRetrieveResponse +- client.agents.evaluation_metrics.anthropic.keys.update(path_api_key_uuid, \*\*params) -> KeyUpdateResponse +- client.agents.evaluation_metrics.anthropic.keys.list(\*\*params) -> KeyListResponse +- client.agents.evaluation_metrics.anthropic.keys.delete(api_key_uuid) -> KeyDeleteResponse +- client.agents.evaluation_metrics.anthropic.keys.list_agents(uuid, \*\*params) -> KeyListAgentsResponse ### OpenAI @@ -201,7 +195,7 @@ Methods: Types: ```python -from do_gradientai.types.agents.evaluation_metrics.openai import ( +from gradient.types.agents.evaluation_metrics.openai import ( KeyCreateResponse, KeyRetrieveResponse, KeyUpdateResponse, @@ -213,19 +207,19 @@ from do_gradientai.types.agents.evaluation_metrics.openai import ( Methods: -- client.agents.evaluation_metrics.openai.keys.create(\*\*params) -> KeyCreateResponse -- client.agents.evaluation_metrics.openai.keys.retrieve(api_key_uuid) -> KeyRetrieveResponse -- client.agents.evaluation_metrics.openai.keys.update(path_api_key_uuid, \*\*params) -> KeyUpdateResponse -- client.agents.evaluation_metrics.openai.keys.list(\*\*params) -> KeyListResponse -- client.agents.evaluation_metrics.openai.keys.delete(api_key_uuid) -> KeyDeleteResponse -- client.agents.evaluation_metrics.openai.keys.list_agents(uuid, \*\*params) -> KeyListAgentsResponse +- client.agents.evaluation_metrics.openai.keys.create(\*\*params) -> KeyCreateResponse +- client.agents.evaluation_metrics.openai.keys.retrieve(api_key_uuid) -> KeyRetrieveResponse +- client.agents.evaluation_metrics.openai.keys.update(path_api_key_uuid, \*\*params) -> KeyUpdateResponse +- client.agents.evaluation_metrics.openai.keys.list(\*\*params) -> KeyListResponse +- client.agents.evaluation_metrics.openai.keys.delete(api_key_uuid) -> KeyDeleteResponse +- client.agents.evaluation_metrics.openai.keys.list_agents(uuid, \*\*params) -> KeyListAgentsResponse ## EvaluationRuns Types: ```python -from do_gradientai.types.agents import ( +from gradient.types.agents import ( APIEvaluationMetric, APIEvaluationMetricResult, APIEvaluationPrompt, @@ -239,17 +233,17 @@ from do_gradientai.types.agents import ( Methods: -- client.agents.evaluation_runs.create(\*\*params) -> EvaluationRunCreateResponse -- client.agents.evaluation_runs.retrieve(evaluation_run_uuid) -> EvaluationRunRetrieveResponse -- client.agents.evaluation_runs.list_results(evaluation_run_uuid, \*\*params) -> EvaluationRunListResultsResponse -- client.agents.evaluation_runs.retrieve_results(prompt_id, \*, evaluation_run_uuid) -> EvaluationRunRetrieveResultsResponse +- client.agents.evaluation_runs.create(\*\*params) -> EvaluationRunCreateResponse +- 
client.agents.evaluation_runs.retrieve(evaluation_run_uuid) -> EvaluationRunRetrieveResponse +- client.agents.evaluation_runs.list_results(evaluation_run_uuid, \*\*params) -> EvaluationRunListResultsResponse +- client.agents.evaluation_runs.retrieve_results(prompt_id, \*, evaluation_run_uuid) -> EvaluationRunRetrieveResultsResponse ## EvaluationTestCases Types: ```python -from do_gradientai.types.agents import ( +from gradient.types.agents import ( APIEvaluationTestCase, APIStarMetric, EvaluationTestCaseCreateResponse, @@ -262,18 +256,18 @@ from do_gradientai.types.agents import ( Methods: -- client.agents.evaluation_test_cases.create(\*\*params) -> EvaluationTestCaseCreateResponse -- client.agents.evaluation_test_cases.retrieve(test_case_uuid, \*\*params) -> EvaluationTestCaseRetrieveResponse -- client.agents.evaluation_test_cases.update(path_test_case_uuid, \*\*params) -> EvaluationTestCaseUpdateResponse -- client.agents.evaluation_test_cases.list() -> EvaluationTestCaseListResponse -- client.agents.evaluation_test_cases.list_evaluation_runs(evaluation_test_case_uuid, \*\*params) -> EvaluationTestCaseListEvaluationRunsResponse +- client.agents.evaluation_test_cases.create(\*\*params) -> EvaluationTestCaseCreateResponse +- client.agents.evaluation_test_cases.retrieve(test_case_uuid, \*\*params) -> EvaluationTestCaseRetrieveResponse +- client.agents.evaluation_test_cases.update(path_test_case_uuid, \*\*params) -> EvaluationTestCaseUpdateResponse +- client.agents.evaluation_test_cases.list() -> EvaluationTestCaseListResponse +- client.agents.evaluation_test_cases.list_evaluation_runs(evaluation_test_case_uuid, \*\*params) -> EvaluationTestCaseListEvaluationRunsResponse ## EvaluationDatasets Types: ```python -from do_gradientai.types.agents import ( +from gradient.types.agents import ( EvaluationDatasetCreateResponse, EvaluationDatasetCreateFileUploadPresignedURLsResponse, ) @@ -281,15 +275,15 @@ from do_gradientai.types.agents import ( Methods: -- client.agents.evaluation_datasets.create(\*\*params) -> EvaluationDatasetCreateResponse -- client.agents.evaluation_datasets.create_file_upload_presigned_urls(\*\*params) -> EvaluationDatasetCreateFileUploadPresignedURLsResponse +- client.agents.evaluation_datasets.create(\*\*params) -> EvaluationDatasetCreateResponse +- client.agents.evaluation_datasets.create_file_upload_presigned_urls(\*\*params) -> EvaluationDatasetCreateFileUploadPresignedURLsResponse ## Functions Types: ```python -from do_gradientai.types.agents import ( +from gradient.types.agents import ( FunctionCreateResponse, FunctionUpdateResponse, FunctionDeleteResponse, @@ -298,43 +292,43 @@ from do_gradientai.types.agents import ( Methods: -- client.agents.functions.create(path_agent_uuid, \*\*params) -> FunctionCreateResponse -- client.agents.functions.update(path_function_uuid, \*, path_agent_uuid, \*\*params) -> FunctionUpdateResponse -- client.agents.functions.delete(function_uuid, \*, agent_uuid) -> FunctionDeleteResponse +- client.agents.functions.create(path_agent_uuid, \*\*params) -> FunctionCreateResponse +- client.agents.functions.update(path_function_uuid, \*, path_agent_uuid, \*\*params) -> FunctionUpdateResponse +- client.agents.functions.delete(function_uuid, \*, agent_uuid) -> FunctionDeleteResponse ## Versions Types: ```python -from do_gradientai.types.agents import VersionUpdateResponse, VersionListResponse +from gradient.types.agents import VersionUpdateResponse, VersionListResponse ``` Methods: -- client.agents.versions.update(path_uuid, \*\*params) -> 
VersionUpdateResponse -- client.agents.versions.list(uuid, \*\*params) -> VersionListResponse +- client.agents.versions.update(path_uuid, \*\*params) -> VersionUpdateResponse +- client.agents.versions.list(uuid, \*\*params) -> VersionListResponse ## KnowledgeBases Types: ```python -from do_gradientai.types.agents import APILinkKnowledgeBaseOutput, KnowledgeBaseDetachResponse +from gradient.types.agents import APILinkKnowledgeBaseOutput, KnowledgeBaseDetachResponse ``` Methods: -- client.agents.knowledge_bases.attach(agent_uuid) -> APILinkKnowledgeBaseOutput -- client.agents.knowledge_bases.attach_single(knowledge_base_uuid, \*, agent_uuid) -> APILinkKnowledgeBaseOutput -- client.agents.knowledge_bases.detach(knowledge_base_uuid, \*, agent_uuid) -> KnowledgeBaseDetachResponse +- client.agents.knowledge_bases.attach(agent_uuid) -> APILinkKnowledgeBaseOutput +- client.agents.knowledge_bases.attach_single(knowledge_base_uuid, \*, agent_uuid) -> APILinkKnowledgeBaseOutput +- client.agents.knowledge_bases.detach(knowledge_base_uuid, \*, agent_uuid) -> KnowledgeBaseDetachResponse ## Routes Types: ```python -from do_gradientai.types.agents import ( +from gradient.types.agents import ( RouteUpdateResponse, RouteDeleteResponse, RouteAddResponse, @@ -344,10 +338,10 @@ from do_gradientai.types.agents import ( Methods: -- client.agents.routes.update(path_child_agent_uuid, \*, path_parent_agent_uuid, \*\*params) -> RouteUpdateResponse -- client.agents.routes.delete(child_agent_uuid, \*, parent_agent_uuid) -> RouteDeleteResponse -- client.agents.routes.add(path_child_agent_uuid, \*, path_parent_agent_uuid, \*\*params) -> RouteAddResponse -- client.agents.routes.view(uuid) -> RouteViewResponse +- client.agents.routes.update(path_child_agent_uuid, \*, path_parent_agent_uuid, \*\*params) -> RouteUpdateResponse +- client.agents.routes.delete(child_agent_uuid, \*, parent_agent_uuid) -> RouteDeleteResponse +- client.agents.routes.add(path_child_agent_uuid, \*, path_parent_agent_uuid, \*\*params) -> RouteAddResponse +- client.agents.routes.view(uuid) -> RouteViewResponse # Chat @@ -356,19 +350,19 @@ Methods: Types: ```python -from do_gradientai.types.chat import CompletionCreateResponse +from gradient.types.chat import CompletionCreateResponse ``` Methods: -- client.chat.completions.create(\*\*params) -> CompletionCreateResponse +- client.chat.completions.create(\*\*params) -> CompletionCreateResponse # GPUDroplets Types: ```python -from do_gradientai.types import ( +from gradient.types import ( DropletBackupPolicy, GPUDropletCreateResponse, GPUDropletRetrieveResponse, @@ -382,22 +376,22 @@ from do_gradientai.types import ( Methods: -- client.gpu_droplets.create(\*\*params) -> GPUDropletCreateResponse -- client.gpu_droplets.retrieve(droplet_id) -> GPUDropletRetrieveResponse -- client.gpu_droplets.list(\*\*params) -> GPUDropletListResponse -- client.gpu_droplets.delete(droplet_id) -> None -- client.gpu_droplets.delete_by_tag(\*\*params) -> None -- client.gpu_droplets.list_firewalls(droplet_id, \*\*params) -> GPUDropletListFirewallsResponse -- client.gpu_droplets.list_kernels(droplet_id, \*\*params) -> GPUDropletListKernelsResponse -- client.gpu_droplets.list_neighbors(droplet_id) -> GPUDropletListNeighborsResponse -- client.gpu_droplets.list_snapshots(droplet_id, \*\*params) -> GPUDropletListSnapshotsResponse +- client.gpu_droplets.create(\*\*params) -> GPUDropletCreateResponse +- client.gpu_droplets.retrieve(droplet_id) -> GPUDropletRetrieveResponse +- client.gpu_droplets.list(\*\*params) -> 
GPUDropletListResponse +- client.gpu_droplets.delete(droplet_id) -> None +- client.gpu_droplets.delete_by_tag(\*\*params) -> None +- client.gpu_droplets.list_firewalls(droplet_id, \*\*params) -> GPUDropletListFirewallsResponse +- client.gpu_droplets.list_kernels(droplet_id, \*\*params) -> GPUDropletListKernelsResponse +- client.gpu_droplets.list_neighbors(droplet_id) -> GPUDropletListNeighborsResponse +- client.gpu_droplets.list_snapshots(droplet_id, \*\*params) -> GPUDropletListSnapshotsResponse ## Backups Types: ```python -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( BackupListResponse, BackupListPoliciesResponse, BackupListSupportedPoliciesResponse, @@ -407,17 +401,17 @@ from do_gradientai.types.gpu_droplets import ( Methods: -- client.gpu_droplets.backups.list(droplet_id, \*\*params) -> BackupListResponse -- client.gpu_droplets.backups.list_policies(\*\*params) -> BackupListPoliciesResponse -- client.gpu_droplets.backups.list_supported_policies() -> BackupListSupportedPoliciesResponse -- client.gpu_droplets.backups.retrieve_policy(droplet_id) -> BackupRetrievePolicyResponse +- client.gpu_droplets.backups.list(droplet_id, \*\*params) -> BackupListResponse +- client.gpu_droplets.backups.list_policies(\*\*params) -> BackupListPoliciesResponse +- client.gpu_droplets.backups.list_supported_policies() -> BackupListSupportedPoliciesResponse +- client.gpu_droplets.backups.retrieve_policy(droplet_id) -> BackupRetrievePolicyResponse ## Actions Types: ```python -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( ActionRetrieveResponse, ActionListResponse, ActionBulkInitiateResponse, @@ -427,17 +421,17 @@ from do_gradientai.types.gpu_droplets import ( Methods: -- client.gpu_droplets.actions.retrieve(action_id, \*, droplet_id) -> ActionRetrieveResponse -- client.gpu_droplets.actions.list(droplet_id, \*\*params) -> ActionListResponse -- client.gpu_droplets.actions.bulk_initiate(\*\*params) -> ActionBulkInitiateResponse -- client.gpu_droplets.actions.initiate(droplet_id, \*\*params) -> ActionInitiateResponse +- client.gpu_droplets.actions.retrieve(action_id, \*, droplet_id) -> ActionRetrieveResponse +- client.gpu_droplets.actions.list(droplet_id, \*\*params) -> ActionListResponse +- client.gpu_droplets.actions.bulk_initiate(\*\*params) -> ActionBulkInitiateResponse +- client.gpu_droplets.actions.initiate(droplet_id, \*\*params) -> ActionInitiateResponse ## DestroyWithAssociatedResources Types: ```python -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( AssociatedResource, DestroyedAssociatedResource, DestroyWithAssociatedResourceListResponse, @@ -447,18 +441,18 @@ from do_gradientai.types.gpu_droplets import ( Methods: -- client.gpu_droplets.destroy_with_associated_resources.list(droplet_id) -> DestroyWithAssociatedResourceListResponse -- client.gpu_droplets.destroy_with_associated_resources.check_status(droplet_id) -> DestroyWithAssociatedResourceCheckStatusResponse -- client.gpu_droplets.destroy_with_associated_resources.delete_dangerous(droplet_id) -> None -- client.gpu_droplets.destroy_with_associated_resources.delete_selective(droplet_id, \*\*params) -> None -- client.gpu_droplets.destroy_with_associated_resources.retry(droplet_id) -> None +- client.gpu_droplets.destroy_with_associated_resources.list(droplet_id) -> DestroyWithAssociatedResourceListResponse +- client.gpu_droplets.destroy_with_associated_resources.check_status(droplet_id) -> 
DestroyWithAssociatedResourceCheckStatusResponse +- client.gpu_droplets.destroy_with_associated_resources.delete_dangerous(droplet_id) -> None +- client.gpu_droplets.destroy_with_associated_resources.delete_selective(droplet_id, \*\*params) -> None +- client.gpu_droplets.destroy_with_associated_resources.retry(droplet_id) -> None ## Autoscale Types: ```python -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( AutoscalePool, AutoscalePoolDropletTemplate, AutoscalePoolDynamicConfig, @@ -475,21 +469,21 @@ from do_gradientai.types.gpu_droplets import ( Methods: -- client.gpu_droplets.autoscale.create(\*\*params) -> AutoscaleCreateResponse -- client.gpu_droplets.autoscale.retrieve(autoscale_pool_id) -> AutoscaleRetrieveResponse -- client.gpu_droplets.autoscale.update(autoscale_pool_id, \*\*params) -> AutoscaleUpdateResponse -- client.gpu_droplets.autoscale.list(\*\*params) -> AutoscaleListResponse -- client.gpu_droplets.autoscale.delete(autoscale_pool_id) -> None -- client.gpu_droplets.autoscale.delete_dangerous(autoscale_pool_id) -> None -- client.gpu_droplets.autoscale.list_history(autoscale_pool_id, \*\*params) -> AutoscaleListHistoryResponse -- client.gpu_droplets.autoscale.list_members(autoscale_pool_id, \*\*params) -> AutoscaleListMembersResponse +- client.gpu_droplets.autoscale.create(\*\*params) -> AutoscaleCreateResponse +- client.gpu_droplets.autoscale.retrieve(autoscale_pool_id) -> AutoscaleRetrieveResponse +- client.gpu_droplets.autoscale.update(autoscale_pool_id, \*\*params) -> AutoscaleUpdateResponse +- client.gpu_droplets.autoscale.list(\*\*params) -> AutoscaleListResponse +- client.gpu_droplets.autoscale.delete(autoscale_pool_id) -> None +- client.gpu_droplets.autoscale.delete_dangerous(autoscale_pool_id) -> None +- client.gpu_droplets.autoscale.list_history(autoscale_pool_id, \*\*params) -> AutoscaleListHistoryResponse +- client.gpu_droplets.autoscale.list_members(autoscale_pool_id, \*\*params) -> AutoscaleListMembersResponse ## Firewalls Types: ```python -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( Firewall, FirewallCreateResponse, FirewallRetrieveResponse, @@ -500,39 +494,39 @@ from do_gradientai.types.gpu_droplets import ( Methods: -- client.gpu_droplets.firewalls.create(\*\*params) -> FirewallCreateResponse -- client.gpu_droplets.firewalls.retrieve(firewall_id) -> FirewallRetrieveResponse -- client.gpu_droplets.firewalls.update(firewall_id, \*\*params) -> FirewallUpdateResponse -- client.gpu_droplets.firewalls.list(\*\*params) -> FirewallListResponse -- client.gpu_droplets.firewalls.delete(firewall_id) -> None +- client.gpu_droplets.firewalls.create(\*\*params) -> FirewallCreateResponse +- client.gpu_droplets.firewalls.retrieve(firewall_id) -> FirewallRetrieveResponse +- client.gpu_droplets.firewalls.update(firewall_id, \*\*params) -> FirewallUpdateResponse +- client.gpu_droplets.firewalls.list(\*\*params) -> FirewallListResponse +- client.gpu_droplets.firewalls.delete(firewall_id) -> None ### Droplets Methods: -- client.gpu_droplets.firewalls.droplets.add(firewall_id, \*\*params) -> None -- client.gpu_droplets.firewalls.droplets.remove(firewall_id, \*\*params) -> None +- client.gpu_droplets.firewalls.droplets.add(firewall_id, \*\*params) -> None +- client.gpu_droplets.firewalls.droplets.remove(firewall_id, \*\*params) -> None ### Tags Methods: -- client.gpu_droplets.firewalls.tags.add(firewall_id, \*\*params) -> None -- client.gpu_droplets.firewalls.tags.remove(firewall_id, \*\*params) -> 
None +- client.gpu_droplets.firewalls.tags.add(firewall_id, \*\*params) -> None +- client.gpu_droplets.firewalls.tags.remove(firewall_id, \*\*params) -> None ### Rules Methods: -- client.gpu_droplets.firewalls.rules.add(firewall_id, \*\*params) -> None -- client.gpu_droplets.firewalls.rules.remove(firewall_id, \*\*params) -> None +- client.gpu_droplets.firewalls.rules.add(firewall_id, \*\*params) -> None +- client.gpu_droplets.firewalls.rules.remove(firewall_id, \*\*params) -> None ## FloatingIPs Types: ```python -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( FloatingIP, FloatingIPCreateResponse, FloatingIPRetrieveResponse, @@ -542,17 +536,17 @@ from do_gradientai.types.gpu_droplets import ( Methods: -- client.gpu_droplets.floating_ips.create(\*\*params) -> FloatingIPCreateResponse -- client.gpu_droplets.floating_ips.retrieve(floating_ip) -> FloatingIPRetrieveResponse -- client.gpu_droplets.floating_ips.list(\*\*params) -> FloatingIPListResponse -- client.gpu_droplets.floating_ips.delete(floating_ip) -> None +- client.gpu_droplets.floating_ips.create(\*\*params) -> FloatingIPCreateResponse +- client.gpu_droplets.floating_ips.retrieve(floating_ip) -> FloatingIPRetrieveResponse +- client.gpu_droplets.floating_ips.list(\*\*params) -> FloatingIPListResponse +- client.gpu_droplets.floating_ips.delete(floating_ip) -> None ### Actions Types: ```python -from do_gradientai.types.gpu_droplets.floating_ips import ( +from gradient.types.gpu_droplets.floating_ips import ( ActionCreateResponse, ActionRetrieveResponse, ActionListResponse, @@ -561,16 +555,16 @@ from do_gradientai.types.gpu_droplets.floating_ips import ( Methods: -- client.gpu_droplets.floating_ips.actions.create(floating_ip, \*\*params) -> ActionCreateResponse -- client.gpu_droplets.floating_ips.actions.retrieve(action_id, \*, floating_ip) -> ActionRetrieveResponse -- client.gpu_droplets.floating_ips.actions.list(floating_ip) -> ActionListResponse +- client.gpu_droplets.floating_ips.actions.create(floating_ip, \*\*params) -> ActionCreateResponse +- client.gpu_droplets.floating_ips.actions.retrieve(action_id, \*, floating_ip) -> ActionRetrieveResponse +- client.gpu_droplets.floating_ips.actions.list(floating_ip) -> ActionListResponse ## Images Types: ```python -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( ImageCreateResponse, ImageRetrieveResponse, ImageUpdateResponse, @@ -580,32 +574,32 @@ from do_gradientai.types.gpu_droplets import ( Methods: -- client.gpu_droplets.images.create(\*\*params) -> ImageCreateResponse -- client.gpu_droplets.images.retrieve(image_id) -> ImageRetrieveResponse -- client.gpu_droplets.images.update(image_id, \*\*params) -> ImageUpdateResponse -- client.gpu_droplets.images.list(\*\*params) -> ImageListResponse -- client.gpu_droplets.images.delete(image_id) -> None +- client.gpu_droplets.images.create(\*\*params) -> ImageCreateResponse +- client.gpu_droplets.images.retrieve(image_id) -> ImageRetrieveResponse +- client.gpu_droplets.images.update(image_id, \*\*params) -> ImageUpdateResponse +- client.gpu_droplets.images.list(\*\*params) -> ImageListResponse +- client.gpu_droplets.images.delete(image_id) -> None ### Actions Types: ```python -from do_gradientai.types.gpu_droplets.images import ActionListResponse +from gradient.types.gpu_droplets.images import ActionListResponse ``` Methods: -- client.gpu_droplets.images.actions.create(image_id, \*\*params) -> Action -- client.gpu_droplets.images.actions.retrieve(action_id, \*, 
image_id) -> Action -- client.gpu_droplets.images.actions.list(image_id) -> ActionListResponse +- client.gpu_droplets.images.actions.create(image_id, \*\*params) -> Action +- client.gpu_droplets.images.actions.retrieve(action_id, \*, image_id) -> Action +- client.gpu_droplets.images.actions.list(image_id) -> ActionListResponse ## LoadBalancers Types: ```python -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( Domains, ForwardingRule, GlbSettings, @@ -622,59 +616,59 @@ from do_gradientai.types.gpu_droplets import ( Methods: -- client.gpu_droplets.load_balancers.create(\*\*params) -> LoadBalancerCreateResponse -- client.gpu_droplets.load_balancers.retrieve(lb_id) -> LoadBalancerRetrieveResponse -- client.gpu_droplets.load_balancers.update(lb_id, \*\*params) -> LoadBalancerUpdateResponse -- client.gpu_droplets.load_balancers.list(\*\*params) -> LoadBalancerListResponse -- client.gpu_droplets.load_balancers.delete(lb_id) -> None -- client.gpu_droplets.load_balancers.delete_cache(lb_id) -> None +- client.gpu_droplets.load_balancers.create(\*\*params) -> LoadBalancerCreateResponse +- client.gpu_droplets.load_balancers.retrieve(lb_id) -> LoadBalancerRetrieveResponse +- client.gpu_droplets.load_balancers.update(lb_id, \*\*params) -> LoadBalancerUpdateResponse +- client.gpu_droplets.load_balancers.list(\*\*params) -> LoadBalancerListResponse +- client.gpu_droplets.load_balancers.delete(lb_id) -> None +- client.gpu_droplets.load_balancers.delete_cache(lb_id) -> None ### Droplets Methods: -- client.gpu_droplets.load_balancers.droplets.add(lb_id, \*\*params) -> None -- client.gpu_droplets.load_balancers.droplets.remove(lb_id, \*\*params) -> None +- client.gpu_droplets.load_balancers.droplets.add(lb_id, \*\*params) -> None +- client.gpu_droplets.load_balancers.droplets.remove(lb_id, \*\*params) -> None ### ForwardingRules Methods: -- client.gpu_droplets.load_balancers.forwarding_rules.add(lb_id, \*\*params) -> None -- client.gpu_droplets.load_balancers.forwarding_rules.remove(lb_id, \*\*params) -> None +- client.gpu_droplets.load_balancers.forwarding_rules.add(lb_id, \*\*params) -> None +- client.gpu_droplets.load_balancers.forwarding_rules.remove(lb_id, \*\*params) -> None ## Sizes Types: ```python -from do_gradientai.types.gpu_droplets import SizeListResponse +from gradient.types.gpu_droplets import SizeListResponse ``` Methods: -- client.gpu_droplets.sizes.list(\*\*params) -> SizeListResponse +- client.gpu_droplets.sizes.list(\*\*params) -> SizeListResponse ## Snapshots Types: ```python -from do_gradientai.types.gpu_droplets import SnapshotRetrieveResponse, SnapshotListResponse +from gradient.types.gpu_droplets import SnapshotRetrieveResponse, SnapshotListResponse ``` Methods: -- client.gpu_droplets.snapshots.retrieve(snapshot_id) -> SnapshotRetrieveResponse -- client.gpu_droplets.snapshots.list(\*\*params) -> SnapshotListResponse -- client.gpu_droplets.snapshots.delete(snapshot_id) -> None +- client.gpu_droplets.snapshots.retrieve(snapshot_id) -> SnapshotRetrieveResponse +- client.gpu_droplets.snapshots.list(\*\*params) -> SnapshotListResponse +- client.gpu_droplets.snapshots.delete(snapshot_id) -> None ## Volumes Types: ```python -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( VolumeCreateResponse, VolumeRetrieveResponse, VolumeListResponse, @@ -683,18 +677,18 @@ from do_gradientai.types.gpu_droplets import ( Methods: -- client.gpu_droplets.volumes.create(\*\*params) -> VolumeCreateResponse -- 
client.gpu_droplets.volumes.retrieve(volume_id) -> VolumeRetrieveResponse -- client.gpu_droplets.volumes.list(\*\*params) -> VolumeListResponse -- client.gpu_droplets.volumes.delete(volume_id) -> None -- client.gpu_droplets.volumes.delete_by_name(\*\*params) -> None +- client.gpu_droplets.volumes.create(\*\*params) -> VolumeCreateResponse +- client.gpu_droplets.volumes.retrieve(volume_id) -> VolumeRetrieveResponse +- client.gpu_droplets.volumes.list(\*\*params) -> VolumeListResponse +- client.gpu_droplets.volumes.delete(volume_id) -> None +- client.gpu_droplets.volumes.delete_by_name(\*\*params) -> None ### Actions Types: ```python -from do_gradientai.types.gpu_droplets.volumes import ( +from gradient.types.gpu_droplets.volumes import ( VolumeAction, ActionRetrieveResponse, ActionListResponse, @@ -705,17 +699,17 @@ from do_gradientai.types.gpu_droplets.volumes import ( Methods: -- client.gpu_droplets.volumes.actions.retrieve(action_id, \*, volume_id, \*\*params) -> ActionRetrieveResponse -- client.gpu_droplets.volumes.actions.list(volume_id, \*\*params) -> ActionListResponse -- client.gpu_droplets.volumes.actions.initiate_by_id(volume_id, \*\*params) -> ActionInitiateByIDResponse -- client.gpu_droplets.volumes.actions.initiate_by_name(\*\*params) -> ActionInitiateByNameResponse +- client.gpu_droplets.volumes.actions.retrieve(action_id, \*, volume_id, \*\*params) -> ActionRetrieveResponse +- client.gpu_droplets.volumes.actions.list(volume_id, \*\*params) -> ActionListResponse +- client.gpu_droplets.volumes.actions.initiate_by_id(volume_id, \*\*params) -> ActionInitiateByIDResponse +- client.gpu_droplets.volumes.actions.initiate_by_name(\*\*params) -> ActionInitiateByNameResponse ### Snapshots Types: ```python -from do_gradientai.types.gpu_droplets.volumes import ( +from gradient.types.gpu_droplets.volumes import ( SnapshotCreateResponse, SnapshotRetrieveResponse, SnapshotListResponse, @@ -724,10 +718,10 @@ from do_gradientai.types.gpu_droplets.volumes import ( Methods: -- client.gpu_droplets.volumes.snapshots.create(volume_id, \*\*params) -> SnapshotCreateResponse -- client.gpu_droplets.volumes.snapshots.retrieve(snapshot_id) -> SnapshotRetrieveResponse -- client.gpu_droplets.volumes.snapshots.list(volume_id, \*\*params) -> SnapshotListResponse -- client.gpu_droplets.volumes.snapshots.delete(snapshot_id) -> None +- client.gpu_droplets.volumes.snapshots.create(volume_id, \*\*params) -> SnapshotCreateResponse +- client.gpu_droplets.volumes.snapshots.retrieve(snapshot_id) -> SnapshotRetrieveResponse +- client.gpu_droplets.volumes.snapshots.list(volume_id, \*\*params) -> SnapshotListResponse +- client.gpu_droplets.volumes.snapshots.delete(snapshot_id) -> None ## Account @@ -736,7 +730,7 @@ Methods: Types: ```python -from do_gradientai.types.gpu_droplets.account import ( +from gradient.types.gpu_droplets.account import ( KeyCreateResponse, KeyRetrieveResponse, KeyUpdateResponse, @@ -746,11 +740,11 @@ from do_gradientai.types.gpu_droplets.account import ( Methods: -- client.gpu_droplets.account.keys.create(\*\*params) -> KeyCreateResponse -- client.gpu_droplets.account.keys.retrieve(ssh_key_identifier) -> KeyRetrieveResponse -- client.gpu_droplets.account.keys.update(ssh_key_identifier, \*\*params) -> KeyUpdateResponse -- client.gpu_droplets.account.keys.list(\*\*params) -> KeyListResponse -- client.gpu_droplets.account.keys.delete(ssh_key_identifier) -> None +- client.gpu_droplets.account.keys.create(\*\*params) -> KeyCreateResponse +- 
client.gpu_droplets.account.keys.retrieve(ssh_key_identifier) -> KeyRetrieveResponse +- client.gpu_droplets.account.keys.update(ssh_key_identifier, \*\*params) -> KeyUpdateResponse +- client.gpu_droplets.account.keys.list(\*\*params) -> KeyListResponse +- client.gpu_droplets.account.keys.delete(ssh_key_identifier) -> None # Inference @@ -759,7 +753,7 @@ Methods: Types: ```python -from do_gradientai.types.inference import ( +from gradient.types.inference import ( APIModelAPIKeyInfo, APIKeyCreateResponse, APIKeyUpdateResponse, @@ -771,18 +765,18 @@ from do_gradientai.types.inference import ( Methods: -- client.inference.api_keys.create(\*\*params) -> APIKeyCreateResponse -- client.inference.api_keys.update(path_api_key_uuid, \*\*params) -> APIKeyUpdateResponse -- client.inference.api_keys.list(\*\*params) -> APIKeyListResponse -- client.inference.api_keys.delete(api_key_uuid) -> APIKeyDeleteResponse -- client.inference.api_keys.update_regenerate(api_key_uuid) -> APIKeyUpdateRegenerateResponse +- client.inference.api_keys.create(\*\*params) -> APIKeyCreateResponse +- client.inference.api_keys.update(path_api_key_uuid, \*\*params) -> APIKeyUpdateResponse +- client.inference.api_keys.list(\*\*params) -> APIKeyListResponse +- client.inference.api_keys.delete(api_key_uuid) -> APIKeyDeleteResponse +- client.inference.api_keys.update_regenerate(api_key_uuid) -> APIKeyUpdateRegenerateResponse # KnowledgeBases Types: ```python -from do_gradientai.types import ( +from gradient.types import ( APIKnowledgeBase, KnowledgeBaseCreateResponse, KnowledgeBaseRetrieveResponse, @@ -794,18 +788,18 @@ from do_gradientai.types import ( Methods: -- client.knowledge_bases.create(\*\*params) -> KnowledgeBaseCreateResponse -- client.knowledge_bases.retrieve(uuid) -> KnowledgeBaseRetrieveResponse -- client.knowledge_bases.update(path_uuid, \*\*params) -> KnowledgeBaseUpdateResponse -- client.knowledge_bases.list(\*\*params) -> KnowledgeBaseListResponse -- client.knowledge_bases.delete(uuid) -> KnowledgeBaseDeleteResponse +- client.knowledge_bases.create(\*\*params) -> KnowledgeBaseCreateResponse +- client.knowledge_bases.retrieve(uuid) -> KnowledgeBaseRetrieveResponse +- client.knowledge_bases.update(path_uuid, \*\*params) -> KnowledgeBaseUpdateResponse +- client.knowledge_bases.list(\*\*params) -> KnowledgeBaseListResponse +- client.knowledge_bases.delete(uuid) -> KnowledgeBaseDeleteResponse ## DataSources Types: ```python -from do_gradientai.types.knowledge_bases import ( +from gradient.types.knowledge_bases import ( APIFileUploadDataSource, APIKnowledgeBaseDataSource, APISpacesDataSource, @@ -819,16 +813,16 @@ from do_gradientai.types.knowledge_bases import ( Methods: -- client.knowledge_bases.data_sources.create(path_knowledge_base_uuid, \*\*params) -> DataSourceCreateResponse -- client.knowledge_bases.data_sources.list(knowledge_base_uuid, \*\*params) -> DataSourceListResponse -- client.knowledge_bases.data_sources.delete(data_source_uuid, \*, knowledge_base_uuid) -> DataSourceDeleteResponse +- client.knowledge_bases.data_sources.create(path_knowledge_base_uuid, \*\*params) -> DataSourceCreateResponse +- client.knowledge_bases.data_sources.list(knowledge_base_uuid, \*\*params) -> DataSourceListResponse +- client.knowledge_bases.data_sources.delete(data_source_uuid, \*, knowledge_base_uuid) -> DataSourceDeleteResponse ## IndexingJobs Types: ```python -from do_gradientai.types.knowledge_bases import ( +from gradient.types.knowledge_bases import ( APIIndexedDataSource, APIIndexingJob, IndexingJobCreateResponse, @@ 
-841,23 +835,23 @@ from do_gradientai.types.knowledge_bases import ( Methods: -- client.knowledge_bases.indexing_jobs.create(\*\*params) -> IndexingJobCreateResponse -- client.knowledge_bases.indexing_jobs.retrieve(uuid) -> IndexingJobRetrieveResponse -- client.knowledge_bases.indexing_jobs.list(\*\*params) -> IndexingJobListResponse -- client.knowledge_bases.indexing_jobs.retrieve_data_sources(indexing_job_uuid) -> IndexingJobRetrieveDataSourcesResponse -- client.knowledge_bases.indexing_jobs.update_cancel(path_uuid, \*\*params) -> IndexingJobUpdateCancelResponse +- client.knowledge_bases.indexing_jobs.create(\*\*params) -> IndexingJobCreateResponse +- client.knowledge_bases.indexing_jobs.retrieve(uuid) -> IndexingJobRetrieveResponse +- client.knowledge_bases.indexing_jobs.list(\*\*params) -> IndexingJobListResponse +- client.knowledge_bases.indexing_jobs.retrieve_data_sources(indexing_job_uuid) -> IndexingJobRetrieveDataSourcesResponse +- client.knowledge_bases.indexing_jobs.update_cancel(path_uuid, \*\*params) -> IndexingJobUpdateCancelResponse # Models Types: ```python -from do_gradientai.types import APIAgreement, APIModel, APIModelVersion, ModelListResponse +from gradient.types import APIAgreement, APIModel, APIModelVersion, ModelListResponse ``` Methods: -- client.models.list(\*\*params) -> ModelListResponse +- client.models.list(\*\*params) -> ModelListResponse ## Providers @@ -866,7 +860,7 @@ Methods: Types: ```python -from do_gradientai.types.models.providers import ( +from gradient.types.models.providers import ( AnthropicCreateResponse, AnthropicRetrieveResponse, AnthropicUpdateResponse, @@ -878,19 +872,19 @@ from do_gradientai.types.models.providers import ( Methods: -- client.models.providers.anthropic.create(\*\*params) -> AnthropicCreateResponse -- client.models.providers.anthropic.retrieve(api_key_uuid) -> AnthropicRetrieveResponse -- client.models.providers.anthropic.update(path_api_key_uuid, \*\*params) -> AnthropicUpdateResponse -- client.models.providers.anthropic.list(\*\*params) -> AnthropicListResponse -- client.models.providers.anthropic.delete(api_key_uuid) -> AnthropicDeleteResponse -- client.models.providers.anthropic.list_agents(uuid, \*\*params) -> AnthropicListAgentsResponse +- client.models.providers.anthropic.create(\*\*params) -> AnthropicCreateResponse +- client.models.providers.anthropic.retrieve(api_key_uuid) -> AnthropicRetrieveResponse +- client.models.providers.anthropic.update(path_api_key_uuid, \*\*params) -> AnthropicUpdateResponse +- client.models.providers.anthropic.list(\*\*params) -> AnthropicListResponse +- client.models.providers.anthropic.delete(api_key_uuid) -> AnthropicDeleteResponse +- client.models.providers.anthropic.list_agents(uuid, \*\*params) -> AnthropicListAgentsResponse ### OpenAI Types: ```python -from do_gradientai.types.models.providers import ( +from gradient.types.models.providers import ( OpenAICreateResponse, OpenAIRetrieveResponse, OpenAIUpdateResponse, @@ -902,24 +896,24 @@ from do_gradientai.types.models.providers import ( Methods: -- client.models.providers.openai.create(\*\*params) -> OpenAICreateResponse -- client.models.providers.openai.retrieve(api_key_uuid) -> OpenAIRetrieveResponse -- client.models.providers.openai.update(path_api_key_uuid, \*\*params) -> OpenAIUpdateResponse -- client.models.providers.openai.list(\*\*params) -> OpenAIListResponse -- client.models.providers.openai.delete(api_key_uuid) -> OpenAIDeleteResponse -- client.models.providers.openai.retrieve_agents(uuid, \*\*params) -> 
OpenAIRetrieveAgentsResponse +- client.models.providers.openai.create(\*\*params) -> OpenAICreateResponse +- client.models.providers.openai.retrieve(api_key_uuid) -> OpenAIRetrieveResponse +- client.models.providers.openai.update(path_api_key_uuid, \*\*params) -> OpenAIUpdateResponse +- client.models.providers.openai.list(\*\*params) -> OpenAIListResponse +- client.models.providers.openai.delete(api_key_uuid) -> OpenAIDeleteResponse +- client.models.providers.openai.retrieve_agents(uuid, \*\*params) -> OpenAIRetrieveAgentsResponse # Regions Types: ```python -from do_gradientai.types import RegionListResponse +from gradient.types import RegionListResponse ``` Methods: -- client.regions.list(\*\*params) -> RegionListResponse +- client.regions.list(\*\*params) -> RegionListResponse # Databases @@ -930,7 +924,7 @@ Methods: Types: ```python -from do_gradientai.types.databases.schema_registry import ( +from gradient.types.databases.schema_registry import ( ConfigRetrieveResponse, ConfigUpdateResponse, ConfigRetrieveSubjectResponse, @@ -940,7 +934,7 @@ from do_gradientai.types.databases.schema_registry import ( Methods: -- client.databases.schema_registry.config.retrieve(database_cluster_uuid) -> ConfigRetrieveResponse -- client.databases.schema_registry.config.update(database_cluster_uuid, \*\*params) -> ConfigUpdateResponse -- client.databases.schema_registry.config.retrieve_subject(subject_name, \*, database_cluster_uuid) -> ConfigRetrieveSubjectResponse -- client.databases.schema_registry.config.update_subject(subject_name, \*, database_cluster_uuid, \*\*params) -> ConfigUpdateSubjectResponse +- client.databases.schema_registry.config.retrieve(database_cluster_uuid) -> ConfigRetrieveResponse +- client.databases.schema_registry.config.update(database_cluster_uuid, \*\*params) -> ConfigUpdateResponse +- client.databases.schema_registry.config.retrieve_subject(subject_name, \*, database_cluster_uuid) -> ConfigRetrieveSubjectResponse +- client.databases.schema_registry.config.update_subject(subject_name, \*, database_cluster_uuid, \*\*params) -> ConfigUpdateSubjectResponse diff --git a/mypy.ini b/mypy.ini index 82b0c891..9a8e555e 100644 --- a/mypy.ini +++ b/mypy.ini @@ -8,7 +8,7 @@ show_error_codes = True # # We also exclude our `tests` as mypy doesn't always infer # types correctly and Pyright will still catch any type errors. 
-exclude = ^(src/do_gradientai/_files\.py|_dev/.*\.py|tests/.*)$ +exclude = ^(src/gradient/_files\.py|_dev/.*\.py|tests/.*)$ strict_equality = True implicit_reexport = True diff --git a/pyproject.toml b/pyproject.toml index 7bb32f6f..6628f6fa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,19 +1,17 @@ [project] -name = "do_gradientai" -version = "0.1.0-beta.4" -description = "The official Python library for GradientAI" +name = "gradient" +version = "3.0.0-beta.1" +description = "The official Python library for the Gradient API" dynamic = ["readme"] license = "Apache-2.0" -authors = [ -{ name = "Gradient AI", email = "" }, -] +authors = [{ name = "Gradient", email = "" }] dependencies = [ - "httpx>=0.23.0, <1", - "pydantic>=1.9.0, <3", - "typing-extensions>=4.10, <5", - "anyio>=3.5.0, <5", - "distro>=1.7.0, <2", - "sniffio", + "httpx>=0.23.0, <1", + "pydantic>=1.9.0, <3", + "typing-extensions>=4.10, <5", + "anyio>=3.5.0, <5", + "distro>=1.7.0, <2", + "sniffio", ] requires-python = ">= 3.8" classifiers = [ @@ -31,12 +29,12 @@ classifiers = [ "Operating System :: POSIX :: Linux", "Operating System :: Microsoft :: Windows", "Topic :: Software Development :: Libraries :: Python Modules", - "License :: OSI Approved :: Apache Software License" + "License :: OSI Approved :: Apache Software License", ] [project.urls] -Homepage = "https://github.com/digitalocean/gradientai-python" -Repository = "https://github.com/digitalocean/gradientai-python" +Homepage = "https://github.com/digitalocean/gradient-python" +Repository = "https://github.com/digitalocean/gradient-python" [project.optional-dependencies] aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.8"] @@ -45,19 +43,19 @@ aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.8"] managed = true # version pins are in requirements-dev.lock dev-dependencies = [ - "pyright==1.1.399", - "mypy", - "respx", - "pytest", - "pytest-asyncio", - "ruff", - "time-machine", - "nox", - "dirty-equals>=0.6.0", - "importlib-metadata>=6.7.0", - "rich>=13.7.1", - "nest_asyncio==1.6.0", - "pytest-xdist>=3.6.1", + "pyright==1.1.399", + "mypy", + "respx", + "pytest", + "pytest-asyncio", + "ruff", + "time-machine", + "nox", + "dirty-equals>=0.6.0", + "importlib-metadata>=6.7.0", + "rich>=13.7.1", + "nest_asyncio==1.6.0", + "pytest-xdist>=3.6.1", ] [tool.rye.scripts] @@ -67,26 +65,19 @@ format = { chain = [ "fix:ruff", # run formatting again to fix any inconsistencies when imports are stripped "format:ruff", -]} +] } "format:docs" = "python scripts/utils/ruffen-docs.py README.md api.md" "format:ruff" = "ruff format" -"lint" = { chain = [ - "check:ruff", - "typecheck", - "check:importable", -]} +"lint" = { chain = ["check:ruff", "typecheck", "check:importable"] } "check:ruff" = "ruff check ." "fix:ruff" = "ruff check --fix ." -"check:importable" = "python -c 'import do_gradientai'" +"check:importable" = "python -c 'import gradient'" -typecheck = { chain = [ - "typecheck:pyright", - "typecheck:mypy" -]} +typecheck = { chain = ["typecheck:pyright", "typecheck:mypy"] } "typecheck:pyright" = "pyright" -"typecheck:verify-types" = "pyright --verifytypes do_gradientai --ignoreexternal" +"typecheck:verify-types" = "pyright --verifytypes gradient --ignoreexternal" "typecheck:mypy" = "mypy ." 
[build-system] @@ -94,12 +85,10 @@ requires = ["hatchling==1.26.3", "hatch-fancy-pypi-readme"] build-backend = "hatchling.build" [tool.hatch.build] -include = [ - "src/*" -] +include = ["src/*"] [tool.hatch.build.targets.wheel] -packages = ["src/do_gradientai"] +packages = ["src/gradient"] [tool.hatch.build.targets.sdist] # Basically everything except hidden files/directories (such as .github, .devcontainers, .python-version, etc) @@ -125,7 +114,7 @@ path = "README.md" [[tool.hatch.metadata.hooks.fancy-pypi-readme.substitutions]] # replace relative links with absolute links pattern = '\[(.+?)\]\(((?!https?://)\S+?)\)' -replacement = '[\1](https://github.com/digitalocean/gradientai-python/tree/main/\g<2>)' +replacement = '[\1](https://github.com/digitalocean/gradient-python/tree/main/\g<2>)' [tool.pytest.ini_options] testpaths = ["tests"] @@ -133,9 +122,7 @@ addopts = "--tb=short -n auto" xfail_strict = true asyncio_mode = "auto" asyncio_default_fixture_loop_scope = "session" -filterwarnings = [ - "error" -] +filterwarnings = ["error"] [tool.pyright] # this enables practically every flag given by pyright. @@ -144,11 +131,7 @@ filterwarnings = [ typeCheckingMode = "strict" pythonVersion = "3.8" -exclude = [ - "_dev", - ".venv", - ".nox", -] +exclude = ["_dev", ".venv", ".nox"] reportImplicitOverride = true reportOverlappingOverload = false @@ -202,7 +185,7 @@ length-sort = true length-sort-straight = true combine-as-imports = true extra-standard-library = ["typing_extensions"] -known-first-party = ["do_gradientai", "tests"] +known-first-party = ["gradient", "tests"] [tool.ruff.lint.per-file-ignores] "bin/**.py" = ["T201", "T203"] diff --git a/release-please-config.json b/release-please-config.json index a320c1a8..0b0d1705 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -61,6 +61,6 @@ ], "release-type": "python", "extra-files": [ - "src/do_gradientai/_version.py" + "src/gradient/_version.py" ] } \ No newline at end of file diff --git a/requirements-dev.lock b/requirements-dev.lock index f839fd0e..7a0f60ab 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -13,14 +13,14 @@ aiohappyeyeballs==2.6.1 # via aiohttp aiohttp==3.12.8 - # via do-gradientai + # via gradient # via httpx-aiohttp aiosignal==1.3.2 # via aiohttp annotated-types==0.6.0 # via pydantic anyio==4.4.0 - # via do-gradientai + # via gradient # via httpx argcomplete==3.1.2 # via nox @@ -37,7 +37,7 @@ dirty-equals==0.6.0 distlib==0.3.7 # via virtualenv distro==1.8.0 - # via do-gradientai + # via gradient exceptiongroup==1.2.2 # via anyio # via pytest @@ -53,11 +53,11 @@ h11==0.16.0 httpcore==1.0.9 # via httpx httpx==0.28.1 - # via do-gradientai + # via gradient # via httpx-aiohttp # via respx httpx-aiohttp==0.1.8 - # via do-gradientai + # via gradient idna==3.4 # via anyio # via httpx @@ -90,7 +90,7 @@ propcache==0.3.1 # via aiohttp # via yarl pydantic==2.10.3 - # via do-gradientai + # via gradient pydantic-core==2.27.1 # via pydantic pygments==2.18.0 @@ -114,14 +114,14 @@ six==1.16.0 # via python-dateutil sniffio==1.3.0 # via anyio - # via do-gradientai + # via gradient time-machine==2.9.0 tomli==2.0.2 # via mypy # via pytest typing-extensions==4.12.2 # via anyio - # via do-gradientai + # via gradient # via multidict # via mypy # via pydantic diff --git a/requirements.lock b/requirements.lock index 33a3cfb2..f9072669 100644 --- a/requirements.lock +++ b/requirements.lock @@ -13,14 +13,14 @@ aiohappyeyeballs==2.6.1 # via aiohttp aiohttp==3.12.8 - # via do-gradientai + # via gradient # via 
httpx-aiohttp aiosignal==1.3.2 # via aiohttp annotated-types==0.6.0 # via pydantic anyio==4.4.0 - # via do-gradientai + # via gradient # via httpx async-timeout==5.0.1 # via aiohttp @@ -30,7 +30,7 @@ certifi==2023.7.22 # via httpcore # via httpx distro==1.8.0 - # via do-gradientai + # via gradient exceptiongroup==1.2.2 # via anyio frozenlist==1.6.2 @@ -41,10 +41,10 @@ h11==0.16.0 httpcore==1.0.9 # via httpx httpx==0.28.1 - # via do-gradientai + # via gradient # via httpx-aiohttp httpx-aiohttp==0.1.8 - # via do-gradientai + # via gradient idna==3.4 # via anyio # via httpx @@ -56,15 +56,15 @@ propcache==0.3.1 # via aiohttp # via yarl pydantic==2.10.3 - # via do-gradientai + # via gradient pydantic-core==2.27.1 # via pydantic sniffio==1.3.0 # via anyio - # via do-gradientai + # via gradient typing-extensions==4.12.2 # via anyio - # via do-gradientai + # via gradient # via multidict # via pydantic # via pydantic-core diff --git a/scripts/lint b/scripts/lint index e46e909b..9ccb6ae5 100755 --- a/scripts/lint +++ b/scripts/lint @@ -8,4 +8,4 @@ echo "==> Running lints" rye run lint echo "==> Making sure it imports" -rye run python -c 'import do_gradientai' +rye run python -c 'import gradient' diff --git a/scripts/utils/upload-artifact.sh b/scripts/utils/upload-artifact.sh index 170e8cfe..d93584b2 100755 --- a/scripts/utils/upload-artifact.sh +++ b/scripts/utils/upload-artifact.sh @@ -20,7 +20,7 @@ UPLOAD_RESPONSE=$(curl -v -X PUT \ if echo "$UPLOAD_RESPONSE" | grep -q "HTTP/[0-9.]* 200"; then echo -e "\033[32mUploaded build to Stainless storage.\033[0m" - echo -e "\033[32mInstallation: pip install 'https://pkg.stainless.com/s/gradientai-python/$SHA/$FILENAME'\033[0m" + echo -e "\033[32mInstallation: pip install 'https://pkg.stainless.com/s/gradient-python/$SHA/$FILENAME'\033[0m" else echo -e "\033[31mFailed to upload artifact.\033[0m" exit 1 diff --git a/src/do_gradientai/__init__.py b/src/gradient/__init__.py similarity index 90% rename from src/do_gradientai/__init__.py rename to src/gradient/__init__.py index 41b943b2..c78eff30 100644 --- a/src/do_gradientai/__init__.py +++ b/src/gradient/__init__.py @@ -9,12 +9,12 @@ Client, Stream, Timeout, + Gradient, Transport, - GradientAI, AsyncClient, AsyncStream, + AsyncGradient, RequestOptions, - AsyncGradientAI, ) from ._models import BaseModel from ._version import __title__, __version__ @@ -23,12 +23,12 @@ from ._exceptions import ( APIError, ConflictError, + GradientError, NotFoundError, APIStatusError, RateLimitError, APITimeoutError, BadRequestError, - GradientAIError, APIConnectionError, AuthenticationError, InternalServerError, @@ -49,7 +49,7 @@ "NotGiven", "NOT_GIVEN", "Omit", - "GradientAIError", + "GradientError", "APIError", "APIStatusError", "APITimeoutError", @@ -69,8 +69,8 @@ "AsyncClient", "Stream", "AsyncStream", - "GradientAI", - "AsyncGradientAI", + "Gradient", + "AsyncGradient", "file_from_path", "BaseModel", "DEFAULT_TIMEOUT", @@ -89,12 +89,12 @@ # Update the __module__ attribute for exported symbols so that # error messages point to this module instead of the module # it was originally defined in, e.g. -# do_gradientai._exceptions.NotFoundError -> do_gradientai.NotFoundError +# gradient._exceptions.NotFoundError -> gradient.NotFoundError __locals = locals() for __name in __all__: if not __name.startswith("__"): try: - __locals[__name].__module__ = "do_gradientai" + __locals[__name].__module__ = "gradient" except (TypeError, AttributeError): # Some of our exported symbols are builtins which we can't set attributes for. 
pass diff --git a/src/do_gradientai/_base_client.py b/src/gradient/_base_client.py similarity index 99% rename from src/do_gradientai/_base_client.py rename to src/gradient/_base_client.py index 326c662c..74f3c57a 100644 --- a/src/do_gradientai/_base_client.py +++ b/src/gradient/_base_client.py @@ -389,7 +389,7 @@ def __init__( if max_retries is None: # pyright: ignore[reportUnnecessaryComparison] raise TypeError( - "max_retries cannot be None. If you want to disable retries, pass `0`; if you want unlimited retries, pass `math.inf` or a very high number; if you want the default behavior, pass `do_gradientai.DEFAULT_MAX_RETRIES`" + "max_retries cannot be None. If you want to disable retries, pass `0`; if you want unlimited retries, pass `math.inf` or a very high number; if you want the default behavior, pass `gradient.DEFAULT_MAX_RETRIES`" ) def _enforce_trailing_slash(self, url: URL) -> URL: @@ -532,7 +532,10 @@ def _build_request( is_body_allowed = options.method.lower() != "get" if is_body_allowed: - kwargs["json"] = json_data if is_given(json_data) else None + if isinstance(json_data, bytes): + kwargs["content"] = json_data + else: + kwargs["json"] = json_data if is_given(json_data) else None kwargs["files"] = files else: headers.pop("Content-Type", None) diff --git a/src/do_gradientai/_client.py b/src/gradient/_client.py similarity index 81% rename from src/do_gradientai/_client.py rename to src/gradient/_client.py index 691dfc21..9693f622 100644 --- a/src/do_gradientai/_client.py +++ b/src/gradient/_client.py @@ -32,16 +32,7 @@ ) if TYPE_CHECKING: - from .resources import ( - chat, - agents, - models, - regions, - databases, - inference, - gpu_droplets, - knowledge_bases, - ) + from .resources import chat, agents, models, regions, databases, inference, gpu_droplets, knowledge_bases from .resources.regions import RegionsResource, AsyncRegionsResource from .resources.chat.chat import ChatResource, AsyncChatResource from .resources.gpu_droplets import ( @@ -50,50 +41,26 @@ ) from .resources.agents.agents import AgentsResource, AsyncAgentsResource from .resources.models.models import ModelsResource, AsyncModelsResource - from .resources.gpu_droplets.sizes import ( - SizesResource, - ) from .resources.databases.databases import DatabasesResource, AsyncDatabasesResource from .resources.inference.inference import InferenceResource, AsyncInferenceResource - from .resources.gpu_droplets.snapshots import ( - SnapshotsResource, - ) - from .resources.gpu_droplets.images.images import ( - ImagesResource, - ) - from .resources.gpu_droplets.account.account import ( - AccountResource, - ) - from .resources.gpu_droplets.volumes.volumes import ( - VolumesResource, - ) from .resources.knowledge_bases.knowledge_bases import ( KnowledgeBasesResource, AsyncKnowledgeBasesResource, ) - from .resources.gpu_droplets.firewalls.firewalls import ( - FirewallsResource, - ) - from .resources.gpu_droplets.floating_ips.floating_ips import ( - FloatingIPsResource, - ) - from .resources.gpu_droplets.load_balancers.load_balancers import ( - LoadBalancersResource, - ) __all__ = [ "Timeout", "Transport", "ProxiesTypes", "RequestOptions", - "GradientAI", - "AsyncGradientAI", + "Gradient", + "AsyncGradient", "Client", "AsyncClient", ] -class GradientAI(SyncAPIClient): +class Gradient(SyncAPIClient): # client options api_key: str | None inference_key: str | None @@ -126,29 +93,29 @@ def __init__( # part of our public interface in the future. 
_strict_response_validation: bool = False, ) -> None: - """Construct a new synchronous GradientAI client instance. + """Construct a new synchronous Gradient client instance. This automatically infers the following arguments from their corresponding environment variables if they are not provided: - - `api_key` from `GRADIENTAI_API_KEY` - - `inference_key` from `GRADIENTAI_INFERENCE_KEY` - - `agent_key` from `GRADIENTAI_AGENT_KEY` + - `api_key` from `GRADIENT_API_KEY` + - `inference_key` from `GRADIENT_INFERENCE_KEY` + - `agent_key` from `GRADIENT_AGENT_KEY` """ if api_key is None: - api_key = os.environ.get("GRADIENTAI_API_KEY") + api_key = os.environ.get("GRADIENT_API_KEY") self.api_key = api_key if inference_key is None: - inference_key = os.environ.get("GRADIENTAI_INFERENCE_KEY") + inference_key = os.environ.get("GRADIENT_INFERENCE_KEY") self.inference_key = inference_key if agent_key is None: - agent_key = os.environ.get("GRADIENTAI_AGENT_KEY") + agent_key = os.environ.get("GRADIENT_AGENT_KEY") self.agent_key = agent_key self._agent_endpoint = agent_endpoint if base_url is None: - base_url = os.environ.get("GRADIENT_AI_BASE_URL") + base_url = os.environ.get("GRADIENT_BASE_URL") self._base_url_overridden = base_url is not None if base_url is None: base_url = f"https://api.digitalocean.com/" @@ -228,60 +195,12 @@ def databases(self) -> DatabasesResource: return DatabasesResource(self) @cached_property - def firewalls(self) -> FirewallsResource: - from .resources.gpu_droplets.firewalls import FirewallsResource - - return FirewallsResource(self) - - @cached_property - def floating_ips(self) -> FloatingIPsResource: - from .resources.gpu_droplets.floating_ips import FloatingIPsResource - - return FloatingIPsResource(self) - - @cached_property - def images(self) -> ImagesResource: - from .resources.gpu_droplets.images import ImagesResource - - return ImagesResource(self) - - @cached_property - def load_balancers(self) -> LoadBalancersResource: - from .resources.gpu_droplets.load_balancers import LoadBalancersResource - - return LoadBalancersResource(self) - - @cached_property - def sizes(self) -> SizesResource: - from .resources.gpu_droplets.sizes import SizesResource - - return SizesResource(self) - - @cached_property - def snapshots(self) -> SnapshotsResource: - from .resources.gpu_droplets.snapshots import SnapshotsResource - - return SnapshotsResource(self) - - @cached_property - def volumes(self) -> VolumesResource: - from .resources.gpu_droplets.volumes import VolumesResource - - return VolumesResource(self) + def with_raw_response(self) -> GradientWithRawResponse: + return GradientWithRawResponse(self) @cached_property - def account(self) -> AccountResource: - from .resources.gpu_droplets.account import AccountResource - - return AccountResource(self) - - @cached_property - def with_raw_response(self) -> GradientAIWithRawResponse: - return GradientAIWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> GradientAIWithStreamedResponse: - return GradientAIWithStreamedResponse(self) + def with_streaming_response(self) -> GradientWithStreamedResponse: + return GradientWithStreamedResponse(self) @property @override @@ -307,9 +226,7 @@ def default_headers(self) -> dict[str, str | Omit]: @override def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None: - if (self.api_key or self.agent_key or self.inference_key) and headers.get( - "Authorization" - ): + if (self.api_key or self.agent_key or self.inference_key) and headers.get("Authorization"): 
return if isinstance(custom_headers.get("Authorization"), Omit): return @@ -339,14 +256,10 @@ def copy( Create a new client instance re-using the same options given to the current client with optional overriding. """ if default_headers is not None and set_default_headers is not None: - raise ValueError( - "The `default_headers` and `set_default_headers` arguments are mutually exclusive" - ) + raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive") if default_query is not None and set_default_query is not None: - raise ValueError( - "The `default_query` and `set_default_query` arguments are mutually exclusive" - ) + raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive") headers = self._custom_headers if default_headers is not None: @@ -393,14 +306,10 @@ def _make_status_error( return _exceptions.BadRequestError(err_msg, response=response, body=body) if response.status_code == 401: - return _exceptions.AuthenticationError( - err_msg, response=response, body=body - ) + return _exceptions.AuthenticationError(err_msg, response=response, body=body) if response.status_code == 403: - return _exceptions.PermissionDeniedError( - err_msg, response=response, body=body - ) + return _exceptions.PermissionDeniedError(err_msg, response=response, body=body) if response.status_code == 404: return _exceptions.NotFoundError(err_msg, response=response, body=body) @@ -409,21 +318,17 @@ def _make_status_error( return _exceptions.ConflictError(err_msg, response=response, body=body) if response.status_code == 422: - return _exceptions.UnprocessableEntityError( - err_msg, response=response, body=body - ) + return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body) if response.status_code == 429: return _exceptions.RateLimitError(err_msg, response=response, body=body) if response.status_code >= 500: - return _exceptions.InternalServerError( - err_msg, response=response, body=body - ) + return _exceptions.InternalServerError(err_msg, response=response, body=body) return APIStatusError(err_msg, response=response, body=body) -class AsyncGradientAI(AsyncAPIClient): +class AsyncGradient(AsyncAPIClient): # client options api_key: str | None inference_key: str | None @@ -456,29 +361,29 @@ def __init__( # part of our public interface in the future. _strict_response_validation: bool = False, ) -> None: - """Construct a new async AsyncGradientAI client instance. + """Construct a new async AsyncGradient client instance. 
This automatically infers the following arguments from their corresponding environment variables if they are not provided: - - `api_key` from `GRADIENTAI_API_KEY` - - `inference_key` from `GRADIENTAI_INFERENCE_KEY` - - `agent_key` from `GRADIENTAI_AGENT_KEY` + - `api_key` from `GRADIENT_API_KEY` + - `inference_key` from `GRADIENT_INFERENCE_KEY` + - `agent_key` from `GRADIENT_AGENT_KEY` """ if api_key is None: - api_key = os.environ.get("GRADIENTAI_API_KEY") + api_key = os.environ.get("GRADIENT_API_KEY") self.api_key = api_key if inference_key is None: - inference_key = os.environ.get("GRADIENTAI_INFERENCE_KEY") + inference_key = os.environ.get("GRADIENT_INFERENCE_KEY") self.inference_key = inference_key if agent_key is None: - agent_key = os.environ.get("GRADIENTAI_AGENT_KEY") + agent_key = os.environ.get("GRADIENT_AGENT_KEY") self.agent_key = agent_key self._agent_endpoint = agent_endpoint if base_url is None: - base_url = os.environ.get("GRADIENT_AI_BASE_URL") + base_url = os.environ.get("GRADIENT_BASE_URL") self._base_url_overridden = base_url is not None if base_url is None: base_url = f"https://api.digitalocean.com/" @@ -558,12 +463,12 @@ def databases(self) -> AsyncDatabasesResource: return AsyncDatabasesResource(self) @cached_property - def with_raw_response(self) -> AsyncGradientAIWithRawResponse: - return AsyncGradientAIWithRawResponse(self) + def with_raw_response(self) -> AsyncGradientWithRawResponse: + return AsyncGradientWithRawResponse(self) @cached_property - def with_streaming_response(self) -> AsyncGradientAIWithStreamedResponse: - return AsyncGradientAIWithStreamedResponse(self) + def with_streaming_response(self) -> AsyncGradientWithStreamedResponse: + return AsyncGradientWithStreamedResponse(self) @property @override @@ -589,9 +494,7 @@ def default_headers(self) -> dict[str, str | Omit]: @override def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None: - if (self.api_key or self.agent_key or self.inference_key) and headers.get( - "Authorization" - ): + if (self.api_key or self.agent_key or self.inference_key) and headers.get("Authorization"): return if isinstance(custom_headers.get("Authorization"), Omit): return @@ -621,14 +524,10 @@ def copy( Create a new client instance re-using the same options given to the current client with optional overriding. 
""" if default_headers is not None and set_default_headers is not None: - raise ValueError( - "The `default_headers` and `set_default_headers` arguments are mutually exclusive" - ) + raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive") if default_query is not None and set_default_query is not None: - raise ValueError( - "The `default_query` and `set_default_query` arguments are mutually exclusive" - ) + raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive") headers = self._custom_headers if default_headers is not None: @@ -675,14 +574,10 @@ def _make_status_error( return _exceptions.BadRequestError(err_msg, response=response, body=body) if response.status_code == 401: - return _exceptions.AuthenticationError( - err_msg, response=response, body=body - ) + return _exceptions.AuthenticationError(err_msg, response=response, body=body) if response.status_code == 403: - return _exceptions.PermissionDeniedError( - err_msg, response=response, body=body - ) + return _exceptions.PermissionDeniedError(err_msg, response=response, body=body) if response.status_code == 404: return _exceptions.NotFoundError(err_msg, response=response, body=body) @@ -691,24 +586,20 @@ def _make_status_error( return _exceptions.ConflictError(err_msg, response=response, body=body) if response.status_code == 422: - return _exceptions.UnprocessableEntityError( - err_msg, response=response, body=body - ) + return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body) if response.status_code == 429: return _exceptions.RateLimitError(err_msg, response=response, body=body) if response.status_code >= 500: - return _exceptions.InternalServerError( - err_msg, response=response, body=body - ) + return _exceptions.InternalServerError(err_msg, response=response, body=body) return APIStatusError(err_msg, response=response, body=body) -class GradientAIWithRawResponse: - _client: GradientAI +class GradientWithRawResponse: + _client: Gradient - def __init__(self, client: GradientAI) -> None: + def __init__(self, client: Gradient) -> None: self._client = client @cached_property @@ -760,10 +651,10 @@ def databases(self) -> databases.DatabasesResourceWithRawResponse: return DatabasesResourceWithRawResponse(self._client.databases) -class AsyncGradientAIWithRawResponse: - _client: AsyncGradientAI +class AsyncGradientWithRawResponse: + _client: AsyncGradient - def __init__(self, client: AsyncGradientAI) -> None: + def __init__(self, client: AsyncGradient) -> None: self._client = client @cached_property @@ -819,10 +710,10 @@ def databases(self) -> databases.AsyncDatabasesResourceWithRawResponse: return AsyncDatabasesResourceWithRawResponse(self._client.databases) -class GradientAIWithStreamedResponse: - _client: GradientAI +class GradientWithStreamedResponse: + _client: Gradient - def __init__(self, client: GradientAI) -> None: + def __init__(self, client: Gradient) -> None: self._client = client @cached_property @@ -878,10 +769,10 @@ def databases(self) -> databases.DatabasesResourceWithStreamingResponse: return DatabasesResourceWithStreamingResponse(self._client.databases) -class AsyncGradientAIWithStreamedResponse: - _client: AsyncGradientAI +class AsyncGradientWithStreamedResponse: + _client: AsyncGradient - def __init__(self, client: AsyncGradientAI) -> None: + def __init__(self, client: AsyncGradient) -> None: self._client = client @cached_property @@ -920,9 +811,7 @@ def knowledge_bases( 
AsyncKnowledgeBasesResourceWithStreamingResponse, ) - return AsyncKnowledgeBasesResourceWithStreamingResponse( - self._client.knowledge_bases - ) + return AsyncKnowledgeBasesResourceWithStreamingResponse(self._client.knowledge_bases) @cached_property def models(self) -> models.AsyncModelsResourceWithStreamingResponse: @@ -943,6 +832,6 @@ def databases(self) -> databases.AsyncDatabasesResourceWithStreamingResponse: return AsyncDatabasesResourceWithStreamingResponse(self._client.databases) -Client = GradientAI +Client = Gradient -AsyncClient = AsyncGradientAI +AsyncClient = AsyncGradient diff --git a/src/do_gradientai/_compat.py b/src/gradient/_compat.py similarity index 100% rename from src/do_gradientai/_compat.py rename to src/gradient/_compat.py diff --git a/src/do_gradientai/_constants.py b/src/gradient/_constants.py similarity index 100% rename from src/do_gradientai/_constants.py rename to src/gradient/_constants.py diff --git a/src/do_gradientai/_exceptions.py b/src/gradient/_exceptions.py similarity index 97% rename from src/do_gradientai/_exceptions.py rename to src/gradient/_exceptions.py index 759c8d86..5db08573 100644 --- a/src/do_gradientai/_exceptions.py +++ b/src/gradient/_exceptions.py @@ -18,11 +18,11 @@ ] -class GradientAIError(Exception): +class GradientError(Exception): pass -class APIError(GradientAIError): +class APIError(GradientError): message: str request: httpx.Request diff --git a/src/do_gradientai/_files.py b/src/gradient/_files.py similarity index 92% rename from src/do_gradientai/_files.py rename to src/gradient/_files.py index 715cc207..cc14c14f 100644 --- a/src/do_gradientai/_files.py +++ b/src/gradient/_files.py @@ -69,12 +69,12 @@ def _transform_file(file: FileTypes) -> HttpxFileTypes: return file if is_tuple_t(file): - return (file[0], _read_file_content(file[1]), *file[2:]) + return (file[0], read_file_content(file[1]), *file[2:]) raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple") -def _read_file_content(file: FileContent) -> HttpxFileContent: +def read_file_content(file: FileContent) -> HttpxFileContent: if isinstance(file, os.PathLike): return pathlib.Path(file).read_bytes() return file @@ -111,12 +111,12 @@ async def _async_transform_file(file: FileTypes) -> HttpxFileTypes: return file if is_tuple_t(file): - return (file[0], await _async_read_file_content(file[1]), *file[2:]) + return (file[0], await async_read_file_content(file[1]), *file[2:]) raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple") -async def _async_read_file_content(file: FileContent) -> HttpxFileContent: +async def async_read_file_content(file: FileContent) -> HttpxFileContent: if isinstance(file, os.PathLike): return await anyio.Path(file).read_bytes() diff --git a/src/do_gradientai/_models.py b/src/gradient/_models.py similarity index 100% rename from src/do_gradientai/_models.py rename to src/gradient/_models.py diff --git a/src/do_gradientai/_qs.py b/src/gradient/_qs.py similarity index 100% rename from src/do_gradientai/_qs.py rename to src/gradient/_qs.py diff --git a/src/do_gradientai/_resource.py b/src/gradient/_resource.py similarity index 80% rename from src/do_gradientai/_resource.py rename to src/gradient/_resource.py index 9182ee0b..f2bb6c14 100644 --- a/src/do_gradientai/_resource.py +++ b/src/gradient/_resource.py @@ -8,13 +8,13 @@ import anyio if TYPE_CHECKING: - from ._client import GradientAI, AsyncGradientAI + from ._client import Gradient, AsyncGradient class SyncAPIResource: - _client: 
GradientAI + _client: Gradient - def __init__(self, client: GradientAI) -> None: + def __init__(self, client: Gradient) -> None: self._client = client self._get = client.get self._post = client.post @@ -28,9 +28,9 @@ def _sleep(self, seconds: float) -> None: class AsyncAPIResource: - _client: AsyncGradientAI + _client: AsyncGradient - def __init__(self, client: AsyncGradientAI) -> None: + def __init__(self, client: AsyncGradient) -> None: self._client = client self._get = client.get self._post = client.post diff --git a/src/do_gradientai/_response.py b/src/gradient/_response.py similarity index 98% rename from src/do_gradientai/_response.py rename to src/gradient/_response.py index 8ca43971..4702edaf 100644 --- a/src/do_gradientai/_response.py +++ b/src/gradient/_response.py @@ -29,7 +29,7 @@ from ._models import BaseModel, is_basemodel from ._constants import RAW_RESPONSE_HEADER, OVERRIDE_CAST_TO_HEADER from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type -from ._exceptions import GradientAIError, APIResponseValidationError +from ._exceptions import GradientError, APIResponseValidationError if TYPE_CHECKING: from ._models import FinalRequestOptions @@ -217,9 +217,7 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T: and not issubclass(origin, BaseModel) and issubclass(origin, pydantic.BaseModel) ): - raise TypeError( - "Pydantic models must subclass our base model type, e.g. `from do_gradientai import BaseModel`" - ) + raise TypeError("Pydantic models must subclass our base model type, e.g. `from gradient import BaseModel`") if ( cast_to is not object @@ -285,7 +283,7 @@ def parse(self, *, to: type[_T] | None = None) -> R | _T: the `to` argument, e.g. ```py - from do_gradientai import BaseModel + from gradient import BaseModel class MyModel(BaseModel): @@ -387,7 +385,7 @@ async def parse(self, *, to: type[_T] | None = None) -> R | _T: the `to` argument, e.g. ```py - from do_gradientai import BaseModel + from gradient import BaseModel class MyModel(BaseModel): @@ -558,11 +556,11 @@ async def stream_to_file( class MissingStreamClassError(TypeError): def __init__(self) -> None: super().__init__( - "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `do_gradientai._streaming` for reference", + "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `gradient._streaming` for reference", ) -class StreamAlreadyConsumed(GradientAIError): +class StreamAlreadyConsumed(GradientError): """ Attempted to read or stream content, but the content has already been streamed. 
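Taken together, the renames above replace every public entry point: the client classes (`GradientAI`/`AsyncGradientAI` become `Gradient`/`AsyncGradient`, with the `Client`/`AsyncClient` aliases preserved), the exception base class (`GradientAIError` becomes `GradientError`), and the environment variables the constructor reads (`GRADIENTAI_API_KEY`, `GRADIENTAI_INFERENCE_KEY`, `GRADIENTAI_AGENT_KEY`, `GRADIENT_AI_BASE_URL` become `GRADIENT_API_KEY`, `GRADIENT_INFERENCE_KEY`, `GRADIENT_AGENT_KEY`, `GRADIENT_BASE_URL`). A minimal migration sketch under those renames; the `client.agents.list()` call is illustrative only and not taken from this diff:

```py
import os

# After this change the package imports as `gradient` (formerly `do_gradientai`).
from gradient import Gradient, GradientError

# The constructor falls back to GRADIENT_API_KEY / GRADIENT_INFERENCE_KEY /
# GRADIENT_AGENT_KEY and GRADIENT_BASE_URL when arguments are omitted.
client = Gradient(api_key=os.environ.get("GRADIENT_API_KEY"))

try:
    # Illustrative call; any non-2xx response raises a GradientError subclass
    # such as NotFoundError or RateLimitError (the base was formerly GradientAIError).
    agents = client.agents.list()
except GradientError as exc:
    print(f"request failed: {exc}")
```

The async client follows the same pattern with `AsyncGradient`.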
diff --git a/src/do_gradientai/_streaming.py b/src/gradient/_streaming.py similarity index 99% rename from src/do_gradientai/_streaming.py rename to src/gradient/_streaming.py index 69a805ad..eb9be89d 100644 --- a/src/do_gradientai/_streaming.py +++ b/src/gradient/_streaming.py @@ -13,7 +13,7 @@ from ._exceptions import APIError if TYPE_CHECKING: - from ._client import GradientAI, AsyncGradientAI + from ._client import Gradient, AsyncGradient _T = TypeVar("_T") @@ -31,7 +31,7 @@ def __init__( *, cast_to: type[_T], response: httpx.Response, - client: GradientAI, + client: Gradient, ) -> None: self.response = response self._cast_to = cast_to @@ -112,7 +112,7 @@ def __init__( *, cast_to: type[_T], response: httpx.Response, - client: AsyncGradientAI, + client: AsyncGradient, ) -> None: self.response = response self._cast_to = cast_to diff --git a/src/do_gradientai/_types.py b/src/gradient/_types.py similarity index 99% rename from src/do_gradientai/_types.py rename to src/gradient/_types.py index c356c700..b44bb2d9 100644 --- a/src/do_gradientai/_types.py +++ b/src/gradient/_types.py @@ -81,7 +81,7 @@ # This unfortunately means that you will either have # to import this type and pass it explicitly: # -# from do_gradientai import NoneType +# from gradient import NoneType # client.get('/foo', cast_to=NoneType) # # or build it yourself: diff --git a/src/do_gradientai/_utils/__init__.py b/src/gradient/_utils/__init__.py similarity index 100% rename from src/do_gradientai/_utils/__init__.py rename to src/gradient/_utils/__init__.py diff --git a/src/do_gradientai/_utils/_logs.py b/src/gradient/_utils/_logs.py similarity index 70% rename from src/do_gradientai/_utils/_logs.py rename to src/gradient/_utils/_logs.py index ac45b1a5..a60da7f9 100644 --- a/src/do_gradientai/_utils/_logs.py +++ b/src/gradient/_utils/_logs.py @@ -1,12 +1,12 @@ import os import logging -logger: logging.Logger = logging.getLogger("do_gradientai") +logger: logging.Logger = logging.getLogger("gradient") httpx_logger: logging.Logger = logging.getLogger("httpx") def _basic_config() -> None: - # e.g. [2023-10-05 14:12:26 - do_gradientai._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK" + # e.g. [2023-10-05 14:12:26 - gradient._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK" logging.basicConfig( format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S", @@ -14,7 +14,7 @@ def _basic_config() -> None: def setup_logging() -> None: - env = os.environ.get("GRADIENT_AI_LOG") + env = os.environ.get("GRADIENT_LOG") if env == "debug": _basic_config() logger.setLevel(logging.DEBUG) diff --git a/src/do_gradientai/_utils/_proxy.py b/src/gradient/_utils/_proxy.py similarity index 100% rename from src/do_gradientai/_utils/_proxy.py rename to src/gradient/_utils/_proxy.py diff --git a/src/do_gradientai/_utils/_reflection.py b/src/gradient/_utils/_reflection.py similarity index 100% rename from src/do_gradientai/_utils/_reflection.py rename to src/gradient/_utils/_reflection.py diff --git a/src/do_gradientai/_utils/_resources_proxy.py b/src/gradient/_utils/_resources_proxy.py similarity index 50% rename from src/do_gradientai/_utils/_resources_proxy.py rename to src/gradient/_utils/_resources_proxy.py index 03763c3b..bf3e570d 100644 --- a/src/do_gradientai/_utils/_resources_proxy.py +++ b/src/gradient/_utils/_resources_proxy.py @@ -7,17 +7,17 @@ class ResourcesProxy(LazyProxy[Any]): - """A proxy for the `do_gradientai.resources` module. 
+ """A proxy for the `gradient.resources` module. - This is used so that we can lazily import `do_gradientai.resources` only when - needed *and* so that users can just import `do_gradientai` and reference `do_gradientai.resources` + This is used so that we can lazily import `gradient.resources` only when + needed *and* so that users can just import `gradient` and reference `gradient.resources` """ @override def __load__(self) -> Any: import importlib - mod = importlib.import_module("do_gradientai.resources") + mod = importlib.import_module("gradient.resources") return mod diff --git a/src/do_gradientai/_utils/_streams.py b/src/gradient/_utils/_streams.py similarity index 100% rename from src/do_gradientai/_utils/_streams.py rename to src/gradient/_utils/_streams.py diff --git a/src/do_gradientai/_utils/_sync.py b/src/gradient/_utils/_sync.py similarity index 100% rename from src/do_gradientai/_utils/_sync.py rename to src/gradient/_utils/_sync.py diff --git a/src/do_gradientai/_utils/_transform.py b/src/gradient/_utils/_transform.py similarity index 100% rename from src/do_gradientai/_utils/_transform.py rename to src/gradient/_utils/_transform.py diff --git a/src/do_gradientai/_utils/_typing.py b/src/gradient/_utils/_typing.py similarity index 100% rename from src/do_gradientai/_utils/_typing.py rename to src/gradient/_utils/_typing.py diff --git a/src/do_gradientai/_utils/_utils.py b/src/gradient/_utils/_utils.py similarity index 100% rename from src/do_gradientai/_utils/_utils.py rename to src/gradient/_utils/_utils.py diff --git a/src/do_gradientai/_version.py b/src/gradient/_version.py similarity index 50% rename from src/do_gradientai/_version.py rename to src/gradient/_version.py index a0ffe639..f0d3c820 100644 --- a/src/do_gradientai/_version.py +++ b/src/gradient/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. -__title__ = "do_gradientai" -__version__ = "0.1.0-beta.4" # x-release-please-version +__title__ = "gradient" +__version__ = "3.0.0-beta.1" # x-release-please-version diff --git a/src/gradient/lib/.keep b/src/gradient/lib/.keep new file mode 100644 index 00000000..5e2c99fd --- /dev/null +++ b/src/gradient/lib/.keep @@ -0,0 +1,4 @@ +File generated from our OpenAPI spec by Stainless. + +This directory can be used to store custom files to expand the SDK. +It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. 
\ No newline at end of file diff --git a/src/do_gradientai/py.typed b/src/gradient/py.typed similarity index 100% rename from src/do_gradientai/py.typed rename to src/gradient/py.typed diff --git a/src/do_gradientai/resources/__init__.py b/src/gradient/resources/__init__.py similarity index 100% rename from src/do_gradientai/resources/__init__.py rename to src/gradient/resources/__init__.py diff --git a/src/do_gradientai/resources/agents/__init__.py b/src/gradient/resources/agents/__init__.py similarity index 100% rename from src/do_gradientai/resources/agents/__init__.py rename to src/gradient/resources/agents/__init__.py diff --git a/src/do_gradientai/resources/agents/agents.py b/src/gradient/resources/agents/agents.py similarity index 99% rename from src/do_gradientai/resources/agents/agents.py rename to src/gradient/resources/agents/agents.py index 92d696ba..cff147c9 100644 --- a/src/do_gradientai/resources/agents/agents.py +++ b/src/gradient/resources/agents/agents.py @@ -164,7 +164,7 @@ def with_raw_response(self) -> AgentsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AgentsResourceWithRawResponse(self) @@ -173,7 +173,7 @@ def with_streaming_response(self) -> AgentsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AgentsResourceWithStreamingResponse(self) @@ -605,7 +605,7 @@ def with_raw_response(self) -> AsyncAgentsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncAgentsResourceWithRawResponse(self) @@ -614,7 +614,7 @@ def with_streaming_response(self) -> AsyncAgentsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncAgentsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/api_keys.py b/src/gradient/resources/agents/api_keys.py similarity index 99% rename from src/do_gradientai/resources/agents/api_keys.py rename to src/gradient/resources/agents/api_keys.py index 9f4d9660..7e9feb51 100644 --- a/src/do_gradientai/resources/agents/api_keys.py +++ b/src/gradient/resources/agents/api_keys.py @@ -32,7 +32,7 @@ def with_raw_response(self) -> APIKeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return APIKeysResourceWithRawResponse(self) @@ -41,7 +41,7 @@ def with_streaming_response(self) -> APIKeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return APIKeysResourceWithStreamingResponse(self) @@ -288,7 +288,7 @@ def with_raw_response(self) -> AsyncAPIKeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncAPIKeysResourceWithRawResponse(self) @@ -297,7 +297,7 @@ def with_streaming_response(self) -> AsyncAPIKeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncAPIKeysResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/chat/__init__.py b/src/gradient/resources/agents/chat/__init__.py similarity index 100% rename from src/do_gradientai/resources/agents/chat/__init__.py rename to src/gradient/resources/agents/chat/__init__.py diff --git a/src/do_gradientai/resources/agents/chat/chat.py b/src/gradient/resources/agents/chat/chat.py similarity index 93% rename from src/do_gradientai/resources/agents/chat/chat.py rename to src/gradient/resources/agents/chat/chat.py index c87bd158..80947cfb 100644 --- a/src/do_gradientai/resources/agents/chat/chat.py +++ b/src/gradient/resources/agents/chat/chat.py @@ -27,7 +27,7 @@ def with_raw_response(self) -> ChatResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return ChatResourceWithRawResponse(self) @@ -36,7 +36,7 @@ def with_streaming_response(self) -> ChatResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return ChatResourceWithStreamingResponse(self) @@ -52,7 +52,7 @@ def with_raw_response(self) -> AsyncChatResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncChatResourceWithRawResponse(self) @@ -61,7 +61,7 @@ def with_streaming_response(self) -> AsyncChatResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncChatResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/chat/completions.py b/src/gradient/resources/agents/chat/completions.py similarity index 99% rename from src/do_gradientai/resources/agents/chat/completions.py rename to src/gradient/resources/agents/chat/completions.py index 23b17011..67f5fc47 100644 --- a/src/do_gradientai/resources/agents/chat/completions.py +++ b/src/gradient/resources/agents/chat/completions.py @@ -33,7 +33,7 @@ def with_raw_response(self) -> CompletionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return CompletionsResourceWithRawResponse(self) @@ -42,7 +42,7 @@ def with_streaming_response(self) -> CompletionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return CompletionsResourceWithStreamingResponse(self) @@ -524,7 +524,7 @@ def with_raw_response(self) -> AsyncCompletionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncCompletionsResourceWithRawResponse(self) @@ -533,7 +533,7 @@ def with_streaming_response(self) -> AsyncCompletionsResourceWithStreamingRespon """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncCompletionsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/evaluation_datasets.py b/src/gradient/resources/agents/evaluation_datasets.py similarity index 98% rename from src/do_gradientai/resources/agents/evaluation_datasets.py rename to src/gradient/resources/agents/evaluation_datasets.py index 42eca703..d8e960de 100644 --- a/src/do_gradientai/resources/agents/evaluation_datasets.py +++ b/src/gradient/resources/agents/evaluation_datasets.py @@ -37,7 +37,7 @@ def with_raw_response(self) -> EvaluationDatasetsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return EvaluationDatasetsResourceWithRawResponse(self) @@ -46,7 +46,7 @@ def with_streaming_response(self) -> EvaluationDatasetsResourceWithStreamingResp """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return EvaluationDatasetsResourceWithStreamingResponse(self) @@ -144,7 +144,7 @@ def with_raw_response(self) -> AsyncEvaluationDatasetsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncEvaluationDatasetsResourceWithRawResponse(self) @@ -153,7 +153,7 @@ def with_streaming_response(self) -> AsyncEvaluationDatasetsResourceWithStreamin """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncEvaluationDatasetsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/evaluation_metrics/__init__.py b/src/gradient/resources/agents/evaluation_metrics/__init__.py similarity index 100% rename from src/do_gradientai/resources/agents/evaluation_metrics/__init__.py rename to src/gradient/resources/agents/evaluation_metrics/__init__.py diff --git a/src/do_gradientai/resources/agents/evaluation_metrics/anthropic/__init__.py b/src/gradient/resources/agents/evaluation_metrics/anthropic/__init__.py similarity index 100% rename from src/do_gradientai/resources/agents/evaluation_metrics/anthropic/__init__.py rename to src/gradient/resources/agents/evaluation_metrics/anthropic/__init__.py diff --git a/src/do_gradientai/resources/agents/evaluation_metrics/anthropic/anthropic.py b/src/gradient/resources/agents/evaluation_metrics/anthropic/anthropic.py similarity index 93% rename from src/do_gradientai/resources/agents/evaluation_metrics/anthropic/anthropic.py rename to src/gradient/resources/agents/evaluation_metrics/anthropic/anthropic.py index 1532f98e..0079d59b 100644 --- a/src/do_gradientai/resources/agents/evaluation_metrics/anthropic/anthropic.py +++ b/src/gradient/resources/agents/evaluation_metrics/anthropic/anthropic.py @@ -27,7 +27,7 @@ def with_raw_response(self) -> AnthropicResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AnthropicResourceWithRawResponse(self) @@ -36,7 +36,7 @@ def with_streaming_response(self) -> AnthropicResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AnthropicResourceWithStreamingResponse(self) @@ -52,7 +52,7 @@ def with_raw_response(self) -> AsyncAnthropicResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncAnthropicResourceWithRawResponse(self) @@ -61,7 +61,7 @@ def with_streaming_response(self) -> AsyncAnthropicResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncAnthropicResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/evaluation_metrics/anthropic/keys.py b/src/gradient/resources/agents/evaluation_metrics/anthropic/keys.py similarity index 99% rename from src/do_gradientai/resources/agents/evaluation_metrics/anthropic/keys.py rename to src/gradient/resources/agents/evaluation_metrics/anthropic/keys.py index 959e786b..6111bf6f 100644 --- a/src/do_gradientai/resources/agents/evaluation_metrics/anthropic/keys.py +++ b/src/gradient/resources/agents/evaluation_metrics/anthropic/keys.py @@ -38,7 +38,7 @@ def with_raw_response(self) -> KeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return KeysResourceWithRawResponse(self) @@ -47,7 +47,7 @@ def with_streaming_response(self) -> KeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return KeysResourceWithStreamingResponse(self) @@ -330,7 +330,7 @@ def with_raw_response(self) -> AsyncKeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncKeysResourceWithRawResponse(self) @@ -339,7 +339,7 @@ def with_streaming_response(self) -> AsyncKeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncKeysResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/evaluation_metrics/evaluation_metrics.py b/src/gradient/resources/agents/evaluation_metrics/evaluation_metrics.py similarity index 98% rename from src/do_gradientai/resources/agents/evaluation_metrics/evaluation_metrics.py rename to src/gradient/resources/agents/evaluation_metrics/evaluation_metrics.py index 533a68bd..f6453d4d 100644 --- a/src/do_gradientai/resources/agents/evaluation_metrics/evaluation_metrics.py +++ b/src/gradient/resources/agents/evaluation_metrics/evaluation_metrics.py @@ -77,7 +77,7 @@ def with_raw_response(self) -> EvaluationMetricsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return EvaluationMetricsResourceWithRawResponse(self) @@ -86,7 +86,7 @@ def with_streaming_response(self) -> EvaluationMetricsResourceWithStreamingRespo """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return EvaluationMetricsResourceWithStreamingResponse(self) @@ -186,7 +186,7 @@ def with_raw_response(self) -> AsyncEvaluationMetricsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncEvaluationMetricsResourceWithRawResponse(self) @@ -195,7 +195,7 @@ def with_streaming_response(self) -> AsyncEvaluationMetricsResourceWithStreaming """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncEvaluationMetricsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/evaluation_metrics/models.py b/src/gradient/resources/agents/evaluation_metrics/models.py similarity index 97% rename from src/do_gradientai/resources/agents/evaluation_metrics/models.py rename to src/gradient/resources/agents/evaluation_metrics/models.py index 20a44a22..1902a4f0 100644 --- a/src/do_gradientai/resources/agents/evaluation_metrics/models.py +++ b/src/gradient/resources/agents/evaluation_metrics/models.py @@ -31,7 +31,7 @@ def with_raw_response(self) -> ModelsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return ModelsResourceWithRawResponse(self) @@ -40,7 +40,7 @@ def with_streaming_response(self) -> ModelsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return ModelsResourceWithStreamingResponse(self) @@ -128,7 +128,7 @@ def with_raw_response(self) -> AsyncModelsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncModelsResourceWithRawResponse(self) @@ -137,7 +137,7 @@ def with_streaming_response(self) -> AsyncModelsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncModelsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/evaluation_metrics/openai/__init__.py b/src/gradient/resources/agents/evaluation_metrics/openai/__init__.py similarity index 100% rename from src/do_gradientai/resources/agents/evaluation_metrics/openai/__init__.py rename to src/gradient/resources/agents/evaluation_metrics/openai/__init__.py diff --git a/src/do_gradientai/resources/agents/evaluation_metrics/openai/keys.py b/src/gradient/resources/agents/evaluation_metrics/openai/keys.py similarity index 99% rename from src/do_gradientai/resources/agents/evaluation_metrics/openai/keys.py rename to src/gradient/resources/agents/evaluation_metrics/openai/keys.py index 33a71ae1..00131691 100644 --- a/src/do_gradientai/resources/agents/evaluation_metrics/openai/keys.py +++ b/src/gradient/resources/agents/evaluation_metrics/openai/keys.py @@ -38,7 +38,7 @@ def with_raw_response(self) -> KeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return KeysResourceWithRawResponse(self) @@ -47,7 +47,7 @@ def with_streaming_response(self) -> KeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return KeysResourceWithStreamingResponse(self) @@ -328,7 +328,7 @@ def with_raw_response(self) -> AsyncKeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncKeysResourceWithRawResponse(self) @@ -337,7 +337,7 @@ def with_streaming_response(self) -> AsyncKeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncKeysResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/evaluation_metrics/openai/openai.py b/src/gradient/resources/agents/evaluation_metrics/openai/openai.py similarity index 93% rename from src/do_gradientai/resources/agents/evaluation_metrics/openai/openai.py rename to src/gradient/resources/agents/evaluation_metrics/openai/openai.py index d66dbbde..00fd8a7d 100644 --- a/src/do_gradientai/resources/agents/evaluation_metrics/openai/openai.py +++ b/src/gradient/resources/agents/evaluation_metrics/openai/openai.py @@ -27,7 +27,7 @@ def with_raw_response(self) -> OpenAIResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return OpenAIResourceWithRawResponse(self) @@ -36,7 +36,7 @@ def with_streaming_response(self) -> OpenAIResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return OpenAIResourceWithStreamingResponse(self) @@ -52,7 +52,7 @@ def with_raw_response(self) -> AsyncOpenAIResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncOpenAIResourceWithRawResponse(self) @@ -61,7 +61,7 @@ def with_streaming_response(self) -> AsyncOpenAIResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncOpenAIResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/evaluation_metrics/workspaces/__init__.py b/src/gradient/resources/agents/evaluation_metrics/workspaces/__init__.py similarity index 100% rename from src/do_gradientai/resources/agents/evaluation_metrics/workspaces/__init__.py rename to src/gradient/resources/agents/evaluation_metrics/workspaces/__init__.py diff --git a/src/do_gradientai/resources/agents/evaluation_metrics/workspaces/agents.py b/src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py similarity index 98% rename from src/do_gradientai/resources/agents/evaluation_metrics/workspaces/agents.py rename to src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py index a5e68a45..1a73bc60 100644 --- a/src/do_gradientai/resources/agents/evaluation_metrics/workspaces/agents.py +++ b/src/gradient/resources/agents/evaluation_metrics/workspaces/agents.py @@ -31,7 +31,7 @@ def with_raw_response(self) -> AgentsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AgentsResourceWithRawResponse(self) @@ -40,7 +40,7 @@ def with_streaming_response(self) -> AgentsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AgentsResourceWithStreamingResponse(self) @@ -159,7 +159,7 @@ def with_raw_response(self) -> AsyncAgentsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncAgentsResourceWithRawResponse(self) @@ -168,7 +168,7 @@ def with_streaming_response(self) -> AsyncAgentsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncAgentsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/evaluation_metrics/workspaces/workspaces.py b/src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py similarity index 99% rename from src/do_gradientai/resources/agents/evaluation_metrics/workspaces/workspaces.py rename to src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py index cb213e1d..a2cf5ebc 100644 --- a/src/do_gradientai/resources/agents/evaluation_metrics/workspaces/workspaces.py +++ b/src/gradient/resources/agents/evaluation_metrics/workspaces/workspaces.py @@ -49,7 +49,7 @@ def with_raw_response(self) -> WorkspacesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return WorkspacesResourceWithRawResponse(self) @@ -58,7 +58,7 @@ def with_streaming_response(self) -> WorkspacesResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return WorkspacesResourceWithStreamingResponse(self) @@ -311,7 +311,7 @@ def with_raw_response(self) -> AsyncWorkspacesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncWorkspacesResourceWithRawResponse(self) @@ -320,7 +320,7 @@ def with_streaming_response(self) -> AsyncWorkspacesResourceWithStreamingRespons """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncWorkspacesResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/evaluation_runs.py b/src/gradient/resources/agents/evaluation_runs.py similarity index 98% rename from src/do_gradientai/resources/agents/evaluation_runs.py rename to src/gradient/resources/agents/evaluation_runs.py index c5ea2520..e55cc275 100644 --- a/src/do_gradientai/resources/agents/evaluation_runs.py +++ b/src/gradient/resources/agents/evaluation_runs.py @@ -33,7 +33,7 @@ def with_raw_response(self) -> EvaluationRunsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return EvaluationRunsResourceWithRawResponse(self) @@ -42,7 +42,7 @@ def with_streaming_response(self) -> EvaluationRunsResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return EvaluationRunsResourceWithStreamingResponse(self) @@ -235,7 +235,7 @@ def with_raw_response(self) -> AsyncEvaluationRunsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncEvaluationRunsResourceWithRawResponse(self) @@ -244,7 +244,7 @@ def with_streaming_response(self) -> AsyncEvaluationRunsResourceWithStreamingRes """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncEvaluationRunsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/evaluation_test_cases.py b/src/gradient/resources/agents/evaluation_test_cases.py similarity index 99% rename from src/do_gradientai/resources/agents/evaluation_test_cases.py rename to src/gradient/resources/agents/evaluation_test_cases.py index e33f9f91..454576c8 100644 --- a/src/do_gradientai/resources/agents/evaluation_test_cases.py +++ b/src/gradient/resources/agents/evaluation_test_cases.py @@ -42,7 +42,7 @@ def with_raw_response(self) -> EvaluationTestCasesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return EvaluationTestCasesResourceWithRawResponse(self) @@ -51,7 +51,7 @@ def with_streaming_response(self) -> EvaluationTestCasesResourceWithStreamingRes """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return EvaluationTestCasesResourceWithStreamingResponse(self) @@ -304,7 +304,7 @@ def with_raw_response(self) -> AsyncEvaluationTestCasesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncEvaluationTestCasesResourceWithRawResponse(self) @@ -313,7 +313,7 @@ def with_streaming_response(self) -> AsyncEvaluationTestCasesResourceWithStreami """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncEvaluationTestCasesResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/functions.py b/src/gradient/resources/agents/functions.py similarity index 98% rename from src/do_gradientai/resources/agents/functions.py rename to src/gradient/resources/agents/functions.py index 1c5b2015..7986f750 100644 --- a/src/do_gradientai/resources/agents/functions.py +++ b/src/gradient/resources/agents/functions.py @@ -30,7 +30,7 @@ def with_raw_response(self) -> FunctionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return FunctionsResourceWithRawResponse(self) @@ -39,7 +39,7 @@ def with_streaming_response(self) -> FunctionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return FunctionsResourceWithStreamingResponse(self) @@ -235,7 +235,7 @@ def with_raw_response(self) -> AsyncFunctionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncFunctionsResourceWithRawResponse(self) @@ -244,7 +244,7 @@ def with_streaming_response(self) -> AsyncFunctionsResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncFunctionsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/knowledge_bases.py b/src/gradient/resources/agents/knowledge_bases.py similarity index 98% rename from src/do_gradientai/resources/agents/knowledge_bases.py rename to src/gradient/resources/agents/knowledge_bases.py index a5486c34..1664ee84 100644 --- a/src/do_gradientai/resources/agents/knowledge_bases.py +++ b/src/gradient/resources/agents/knowledge_bases.py @@ -27,7 +27,7 @@ def with_raw_response(self) -> KnowledgeBasesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return KnowledgeBasesResourceWithRawResponse(self) @@ -36,7 +36,7 @@ def with_streaming_response(self) -> KnowledgeBasesResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return KnowledgeBasesResourceWithStreamingResponse(self) @@ -166,7 +166,7 @@ def with_raw_response(self) -> AsyncKnowledgeBasesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncKnowledgeBasesResourceWithRawResponse(self) @@ -175,7 +175,7 @@ def with_streaming_response(self) -> AsyncKnowledgeBasesResourceWithStreamingRes """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncKnowledgeBasesResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/routes.py b/src/gradient/resources/agents/routes.py similarity index 99% rename from src/do_gradientai/resources/agents/routes.py rename to src/gradient/resources/agents/routes.py index a7a298f2..1007b08f 100644 --- a/src/do_gradientai/resources/agents/routes.py +++ b/src/gradient/resources/agents/routes.py @@ -31,7 +31,7 @@ def with_raw_response(self) -> RoutesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return RoutesResourceWithRawResponse(self) @@ -40,7 +40,7 @@ def with_streaming_response(self) -> RoutesResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return RoutesResourceWithStreamingResponse(self) @@ -257,7 +257,7 @@ def with_raw_response(self) -> AsyncRoutesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncRoutesResourceWithRawResponse(self) @@ -266,7 +266,7 @@ def with_streaming_response(self) -> AsyncRoutesResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncRoutesResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/agents/versions.py b/src/gradient/resources/agents/versions.py similarity index 98% rename from src/do_gradientai/resources/agents/versions.py rename to src/gradient/resources/agents/versions.py index 77eabea9..bc56e032 100644 --- a/src/do_gradientai/resources/agents/versions.py +++ b/src/gradient/resources/agents/versions.py @@ -29,7 +29,7 @@ def with_raw_response(self) -> VersionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return VersionsResourceWithRawResponse(self) @@ -38,7 +38,7 @@ def with_streaming_response(self) -> VersionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return VersionsResourceWithStreamingResponse(self) @@ -151,7 +151,7 @@ def with_raw_response(self) -> AsyncVersionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncVersionsResourceWithRawResponse(self) @@ -160,7 +160,7 @@ def with_streaming_response(self) -> AsyncVersionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncVersionsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/chat/__init__.py b/src/gradient/resources/chat/__init__.py similarity index 100% rename from src/do_gradientai/resources/chat/__init__.py rename to src/gradient/resources/chat/__init__.py diff --git a/src/do_gradientai/resources/chat/chat.py b/src/gradient/resources/chat/chat.py similarity index 93% rename from src/do_gradientai/resources/chat/chat.py rename to src/gradient/resources/chat/chat.py index 6fa2925d..ac933129 100644 --- a/src/do_gradientai/resources/chat/chat.py +++ b/src/gradient/resources/chat/chat.py @@ -27,7 +27,7 @@ def with_raw_response(self) -> ChatResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return ChatResourceWithRawResponse(self) @@ -36,7 +36,7 @@ def with_streaming_response(self) -> ChatResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return ChatResourceWithStreamingResponse(self) @@ -52,7 +52,7 @@ def with_raw_response(self) -> AsyncChatResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncChatResourceWithRawResponse(self) @@ -61,7 +61,7 @@ def with_streaming_response(self) -> AsyncChatResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncChatResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/chat/completions.py b/src/gradient/resources/chat/completions.py similarity index 99% rename from src/do_gradientai/resources/chat/completions.py rename to src/gradient/resources/chat/completions.py index ff5c25b8..b1147020 100644 --- a/src/do_gradientai/resources/chat/completions.py +++ b/src/gradient/resources/chat/completions.py @@ -33,7 +33,7 @@ def with_raw_response(self) -> CompletionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return CompletionsResourceWithRawResponse(self) @@ -42,7 +42,7 @@ def with_streaming_response(self) -> CompletionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return CompletionsResourceWithStreamingResponse(self) @@ -515,7 +515,7 @@ def with_raw_response(self) -> AsyncCompletionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncCompletionsResourceWithRawResponse(self) @@ -524,7 +524,7 @@ def with_streaming_response(self) -> AsyncCompletionsResourceWithStreamingRespon """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncCompletionsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/databases/__init__.py b/src/gradient/resources/databases/__init__.py similarity index 100% rename from src/do_gradientai/resources/databases/__init__.py rename to src/gradient/resources/databases/__init__.py diff --git a/src/do_gradientai/resources/databases/databases.py b/src/gradient/resources/databases/databases.py similarity index 94% rename from src/do_gradientai/resources/databases/databases.py rename to src/gradient/resources/databases/databases.py index e1f990d5..120ab91f 100644 --- a/src/do_gradientai/resources/databases/databases.py +++ b/src/gradient/resources/databases/databases.py @@ -27,7 +27,7 @@ def with_raw_response(self) -> DatabasesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return DatabasesResourceWithRawResponse(self) @@ -36,7 +36,7 @@ def with_streaming_response(self) -> DatabasesResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return DatabasesResourceWithStreamingResponse(self) @@ -52,7 +52,7 @@ def with_raw_response(self) -> AsyncDatabasesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncDatabasesResourceWithRawResponse(self) @@ -61,7 +61,7 @@ def with_streaming_response(self) -> AsyncDatabasesResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncDatabasesResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/databases/schema_registry/__init__.py b/src/gradient/resources/databases/schema_registry/__init__.py similarity index 100% rename from src/do_gradientai/resources/databases/schema_registry/__init__.py rename to src/gradient/resources/databases/schema_registry/__init__.py diff --git a/src/do_gradientai/resources/databases/schema_registry/config.py b/src/gradient/resources/databases/schema_registry/config.py similarity index 99% rename from src/do_gradientai/resources/databases/schema_registry/config.py rename to src/gradient/resources/databases/schema_registry/config.py index a815b84e..f9c0d8d0 100644 --- a/src/do_gradientai/resources/databases/schema_registry/config.py +++ b/src/gradient/resources/databases/schema_registry/config.py @@ -33,7 +33,7 @@ def with_raw_response(self) -> ConfigResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return ConfigResourceWithRawResponse(self) @@ -42,7 +42,7 @@ def with_streaming_response(self) -> ConfigResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return ConfigResourceWithStreamingResponse(self) @@ -236,7 +236,7 @@ def with_raw_response(self) -> AsyncConfigResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncConfigResourceWithRawResponse(self) @@ -245,7 +245,7 @@ def with_streaming_response(self) -> AsyncConfigResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncConfigResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/databases/schema_registry/schema_registry.py b/src/gradient/resources/databases/schema_registry/schema_registry.py similarity index 94% rename from src/do_gradientai/resources/databases/schema_registry/schema_registry.py rename to src/gradient/resources/databases/schema_registry/schema_registry.py index 6a0a44fb..dd7d3dbe 100644 --- a/src/do_gradientai/resources/databases/schema_registry/schema_registry.py +++ b/src/gradient/resources/databases/schema_registry/schema_registry.py @@ -27,7 +27,7 @@ def with_raw_response(self) -> SchemaRegistryResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return SchemaRegistryResourceWithRawResponse(self) @@ -36,7 +36,7 @@ def with_streaming_response(self) -> SchemaRegistryResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return SchemaRegistryResourceWithStreamingResponse(self) @@ -52,7 +52,7 @@ def with_raw_response(self) -> AsyncSchemaRegistryResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncSchemaRegistryResourceWithRawResponse(self) @@ -61,7 +61,7 @@ def with_streaming_response(self) -> AsyncSchemaRegistryResourceWithStreamingRes """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncSchemaRegistryResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/__init__.py b/src/gradient/resources/gpu_droplets/__init__.py similarity index 100% rename from src/do_gradientai/resources/gpu_droplets/__init__.py rename to src/gradient/resources/gpu_droplets/__init__.py diff --git a/src/do_gradientai/resources/gpu_droplets/account/__init__.py b/src/gradient/resources/gpu_droplets/account/__init__.py similarity index 100% rename from src/do_gradientai/resources/gpu_droplets/account/__init__.py rename to src/gradient/resources/gpu_droplets/account/__init__.py diff --git a/src/do_gradientai/resources/gpu_droplets/account/account.py b/src/gradient/resources/gpu_droplets/account/account.py similarity index 93% rename from src/do_gradientai/resources/gpu_droplets/account/account.py rename to src/gradient/resources/gpu_droplets/account/account.py index d61fb68b..5bcaf269 100644 --- a/src/do_gradientai/resources/gpu_droplets/account/account.py +++ b/src/gradient/resources/gpu_droplets/account/account.py @@ -27,7 +27,7 @@ def with_raw_response(self) -> AccountResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AccountResourceWithRawResponse(self) @@ -36,7 +36,7 @@ def with_streaming_response(self) -> AccountResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AccountResourceWithStreamingResponse(self) @@ -52,7 +52,7 @@ def with_raw_response(self) -> AsyncAccountResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncAccountResourceWithRawResponse(self) @@ -61,7 +61,7 @@ def with_streaming_response(self) -> AsyncAccountResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncAccountResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/account/keys.py b/src/gradient/resources/gpu_droplets/account/keys.py similarity index 99% rename from src/do_gradientai/resources/gpu_droplets/account/keys.py rename to src/gradient/resources/gpu_droplets/account/keys.py index 66d3bd55..f5cd4120 100644 --- a/src/do_gradientai/resources/gpu_droplets/account/keys.py +++ b/src/gradient/resources/gpu_droplets/account/keys.py @@ -33,7 +33,7 @@ def with_raw_response(self) -> KeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return KeysResourceWithRawResponse(self) @@ -42,7 +42,7 @@ def with_streaming_response(self) -> KeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return KeysResourceWithStreamingResponse(self) @@ -272,7 +272,7 @@ def with_raw_response(self) -> AsyncKeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncKeysResourceWithRawResponse(self) @@ -281,7 +281,7 @@ def with_streaming_response(self) -> AsyncKeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncKeysResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/actions.py b/src/gradient/resources/gpu_droplets/actions.py similarity index 99% rename from src/do_gradientai/resources/gpu_droplets/actions.py rename to src/gradient/resources/gpu_droplets/actions.py index 197b2ce7..715fb076 100644 --- a/src/do_gradientai/resources/gpu_droplets/actions.py +++ b/src/gradient/resources/gpu_droplets/actions.py @@ -35,7 +35,7 @@ def with_raw_response(self) -> ActionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return ActionsResourceWithRawResponse(self) @@ -44,7 +44,7 @@ def with_streaming_response(self) -> ActionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return ActionsResourceWithStreamingResponse(self) @@ -1008,7 +1008,7 @@ def with_raw_response(self) -> AsyncActionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncActionsResourceWithRawResponse(self) @@ -1017,7 +1017,7 @@ def with_streaming_response(self) -> AsyncActionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncActionsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/autoscale.py b/src/gradient/resources/gpu_droplets/autoscale.py similarity index 99% rename from src/do_gradientai/resources/gpu_droplets/autoscale.py rename to src/gradient/resources/gpu_droplets/autoscale.py index a1a72430..342256f6 100644 --- a/src/do_gradientai/resources/gpu_droplets/autoscale.py +++ b/src/gradient/resources/gpu_droplets/autoscale.py @@ -40,7 +40,7 @@ def with_raw_response(self) -> AutoscaleResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AutoscaleResourceWithRawResponse(self) @@ -49,7 +49,7 @@ def with_streaming_response(self) -> AutoscaleResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AutoscaleResourceWithStreamingResponse(self) @@ -447,7 +447,7 @@ def with_raw_response(self) -> AsyncAutoscaleResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncAutoscaleResourceWithRawResponse(self) @@ -456,7 +456,7 @@ def with_streaming_response(self) -> AsyncAutoscaleResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncAutoscaleResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/backups.py b/src/gradient/resources/gpu_droplets/backups.py similarity index 98% rename from src/do_gradientai/resources/gpu_droplets/backups.py rename to src/gradient/resources/gpu_droplets/backups.py index 06fca19e..9f20a047 100644 --- a/src/do_gradientai/resources/gpu_droplets/backups.py +++ b/src/gradient/resources/gpu_droplets/backups.py @@ -31,7 +31,7 @@ def with_raw_response(self) -> BackupsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return BackupsResourceWithRawResponse(self) @@ -40,7 +40,7 @@ def with_streaming_response(self) -> BackupsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return BackupsResourceWithStreamingResponse(self) @@ -213,7 +213,7 @@ def with_raw_response(self) -> AsyncBackupsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncBackupsResourceWithRawResponse(self) @@ -222,7 +222,7 @@ def with_streaming_response(self) -> AsyncBackupsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncBackupsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/destroy_with_associated_resources.py b/src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py similarity index 99% rename from src/do_gradientai/resources/gpu_droplets/destroy_with_associated_resources.py rename to src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py index 46db6563..2f3b90cf 100644 --- a/src/do_gradientai/resources/gpu_droplets/destroy_with_associated_resources.py +++ b/src/gradient/resources/gpu_droplets/destroy_with_associated_resources.py @@ -35,7 +35,7 @@ def with_raw_response(self) -> DestroyWithAssociatedResourcesResourceWithRawResp This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return DestroyWithAssociatedResourcesResourceWithRawResponse(self) @@ -44,7 +44,7 @@ def with_streaming_response(self) -> DestroyWithAssociatedResourcesResourceWithS """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return DestroyWithAssociatedResourcesResourceWithStreamingResponse(self) @@ -291,7 +291,7 @@ def with_raw_response(self) -> AsyncDestroyWithAssociatedResourcesResourceWithRa This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncDestroyWithAssociatedResourcesResourceWithRawResponse(self) @@ -300,7 +300,7 @@ def with_streaming_response(self) -> AsyncDestroyWithAssociatedResourcesResource """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncDestroyWithAssociatedResourcesResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/firewalls/__init__.py b/src/gradient/resources/gpu_droplets/firewalls/__init__.py similarity index 100% rename from src/do_gradientai/resources/gpu_droplets/firewalls/__init__.py rename to src/gradient/resources/gpu_droplets/firewalls/__init__.py diff --git a/src/do_gradientai/resources/gpu_droplets/firewalls/droplets.py b/src/gradient/resources/gpu_droplets/firewalls/droplets.py similarity index 98% rename from src/do_gradientai/resources/gpu_droplets/firewalls/droplets.py rename to src/gradient/resources/gpu_droplets/firewalls/droplets.py index 025d1ba4..b25aa3e3 100644 --- a/src/do_gradientai/resources/gpu_droplets/firewalls/droplets.py +++ b/src/gradient/resources/gpu_droplets/firewalls/droplets.py @@ -29,7 +29,7 @@ def with_raw_response(self) -> DropletsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return DropletsResourceWithRawResponse(self) @@ -38,7 +38,7 @@ def with_streaming_response(self) -> DropletsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return DropletsResourceWithStreamingResponse(self) @@ -142,7 +142,7 @@ def with_raw_response(self) -> AsyncDropletsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncDropletsResourceWithRawResponse(self) @@ -151,7 +151,7 @@ def with_streaming_response(self) -> AsyncDropletsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncDropletsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/firewalls/firewalls.py b/src/gradient/resources/gpu_droplets/firewalls/firewalls.py similarity index 99% rename from src/do_gradientai/resources/gpu_droplets/firewalls/firewalls.py rename to src/gradient/resources/gpu_droplets/firewalls/firewalls.py index a6c21928..116cde8d 100644 --- a/src/do_gradientai/resources/gpu_droplets/firewalls/firewalls.py +++ b/src/gradient/resources/gpu_droplets/firewalls/firewalls.py @@ -68,7 +68,7 @@ def with_raw_response(self) -> FirewallsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return FirewallsResourceWithRawResponse(self) @@ -77,7 +77,7 @@ def with_streaming_response(self) -> FirewallsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return FirewallsResourceWithStreamingResponse(self) @@ -301,7 +301,7 @@ def with_raw_response(self) -> AsyncFirewallsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncFirewallsResourceWithRawResponse(self) @@ -310,7 +310,7 @@ def with_streaming_response(self) -> AsyncFirewallsResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncFirewallsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/firewalls/rules.py b/src/gradient/resources/gpu_droplets/firewalls/rules.py similarity index 98% rename from src/do_gradientai/resources/gpu_droplets/firewalls/rules.py rename to src/gradient/resources/gpu_droplets/firewalls/rules.py index 61026779..d3a77cd9 100644 --- a/src/do_gradientai/resources/gpu_droplets/firewalls/rules.py +++ b/src/gradient/resources/gpu_droplets/firewalls/rules.py @@ -29,7 +29,7 @@ def with_raw_response(self) -> RulesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return RulesResourceWithRawResponse(self) @@ -38,7 +38,7 @@ def with_streaming_response(self) -> RulesResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return RulesResourceWithStreamingResponse(self) @@ -154,7 +154,7 @@ def with_raw_response(self) -> AsyncRulesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncRulesResourceWithRawResponse(self) @@ -163,7 +163,7 @@ def with_streaming_response(self) -> AsyncRulesResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncRulesResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/firewalls/tags.py b/src/gradient/resources/gpu_droplets/firewalls/tags.py similarity index 98% rename from src/do_gradientai/resources/gpu_droplets/firewalls/tags.py rename to src/gradient/resources/gpu_droplets/firewalls/tags.py index 725bc014..dc66c72f 100644 --- a/src/do_gradientai/resources/gpu_droplets/firewalls/tags.py +++ b/src/gradient/resources/gpu_droplets/firewalls/tags.py @@ -29,7 +29,7 @@ def with_raw_response(self) -> TagsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return TagsResourceWithRawResponse(self) @@ -38,7 +38,7 @@ def with_streaming_response(self) -> TagsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return TagsResourceWithStreamingResponse(self) @@ -148,7 +148,7 @@ def with_raw_response(self) -> AsyncTagsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncTagsResourceWithRawResponse(self) @@ -157,7 +157,7 @@ def with_streaming_response(self) -> AsyncTagsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncTagsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/floating_ips/__init__.py b/src/gradient/resources/gpu_droplets/floating_ips/__init__.py similarity index 100% rename from src/do_gradientai/resources/gpu_droplets/floating_ips/__init__.py rename to src/gradient/resources/gpu_droplets/floating_ips/__init__.py diff --git a/src/do_gradientai/resources/gpu_droplets/floating_ips/actions.py b/src/gradient/resources/gpu_droplets/floating_ips/actions.py similarity index 98% rename from src/do_gradientai/resources/gpu_droplets/floating_ips/actions.py rename to src/gradient/resources/gpu_droplets/floating_ips/actions.py index 7ba3899d..ecf88993 100644 --- a/src/do_gradientai/resources/gpu_droplets/floating_ips/actions.py +++ b/src/gradient/resources/gpu_droplets/floating_ips/actions.py @@ -32,7 +32,7 @@ def with_raw_response(self) -> ActionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return ActionsResourceWithRawResponse(self) @@ -41,7 +41,7 @@ def with_streaming_response(self) -> ActionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return ActionsResourceWithStreamingResponse(self) @@ -234,7 +234,7 @@ def with_raw_response(self) -> AsyncActionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncActionsResourceWithRawResponse(self) @@ -243,7 +243,7 @@ def with_streaming_response(self) -> AsyncActionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncActionsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/floating_ips/floating_ips.py b/src/gradient/resources/gpu_droplets/floating_ips/floating_ips.py similarity index 99% rename from src/do_gradientai/resources/gpu_droplets/floating_ips/floating_ips.py rename to src/gradient/resources/gpu_droplets/floating_ips/floating_ips.py index cabe012e..f70f153f 100644 --- a/src/do_gradientai/resources/gpu_droplets/floating_ips/floating_ips.py +++ b/src/gradient/resources/gpu_droplets/floating_ips/floating_ips.py @@ -44,7 +44,7 @@ def with_raw_response(self) -> FloatingIPsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return FloatingIPsResourceWithRawResponse(self) @@ -53,7 +53,7 @@ def with_streaming_response(self) -> FloatingIPsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return FloatingIPsResourceWithStreamingResponse(self) @@ -301,7 +301,7 @@ def with_raw_response(self) -> AsyncFloatingIPsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncFloatingIPsResourceWithRawResponse(self) @@ -310,7 +310,7 @@ def with_streaming_response(self) -> AsyncFloatingIPsResourceWithStreamingRespon """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncFloatingIPsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/gpu_droplets.py b/src/gradient/resources/gpu_droplets/gpu_droplets.py similarity index 99% rename from src/do_gradientai/resources/gpu_droplets/gpu_droplets.py rename to src/gradient/resources/gpu_droplets/gpu_droplets.py index cbb07830..0ce55ba8 100644 --- a/src/do_gradientai/resources/gpu_droplets/gpu_droplets.py +++ b/src/gradient/resources/gpu_droplets/gpu_droplets.py @@ -189,7 +189,7 @@ def with_raw_response(self) -> GPUDropletsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return GPUDropletsResourceWithRawResponse(self) @@ -198,7 +198,7 @@ def with_streaming_response(self) -> GPUDropletsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return GPUDropletsResourceWithStreamingResponse(self) @@ -960,7 +960,7 @@ def with_raw_response(self) -> AsyncGPUDropletsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncGPUDropletsResourceWithRawResponse(self) @@ -969,7 +969,7 @@ def with_streaming_response(self) -> AsyncGPUDropletsResourceWithStreamingRespon """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncGPUDropletsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/images/__init__.py b/src/gradient/resources/gpu_droplets/images/__init__.py similarity index 100% rename from src/do_gradientai/resources/gpu_droplets/images/__init__.py rename to src/gradient/resources/gpu_droplets/images/__init__.py diff --git a/src/do_gradientai/resources/gpu_droplets/images/actions.py b/src/gradient/resources/gpu_droplets/images/actions.py similarity index 98% rename from src/do_gradientai/resources/gpu_droplets/images/actions.py rename to src/gradient/resources/gpu_droplets/images/actions.py index 9428418b..287558ca 100644 --- a/src/do_gradientai/resources/gpu_droplets/images/actions.py +++ b/src/gradient/resources/gpu_droplets/images/actions.py @@ -31,7 +31,7 @@ def with_raw_response(self) -> ActionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return ActionsResourceWithRawResponse(self) @@ -40,7 +40,7 @@ def with_streaming_response(self) -> ActionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return ActionsResourceWithStreamingResponse(self) @@ -269,7 +269,7 @@ def with_raw_response(self) -> AsyncActionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncActionsResourceWithRawResponse(self) @@ -278,7 +278,7 @@ def with_streaming_response(self) -> AsyncActionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncActionsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/images/images.py b/src/gradient/resources/gpu_droplets/images/images.py similarity index 99% rename from src/do_gradientai/resources/gpu_droplets/images/images.py rename to src/gradient/resources/gpu_droplets/images/images.py index 2c70e793..09994263 100644 --- a/src/do_gradientai/resources/gpu_droplets/images/images.py +++ b/src/gradient/resources/gpu_droplets/images/images.py @@ -46,7 +46,7 @@ def with_raw_response(self) -> ImagesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return ImagesResourceWithRawResponse(self) @@ -55,7 +55,7 @@ def with_streaming_response(self) -> ImagesResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return ImagesResourceWithStreamingResponse(self) @@ -412,7 +412,7 @@ def with_raw_response(self) -> AsyncImagesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncImagesResourceWithRawResponse(self) @@ -421,7 +421,7 @@ def with_streaming_response(self) -> AsyncImagesResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncImagesResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/load_balancers/__init__.py b/src/gradient/resources/gpu_droplets/load_balancers/__init__.py similarity index 100% rename from src/do_gradientai/resources/gpu_droplets/load_balancers/__init__.py rename to src/gradient/resources/gpu_droplets/load_balancers/__init__.py diff --git a/src/do_gradientai/resources/gpu_droplets/load_balancers/droplets.py b/src/gradient/resources/gpu_droplets/load_balancers/droplets.py similarity index 98% rename from src/do_gradientai/resources/gpu_droplets/load_balancers/droplets.py rename to src/gradient/resources/gpu_droplets/load_balancers/droplets.py index 2553a729..4d8eb4c5 100644 --- a/src/do_gradientai/resources/gpu_droplets/load_balancers/droplets.py +++ b/src/gradient/resources/gpu_droplets/load_balancers/droplets.py @@ -29,7 +29,7 @@ def with_raw_response(self) -> DropletsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return DropletsResourceWithRawResponse(self) @@ -38,7 +38,7 @@ def with_streaming_response(self) -> DropletsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return DropletsResourceWithStreamingResponse(self) @@ -145,7 +145,7 @@ def with_raw_response(self) -> AsyncDropletsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncDropletsResourceWithRawResponse(self) @@ -154,7 +154,7 @@ def with_streaming_response(self) -> AsyncDropletsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncDropletsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/load_balancers/forwarding_rules.py b/src/gradient/resources/gpu_droplets/load_balancers/forwarding_rules.py similarity index 98% rename from src/do_gradientai/resources/gpu_droplets/load_balancers/forwarding_rules.py rename to src/gradient/resources/gpu_droplets/load_balancers/forwarding_rules.py index 2ba20f88..c4be65e7 100644 --- a/src/do_gradientai/resources/gpu_droplets/load_balancers/forwarding_rules.py +++ b/src/gradient/resources/gpu_droplets/load_balancers/forwarding_rules.py @@ -30,7 +30,7 @@ def with_raw_response(self) -> ForwardingRulesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return ForwardingRulesResourceWithRawResponse(self) @@ -39,7 +39,7 @@ def with_streaming_response(self) -> ForwardingRulesResourceWithStreamingRespons """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return ForwardingRulesResourceWithStreamingResponse(self) @@ -145,7 +145,7 @@ def with_raw_response(self) -> AsyncForwardingRulesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncForwardingRulesResourceWithRawResponse(self) @@ -154,7 +154,7 @@ def with_streaming_response(self) -> AsyncForwardingRulesResourceWithStreamingRe """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncForwardingRulesResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/load_balancers/load_balancers.py b/src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py similarity index 99% rename from src/do_gradientai/resources/gpu_droplets/load_balancers/load_balancers.py rename to src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py index c724b6d9..d876b50f 100644 --- a/src/do_gradientai/resources/gpu_droplets/load_balancers/load_balancers.py +++ b/src/gradient/resources/gpu_droplets/load_balancers/load_balancers.py @@ -68,7 +68,7 @@ def with_raw_response(self) -> LoadBalancersResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return LoadBalancersResourceWithRawResponse(self) @@ -77,7 +77,7 @@ def with_streaming_response(self) -> LoadBalancersResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return LoadBalancersResourceWithStreamingResponse(self) @@ -1080,7 +1080,7 @@ def with_raw_response(self) -> AsyncLoadBalancersResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncLoadBalancersResourceWithRawResponse(self) @@ -1089,7 +1089,7 @@ def with_streaming_response(self) -> AsyncLoadBalancersResourceWithStreamingResp """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncLoadBalancersResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/sizes.py b/src/gradient/resources/gpu_droplets/sizes.py similarity index 96% rename from src/do_gradientai/resources/gpu_droplets/sizes.py rename to src/gradient/resources/gpu_droplets/sizes.py index e37116c7..7cfc5629 100644 --- a/src/do_gradientai/resources/gpu_droplets/sizes.py +++ b/src/gradient/resources/gpu_droplets/sizes.py @@ -28,7 +28,7 @@ def with_raw_response(self) -> SizesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return SizesResourceWithRawResponse(self) @@ -37,7 +37,7 @@ def with_streaming_response(self) -> SizesResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return SizesResourceWithStreamingResponse(self) @@ -99,7 +99,7 @@ def with_raw_response(self) -> AsyncSizesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncSizesResourceWithRawResponse(self) @@ -108,7 +108,7 @@ def with_streaming_response(self) -> AsyncSizesResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncSizesResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/snapshots.py b/src/gradient/resources/gpu_droplets/snapshots.py similarity index 98% rename from src/do_gradientai/resources/gpu_droplets/snapshots.py rename to src/gradient/resources/gpu_droplets/snapshots.py index 081ab5b8..eed93cfd 100644 --- a/src/do_gradientai/resources/gpu_droplets/snapshots.py +++ b/src/gradient/resources/gpu_droplets/snapshots.py @@ -32,7 +32,7 @@ def with_raw_response(self) -> SnapshotsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return SnapshotsResourceWithRawResponse(self) @@ -41,7 +41,7 @@ def with_streaming_response(self) -> SnapshotsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return SnapshotsResourceWithStreamingResponse(self) @@ -202,7 +202,7 @@ def with_raw_response(self) -> AsyncSnapshotsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncSnapshotsResourceWithRawResponse(self) @@ -211,7 +211,7 @@ def with_streaming_response(self) -> AsyncSnapshotsResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncSnapshotsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/volumes/__init__.py b/src/gradient/resources/gpu_droplets/volumes/__init__.py similarity index 100% rename from src/do_gradientai/resources/gpu_droplets/volumes/__init__.py rename to src/gradient/resources/gpu_droplets/volumes/__init__.py diff --git a/src/do_gradientai/resources/gpu_droplets/volumes/actions.py b/src/gradient/resources/gpu_droplets/volumes/actions.py similarity index 99% rename from src/do_gradientai/resources/gpu_droplets/volumes/actions.py rename to src/gradient/resources/gpu_droplets/volumes/actions.py index 9d925397..2e093136 100644 --- a/src/do_gradientai/resources/gpu_droplets/volumes/actions.py +++ b/src/gradient/resources/gpu_droplets/volumes/actions.py @@ -39,7 +39,7 @@ def with_raw_response(self) -> ActionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return ActionsResourceWithRawResponse(self) @@ -48,7 +48,7 @@ def with_streaming_response(self) -> ActionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return ActionsResourceWithStreamingResponse(self) @@ -764,7 +764,7 @@ def with_raw_response(self) -> AsyncActionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncActionsResourceWithRawResponse(self) @@ -773,7 +773,7 @@ def with_streaming_response(self) -> AsyncActionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
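# A companion sketch for `.with_streaming_response`, which (per the docstring above) does not
# eagerly read the response body and is used as a context manager. Same assumptions as before
# about the `Gradient` client class and `regions.list()`; `iter_lines()` is the generic
# streamed-response iterator and is assumed to be available on this wrapper.
from gradient import Gradient

client = Gradient()

with client.regions.with_streaming_response.list() as response:
    print(response.headers.get("content-type"))  # headers can be read before the body is consumed
    for line in response.iter_lines():            # stream the body incrementally instead of buffering it
        print(line)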
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncActionsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/volumes/snapshots.py b/src/gradient/resources/gpu_droplets/volumes/snapshots.py similarity index 98% rename from src/do_gradientai/resources/gpu_droplets/volumes/snapshots.py rename to src/gradient/resources/gpu_droplets/volumes/snapshots.py index 766d9a3a..0f9e30fa 100644 --- a/src/do_gradientai/resources/gpu_droplets/volumes/snapshots.py +++ b/src/gradient/resources/gpu_droplets/volumes/snapshots.py @@ -32,7 +32,7 @@ def with_raw_response(self) -> SnapshotsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return SnapshotsResourceWithRawResponse(self) @@ -41,7 +41,7 @@ def with_streaming_response(self) -> SnapshotsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return SnapshotsResourceWithStreamingResponse(self) @@ -233,7 +233,7 @@ def with_raw_response(self) -> AsyncSnapshotsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncSnapshotsResourceWithRawResponse(self) @@ -242,7 +242,7 @@ def with_streaming_response(self) -> AsyncSnapshotsResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncSnapshotsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/gpu_droplets/volumes/volumes.py b/src/gradient/resources/gpu_droplets/volumes/volumes.py similarity index 99% rename from src/do_gradientai/resources/gpu_droplets/volumes/volumes.py rename to src/gradient/resources/gpu_droplets/volumes/volumes.py index efd1d4ae..ada4aedf 100644 --- a/src/do_gradientai/resources/gpu_droplets/volumes/volumes.py +++ b/src/gradient/resources/gpu_droplets/volumes/volumes.py @@ -57,7 +57,7 @@ def with_raw_response(self) -> VolumesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return VolumesResourceWithRawResponse(self) @@ -66,7 +66,7 @@ def with_streaming_response(self) -> VolumesResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return VolumesResourceWithStreamingResponse(self) @@ -550,7 +550,7 @@ def with_raw_response(self) -> AsyncVolumesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncVolumesResourceWithRawResponse(self) @@ -559,7 +559,7 @@ def with_streaming_response(self) -> AsyncVolumesResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncVolumesResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/inference/__init__.py b/src/gradient/resources/inference/__init__.py similarity index 100% rename from src/do_gradientai/resources/inference/__init__.py rename to src/gradient/resources/inference/__init__.py diff --git a/src/do_gradientai/resources/inference/api_keys.py b/src/gradient/resources/inference/api_keys.py similarity index 99% rename from src/do_gradientai/resources/inference/api_keys.py rename to src/gradient/resources/inference/api_keys.py index 238ef6f6..fa7f86dc 100644 --- a/src/do_gradientai/resources/inference/api_keys.py +++ b/src/gradient/resources/inference/api_keys.py @@ -32,7 +32,7 @@ def with_raw_response(self) -> APIKeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return APIKeysResourceWithRawResponse(self) @@ -41,7 +41,7 @@ def with_streaming_response(self) -> APIKeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return APIKeysResourceWithStreamingResponse(self) @@ -258,7 +258,7 @@ def with_raw_response(self) -> AsyncAPIKeysResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncAPIKeysResourceWithRawResponse(self) @@ -267,7 +267,7 @@ def with_streaming_response(self) -> AsyncAPIKeysResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncAPIKeysResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/inference/inference.py b/src/gradient/resources/inference/inference.py similarity index 94% rename from src/do_gradientai/resources/inference/inference.py rename to src/gradient/resources/inference/inference.py index a144bae0..d22543b3 100644 --- a/src/do_gradientai/resources/inference/inference.py +++ b/src/gradient/resources/inference/inference.py @@ -27,7 +27,7 @@ def with_raw_response(self) -> InferenceResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return InferenceResourceWithRawResponse(self) @@ -36,7 +36,7 @@ def with_streaming_response(self) -> InferenceResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return InferenceResourceWithStreamingResponse(self) @@ -52,7 +52,7 @@ def with_raw_response(self) -> AsyncInferenceResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncInferenceResourceWithRawResponse(self) @@ -61,7 +61,7 @@ def with_streaming_response(self) -> AsyncInferenceResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncInferenceResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/knowledge_bases/__init__.py b/src/gradient/resources/knowledge_bases/__init__.py similarity index 100% rename from src/do_gradientai/resources/knowledge_bases/__init__.py rename to src/gradient/resources/knowledge_bases/__init__.py diff --git a/src/do_gradientai/resources/knowledge_bases/data_sources.py b/src/gradient/resources/knowledge_bases/data_sources.py similarity index 98% rename from src/do_gradientai/resources/knowledge_bases/data_sources.py rename to src/gradient/resources/knowledge_bases/data_sources.py index 8357dfda..16252324 100644 --- a/src/do_gradientai/resources/knowledge_bases/data_sources.py +++ b/src/gradient/resources/knowledge_bases/data_sources.py @@ -36,7 +36,7 @@ def with_raw_response(self) -> DataSourcesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return DataSourcesResourceWithRawResponse(self) @@ -45,7 +45,7 @@ def with_streaming_response(self) -> DataSourcesResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return DataSourcesResourceWithStreamingResponse(self) @@ -211,7 +211,7 @@ def with_raw_response(self) -> AsyncDataSourcesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncDataSourcesResourceWithRawResponse(self) @@ -220,7 +220,7 @@ def with_streaming_response(self) -> AsyncDataSourcesResourceWithStreamingRespon """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncDataSourcesResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/knowledge_bases/indexing_jobs.py b/src/gradient/resources/knowledge_bases/indexing_jobs.py similarity index 99% rename from src/do_gradientai/resources/knowledge_bases/indexing_jobs.py rename to src/gradient/resources/knowledge_bases/indexing_jobs.py index 891acd0b..723b42f5 100644 --- a/src/do_gradientai/resources/knowledge_bases/indexing_jobs.py +++ b/src/gradient/resources/knowledge_bases/indexing_jobs.py @@ -38,7 +38,7 @@ def with_raw_response(self) -> IndexingJobsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return IndexingJobsResourceWithRawResponse(self) @@ -47,7 +47,7 @@ def with_streaming_response(self) -> IndexingJobsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return IndexingJobsResourceWithStreamingResponse(self) @@ -269,7 +269,7 @@ def with_raw_response(self) -> AsyncIndexingJobsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncIndexingJobsResourceWithRawResponse(self) @@ -278,7 +278,7 @@ def with_streaming_response(self) -> AsyncIndexingJobsResourceWithStreamingRespo """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncIndexingJobsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/knowledge_bases/knowledge_bases.py b/src/gradient/resources/knowledge_bases/knowledge_bases.py similarity index 99% rename from src/do_gradientai/resources/knowledge_bases/knowledge_bases.py rename to src/gradient/resources/knowledge_bases/knowledge_bases.py index c181295c..594b2ba7 100644 --- a/src/do_gradientai/resources/knowledge_bases/knowledge_bases.py +++ b/src/gradient/resources/knowledge_bases/knowledge_bases.py @@ -58,7 +58,7 @@ def with_raw_response(self) -> KnowledgeBasesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return KnowledgeBasesResourceWithRawResponse(self) @@ -67,7 +67,7 @@ def with_streaming_response(self) -> KnowledgeBasesResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return KnowledgeBasesResourceWithStreamingResponse(self) @@ -346,7 +346,7 @@ def with_raw_response(self) -> AsyncKnowledgeBasesResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncKnowledgeBasesResourceWithRawResponse(self) @@ -355,7 +355,7 @@ def with_streaming_response(self) -> AsyncKnowledgeBasesResourceWithStreamingRes """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncKnowledgeBasesResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/models/__init__.py b/src/gradient/resources/models/__init__.py similarity index 100% rename from src/do_gradientai/resources/models/__init__.py rename to src/gradient/resources/models/__init__.py diff --git a/src/do_gradientai/resources/models/models.py b/src/gradient/resources/models/models.py similarity index 97% rename from src/do_gradientai/resources/models/models.py rename to src/gradient/resources/models/models.py index 3800c03c..ffb5fea9 100644 --- a/src/do_gradientai/resources/models/models.py +++ b/src/gradient/resources/models/models.py @@ -43,7 +43,7 @@ def with_raw_response(self) -> ModelsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return ModelsResourceWithRawResponse(self) @@ -52,7 +52,7 @@ def with_streaming_response(self) -> ModelsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return ModelsResourceWithStreamingResponse(self) @@ -144,7 +144,7 @@ def with_raw_response(self) -> AsyncModelsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncModelsResourceWithRawResponse(self) @@ -153,7 +153,7 @@ def with_streaming_response(self) -> AsyncModelsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncModelsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/models/providers/__init__.py b/src/gradient/resources/models/providers/__init__.py similarity index 100% rename from src/do_gradientai/resources/models/providers/__init__.py rename to src/gradient/resources/models/providers/__init__.py diff --git a/src/do_gradientai/resources/models/providers/anthropic.py b/src/gradient/resources/models/providers/anthropic.py similarity index 99% rename from src/do_gradientai/resources/models/providers/anthropic.py rename to src/gradient/resources/models/providers/anthropic.py index e570be51..ddb0eef8 100644 --- a/src/do_gradientai/resources/models/providers/anthropic.py +++ b/src/gradient/resources/models/providers/anthropic.py @@ -38,7 +38,7 @@ def with_raw_response(self) -> AnthropicResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AnthropicResourceWithRawResponse(self) @@ -47,7 +47,7 @@ def with_streaming_response(self) -> AnthropicResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AnthropicResourceWithStreamingResponse(self) @@ -330,7 +330,7 @@ def with_raw_response(self) -> AsyncAnthropicResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncAnthropicResourceWithRawResponse(self) @@ -339,7 +339,7 @@ def with_streaming_response(self) -> AsyncAnthropicResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncAnthropicResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/models/providers/openai.py b/src/gradient/resources/models/providers/openai.py similarity index 99% rename from src/do_gradientai/resources/models/providers/openai.py rename to src/gradient/resources/models/providers/openai.py index ccd594b8..166e284d 100644 --- a/src/do_gradientai/resources/models/providers/openai.py +++ b/src/gradient/resources/models/providers/openai.py @@ -38,7 +38,7 @@ def with_raw_response(self) -> OpenAIResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return OpenAIResourceWithRawResponse(self) @@ -47,7 +47,7 @@ def with_streaming_response(self) -> OpenAIResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return OpenAIResourceWithStreamingResponse(self) @@ -328,7 +328,7 @@ def with_raw_response(self) -> AsyncOpenAIResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncOpenAIResourceWithRawResponse(self) @@ -337,7 +337,7 @@ def with_streaming_response(self) -> AsyncOpenAIResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncOpenAIResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/models/providers/providers.py b/src/gradient/resources/models/providers/providers.py similarity index 95% rename from src/do_gradientai/resources/models/providers/providers.py rename to src/gradient/resources/models/providers/providers.py index 3e3f4dde..efb71ec5 100644 --- a/src/do_gradientai/resources/models/providers/providers.py +++ b/src/gradient/resources/models/providers/providers.py @@ -39,7 +39,7 @@ def with_raw_response(self) -> ProvidersResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return ProvidersResourceWithRawResponse(self) @@ -48,7 +48,7 @@ def with_streaming_response(self) -> ProvidersResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return ProvidersResourceWithStreamingResponse(self) @@ -68,7 +68,7 @@ def with_raw_response(self) -> AsyncProvidersResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncProvidersResourceWithRawResponse(self) @@ -77,7 +77,7 @@ def with_streaming_response(self) -> AsyncProvidersResourceWithStreamingResponse """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncProvidersResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/resources/regions.py b/src/gradient/resources/regions.py similarity index 96% rename from src/do_gradientai/resources/regions.py rename to src/gradient/resources/regions.py index e953e4f3..779bd4dd 100644 --- a/src/do_gradientai/resources/regions.py +++ b/src/gradient/resources/regions.py @@ -28,7 +28,7 @@ def with_raw_response(self) -> RegionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. - For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return RegionsResourceWithRawResponse(self) @@ -37,7 +37,7 @@ def with_streaming_response(self) -> RegionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return RegionsResourceWithStreamingResponse(self) @@ -98,7 +98,7 @@ def with_raw_response(self) -> AsyncRegionsResourceWithRawResponse: This property can be used as a prefix for any HTTP method call to return the raw response object instead of the parsed content. 
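# The renames in this section change only the top-level package name (do_gradientai -> gradient);
# module paths under resources/ and types/ are unchanged. A downstream import therefore migrates
# as sketched here (the re-export is visible in the src/gradient/types/__init__.py hunk below):
#
#     from do_gradientai.types import APIOpenAIAPIKeyInfo   # old package name
from gradient.types import APIOpenAIAPIKeyInfo               # new package name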
- For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers + For more information, see https://www.github.com/digitalocean/gradient-python#accessing-raw-response-data-eg-headers """ return AsyncRegionsResourceWithRawResponse(self) @@ -107,7 +107,7 @@ def with_streaming_response(self) -> AsyncRegionsResourceWithStreamingResponse: """ An alternative to `.with_raw_response` that doesn't eagerly read the response body. - For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response + For more information, see https://www.github.com/digitalocean/gradient-python#with_streaming_response """ return AsyncRegionsResourceWithStreamingResponse(self) diff --git a/src/do_gradientai/types/__init__.py b/src/gradient/types/__init__.py similarity index 90% rename from src/do_gradientai/types/__init__.py rename to src/gradient/types/__init__.py index 9470b525..d5486cba 100644 --- a/src/do_gradientai/types/__init__.py +++ b/src/gradient/types/__init__.py @@ -69,13 +69,7 @@ from .agent_retrieve_response import AgentRetrieveResponse as AgentRetrieveResponse from .api_openai_api_key_info import APIOpenAIAPIKeyInfo as APIOpenAIAPIKeyInfo from .gpu_droplet_list_params import GPUDropletListParams as GPUDropletListParams -from .agents.evaluation_metrics import ( - openai, # type: ignore # noqa: F401 - anthropic, # type: ignore # noqa: F401 -) -from .api_deployment_visibility import ( - APIDeploymentVisibility as APIDeploymentVisibility, -) +from .api_deployment_visibility import APIDeploymentVisibility as APIDeploymentVisibility from .gpu_droplet_create_params import GPUDropletCreateParams as GPUDropletCreateParams from .gpu_droplet_list_response import GPUDropletListResponse as GPUDropletListResponse from .agent_update_status_params import ( @@ -161,59 +155,31 @@ api_agent.APIAgent.model_rebuild(_parent_namespace_depth=0) api_workspace.APIWorkspace.model_rebuild(_parent_namespace_depth=0) agent_create_response.AgentCreateResponse.model_rebuild(_parent_namespace_depth=0) - agent_retrieve_response.AgentRetrieveResponse.model_rebuild( - _parent_namespace_depth=0 - ) + agent_retrieve_response.AgentRetrieveResponse.model_rebuild(_parent_namespace_depth=0) agent_update_response.AgentUpdateResponse.model_rebuild(_parent_namespace_depth=0) agent_delete_response.AgentDeleteResponse.model_rebuild(_parent_namespace_depth=0) - agent_update_status_response.AgentUpdateStatusResponse.model_rebuild( - _parent_namespace_depth=0 - ) - agents.evaluation_metrics.workspace_create_response.WorkspaceCreateResponse.model_rebuild( - _parent_namespace_depth=0 - ) + agent_update_status_response.AgentUpdateStatusResponse.model_rebuild(_parent_namespace_depth=0) + agents.evaluation_metrics.workspace_create_response.WorkspaceCreateResponse.model_rebuild(_parent_namespace_depth=0) agents.evaluation_metrics.workspace_retrieve_response.WorkspaceRetrieveResponse.model_rebuild( _parent_namespace_depth=0 ) - agents.evaluation_metrics.workspace_update_response.WorkspaceUpdateResponse.model_rebuild( - _parent_namespace_depth=0 - ) - agents.evaluation_metrics.workspace_list_response.WorkspaceListResponse.model_rebuild( - _parent_namespace_depth=0 - ) - agents.evaluation_metrics.workspaces.agent_list_response.AgentListResponse.model_rebuild( - _parent_namespace_depth=0 - ) - agents.evaluation_metrics.workspaces.agent_move_response.AgentMoveResponse.model_rebuild( - _parent_namespace_depth=0 - ) + 
agents.evaluation_metrics.workspace_update_response.WorkspaceUpdateResponse.model_rebuild(_parent_namespace_depth=0) + agents.evaluation_metrics.workspace_list_response.WorkspaceListResponse.model_rebuild(_parent_namespace_depth=0) + agents.evaluation_metrics.workspaces.agent_list_response.AgentListResponse.model_rebuild(_parent_namespace_depth=0) + agents.evaluation_metrics.workspaces.agent_move_response.AgentMoveResponse.model_rebuild(_parent_namespace_depth=0) agents.evaluation_metrics.anthropic.key_list_agents_response.KeyListAgentsResponse.model_rebuild( _parent_namespace_depth=0 ) agents.evaluation_metrics.openai.key_list_agents_response.KeyListAgentsResponse.model_rebuild( _parent_namespace_depth=0 ) - agents.function_create_response.FunctionCreateResponse.model_rebuild( - _parent_namespace_depth=0 - ) - agents.function_update_response.FunctionUpdateResponse.model_rebuild( - _parent_namespace_depth=0 - ) - agents.function_delete_response.FunctionDeleteResponse.model_rebuild( - _parent_namespace_depth=0 - ) - agents.api_link_knowledge_base_output.APILinkKnowledgeBaseOutput.model_rebuild( - _parent_namespace_depth=0 - ) - agents.knowledge_base_detach_response.KnowledgeBaseDetachResponse.model_rebuild( - _parent_namespace_depth=0 - ) - agents.route_view_response.RouteViewResponse.model_rebuild( - _parent_namespace_depth=0 - ) - models.providers.anthropic_list_agents_response.AnthropicListAgentsResponse.model_rebuild( - _parent_namespace_depth=0 - ) + agents.function_create_response.FunctionCreateResponse.model_rebuild(_parent_namespace_depth=0) + agents.function_update_response.FunctionUpdateResponse.model_rebuild(_parent_namespace_depth=0) + agents.function_delete_response.FunctionDeleteResponse.model_rebuild(_parent_namespace_depth=0) + agents.api_link_knowledge_base_output.APILinkKnowledgeBaseOutput.model_rebuild(_parent_namespace_depth=0) + agents.knowledge_base_detach_response.KnowledgeBaseDetachResponse.model_rebuild(_parent_namespace_depth=0) + agents.route_view_response.RouteViewResponse.model_rebuild(_parent_namespace_depth=0) + models.providers.anthropic_list_agents_response.AnthropicListAgentsResponse.model_rebuild(_parent_namespace_depth=0) models.providers.openai_retrieve_agents_response.OpenAIRetrieveAgentsResponse.model_rebuild( _parent_namespace_depth=0 ) diff --git a/src/do_gradientai/types/agent_create_params.py b/src/gradient/types/agent_create_params.py similarity index 100% rename from src/do_gradientai/types/agent_create_params.py rename to src/gradient/types/agent_create_params.py diff --git a/src/do_gradientai/types/agent_create_response.py b/src/gradient/types/agent_create_response.py similarity index 100% rename from src/do_gradientai/types/agent_create_response.py rename to src/gradient/types/agent_create_response.py diff --git a/src/do_gradientai/types/agent_delete_response.py b/src/gradient/types/agent_delete_response.py similarity index 100% rename from src/do_gradientai/types/agent_delete_response.py rename to src/gradient/types/agent_delete_response.py diff --git a/src/do_gradientai/types/agent_list_params.py b/src/gradient/types/agent_list_params.py similarity index 100% rename from src/do_gradientai/types/agent_list_params.py rename to src/gradient/types/agent_list_params.py diff --git a/src/do_gradientai/types/agent_list_response.py b/src/gradient/types/agent_list_response.py similarity index 100% rename from src/do_gradientai/types/agent_list_response.py rename to src/gradient/types/agent_list_response.py diff --git 
diff --git a/src/do_gradientai/types/agent_retrieve_response.py b/src/gradient/types/agent_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/agent_retrieve_response.py rename to src/gradient/types/agent_retrieve_response.py diff --git a/src/do_gradientai/types/agent_update_params.py b/src/gradient/types/agent_update_params.py similarity index 100% rename from src/do_gradientai/types/agent_update_params.py rename to src/gradient/types/agent_update_params.py diff --git a/src/do_gradientai/types/agent_update_response.py b/src/gradient/types/agent_update_response.py similarity index 100% rename from src/do_gradientai/types/agent_update_response.py rename to src/gradient/types/agent_update_response.py diff --git a/src/do_gradientai/types/agent_update_status_params.py b/src/gradient/types/agent_update_status_params.py similarity index 100% rename from src/do_gradientai/types/agent_update_status_params.py rename to src/gradient/types/agent_update_status_params.py diff --git a/src/do_gradientai/types/agent_update_status_response.py b/src/gradient/types/agent_update_status_response.py similarity index 100% rename from src/do_gradientai/types/agent_update_status_response.py rename to src/gradient/types/agent_update_status_response.py diff --git a/src/do_gradientai/types/agents/__init__.py b/src/gradient/types/agents/__init__.py similarity index 100% rename from src/do_gradientai/types/agents/__init__.py rename to src/gradient/types/agents/__init__.py diff --git a/src/do_gradientai/types/agents/api_evaluation_metric.py b/src/gradient/types/agents/api_evaluation_metric.py similarity index 100% rename from src/do_gradientai/types/agents/api_evaluation_metric.py rename to src/gradient/types/agents/api_evaluation_metric.py diff --git a/src/do_gradientai/types/agents/api_evaluation_metric_result.py b/src/gradient/types/agents/api_evaluation_metric_result.py similarity index 100% rename from src/do_gradientai/types/agents/api_evaluation_metric_result.py rename to src/gradient/types/agents/api_evaluation_metric_result.py diff --git a/src/do_gradientai/types/agents/api_evaluation_prompt.py b/src/gradient/types/agents/api_evaluation_prompt.py similarity index 100% rename from src/do_gradientai/types/agents/api_evaluation_prompt.py rename to src/gradient/types/agents/api_evaluation_prompt.py diff --git a/src/do_gradientai/types/agents/api_evaluation_run.py b/src/gradient/types/agents/api_evaluation_run.py similarity index 100% rename from src/do_gradientai/types/agents/api_evaluation_run.py rename to src/gradient/types/agents/api_evaluation_run.py diff --git a/src/do_gradientai/types/agents/api_evaluation_test_case.py b/src/gradient/types/agents/api_evaluation_test_case.py similarity index 100% rename from src/do_gradientai/types/agents/api_evaluation_test_case.py rename to src/gradient/types/agents/api_evaluation_test_case.py diff --git a/src/do_gradientai/types/agents/api_key_create_params.py b/src/gradient/types/agents/api_key_create_params.py similarity index 100% rename from src/do_gradientai/types/agents/api_key_create_params.py rename to src/gradient/types/agents/api_key_create_params.py diff --git a/src/do_gradientai/types/agents/api_key_create_response.py b/src/gradient/types/agents/api_key_create_response.py similarity index 100% rename from src/do_gradientai/types/agents/api_key_create_response.py rename to src/gradient/types/agents/api_key_create_response.py diff --git a/src/do_gradientai/types/agents/api_key_delete_response.py
b/src/gradient/types/agents/api_key_delete_response.py similarity index 100% rename from src/do_gradientai/types/agents/api_key_delete_response.py rename to src/gradient/types/agents/api_key_delete_response.py diff --git a/src/do_gradientai/types/agents/api_key_list_params.py b/src/gradient/types/agents/api_key_list_params.py similarity index 100% rename from src/do_gradientai/types/agents/api_key_list_params.py rename to src/gradient/types/agents/api_key_list_params.py diff --git a/src/do_gradientai/types/agents/api_key_list_response.py b/src/gradient/types/agents/api_key_list_response.py similarity index 100% rename from src/do_gradientai/types/agents/api_key_list_response.py rename to src/gradient/types/agents/api_key_list_response.py diff --git a/src/do_gradientai/types/agents/api_key_regenerate_response.py b/src/gradient/types/agents/api_key_regenerate_response.py similarity index 100% rename from src/do_gradientai/types/agents/api_key_regenerate_response.py rename to src/gradient/types/agents/api_key_regenerate_response.py diff --git a/src/do_gradientai/types/agents/api_key_update_params.py b/src/gradient/types/agents/api_key_update_params.py similarity index 100% rename from src/do_gradientai/types/agents/api_key_update_params.py rename to src/gradient/types/agents/api_key_update_params.py diff --git a/src/do_gradientai/types/agents/api_key_update_response.py b/src/gradient/types/agents/api_key_update_response.py similarity index 100% rename from src/do_gradientai/types/agents/api_key_update_response.py rename to src/gradient/types/agents/api_key_update_response.py diff --git a/src/do_gradientai/types/agents/api_link_knowledge_base_output.py b/src/gradient/types/agents/api_link_knowledge_base_output.py similarity index 100% rename from src/do_gradientai/types/agents/api_link_knowledge_base_output.py rename to src/gradient/types/agents/api_link_knowledge_base_output.py diff --git a/src/do_gradientai/types/agents/api_star_metric.py b/src/gradient/types/agents/api_star_metric.py similarity index 100% rename from src/do_gradientai/types/agents/api_star_metric.py rename to src/gradient/types/agents/api_star_metric.py diff --git a/src/do_gradientai/types/agents/api_star_metric_param.py b/src/gradient/types/agents/api_star_metric_param.py similarity index 100% rename from src/do_gradientai/types/agents/api_star_metric_param.py rename to src/gradient/types/agents/api_star_metric_param.py diff --git a/src/do_gradientai/types/agents/chat/__init__.py b/src/gradient/types/agents/chat/__init__.py similarity index 100% rename from src/do_gradientai/types/agents/chat/__init__.py rename to src/gradient/types/agents/chat/__init__.py diff --git a/src/do_gradientai/types/agents/chat/completion_create_params.py b/src/gradient/types/agents/chat/completion_create_params.py similarity index 100% rename from src/do_gradientai/types/agents/chat/completion_create_params.py rename to src/gradient/types/agents/chat/completion_create_params.py diff --git a/src/do_gradientai/types/agents/chat/completion_create_response.py b/src/gradient/types/agents/chat/completion_create_response.py similarity index 100% rename from src/do_gradientai/types/agents/chat/completion_create_response.py rename to src/gradient/types/agents/chat/completion_create_response.py diff --git a/src/do_gradientai/types/agents/evaluation_dataset_create_file_upload_presigned_urls_params.py b/src/gradient/types/agents/evaluation_dataset_create_file_upload_presigned_urls_params.py similarity index 100% rename from 
src/do_gradientai/types/agents/evaluation_dataset_create_file_upload_presigned_urls_params.py rename to src/gradient/types/agents/evaluation_dataset_create_file_upload_presigned_urls_params.py diff --git a/src/do_gradientai/types/agents/evaluation_dataset_create_file_upload_presigned_urls_response.py b/src/gradient/types/agents/evaluation_dataset_create_file_upload_presigned_urls_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_dataset_create_file_upload_presigned_urls_response.py rename to src/gradient/types/agents/evaluation_dataset_create_file_upload_presigned_urls_response.py diff --git a/src/do_gradientai/types/agents/evaluation_dataset_create_params.py b/src/gradient/types/agents/evaluation_dataset_create_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_dataset_create_params.py rename to src/gradient/types/agents/evaluation_dataset_create_params.py diff --git a/src/do_gradientai/types/agents/evaluation_dataset_create_response.py b/src/gradient/types/agents/evaluation_dataset_create_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_dataset_create_response.py rename to src/gradient/types/agents/evaluation_dataset_create_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metric_list_regions_params.py b/src/gradient/types/agents/evaluation_metric_list_regions_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metric_list_regions_params.py rename to src/gradient/types/agents/evaluation_metric_list_regions_params.py diff --git a/src/do_gradientai/types/agents/evaluation_metric_list_regions_response.py b/src/gradient/types/agents/evaluation_metric_list_regions_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metric_list_regions_response.py rename to src/gradient/types/agents/evaluation_metric_list_regions_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metric_list_response.py b/src/gradient/types/agents/evaluation_metric_list_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metric_list_response.py rename to src/gradient/types/agents/evaluation_metric_list_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/__init__.py b/src/gradient/types/agents/evaluation_metrics/__init__.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/__init__.py rename to src/gradient/types/agents/evaluation_metrics/__init__.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/anthropic/__init__.py b/src/gradient/types/agents/evaluation_metrics/anthropic/__init__.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/anthropic/__init__.py rename to src/gradient/types/agents/evaluation_metrics/anthropic/__init__.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_create_params.py b/src/gradient/types/agents/evaluation_metrics/anthropic/key_create_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_create_params.py rename to src/gradient/types/agents/evaluation_metrics/anthropic/key_create_params.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_create_response.py b/src/gradient/types/agents/evaluation_metrics/anthropic/key_create_response.py similarity index 100% rename from 
src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_create_response.py rename to src/gradient/types/agents/evaluation_metrics/anthropic/key_create_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_delete_response.py b/src/gradient/types/agents/evaluation_metrics/anthropic/key_delete_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_delete_response.py rename to src/gradient/types/agents/evaluation_metrics/anthropic/key_delete_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_list_agents_params.py b/src/gradient/types/agents/evaluation_metrics/anthropic/key_list_agents_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_list_agents_params.py rename to src/gradient/types/agents/evaluation_metrics/anthropic/key_list_agents_params.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_list_agents_response.py b/src/gradient/types/agents/evaluation_metrics/anthropic/key_list_agents_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_list_agents_response.py rename to src/gradient/types/agents/evaluation_metrics/anthropic/key_list_agents_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_list_params.py b/src/gradient/types/agents/evaluation_metrics/anthropic/key_list_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_list_params.py rename to src/gradient/types/agents/evaluation_metrics/anthropic/key_list_params.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_list_response.py b/src/gradient/types/agents/evaluation_metrics/anthropic/key_list_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_list_response.py rename to src/gradient/types/agents/evaluation_metrics/anthropic/key_list_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_retrieve_response.py b/src/gradient/types/agents/evaluation_metrics/anthropic/key_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_retrieve_response.py rename to src/gradient/types/agents/evaluation_metrics/anthropic/key_retrieve_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_update_params.py b/src/gradient/types/agents/evaluation_metrics/anthropic/key_update_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_update_params.py rename to src/gradient/types/agents/evaluation_metrics/anthropic/key_update_params.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_update_response.py b/src/gradient/types/agents/evaluation_metrics/anthropic/key_update_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/anthropic/key_update_response.py rename to src/gradient/types/agents/evaluation_metrics/anthropic/key_update_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/model_list_params.py b/src/gradient/types/agents/evaluation_metrics/model_list_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/model_list_params.py rename to 
src/gradient/types/agents/evaluation_metrics/model_list_params.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/model_list_response.py b/src/gradient/types/agents/evaluation_metrics/model_list_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/model_list_response.py rename to src/gradient/types/agents/evaluation_metrics/model_list_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/openai/__init__.py b/src/gradient/types/agents/evaluation_metrics/openai/__init__.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/openai/__init__.py rename to src/gradient/types/agents/evaluation_metrics/openai/__init__.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/openai/key_create_params.py b/src/gradient/types/agents/evaluation_metrics/openai/key_create_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/openai/key_create_params.py rename to src/gradient/types/agents/evaluation_metrics/openai/key_create_params.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/openai/key_create_response.py b/src/gradient/types/agents/evaluation_metrics/openai/key_create_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/openai/key_create_response.py rename to src/gradient/types/agents/evaluation_metrics/openai/key_create_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/openai/key_delete_response.py b/src/gradient/types/agents/evaluation_metrics/openai/key_delete_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/openai/key_delete_response.py rename to src/gradient/types/agents/evaluation_metrics/openai/key_delete_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/openai/key_list_agents_params.py b/src/gradient/types/agents/evaluation_metrics/openai/key_list_agents_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/openai/key_list_agents_params.py rename to src/gradient/types/agents/evaluation_metrics/openai/key_list_agents_params.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/openai/key_list_agents_response.py b/src/gradient/types/agents/evaluation_metrics/openai/key_list_agents_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/openai/key_list_agents_response.py rename to src/gradient/types/agents/evaluation_metrics/openai/key_list_agents_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/openai/key_list_params.py b/src/gradient/types/agents/evaluation_metrics/openai/key_list_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/openai/key_list_params.py rename to src/gradient/types/agents/evaluation_metrics/openai/key_list_params.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/openai/key_list_response.py b/src/gradient/types/agents/evaluation_metrics/openai/key_list_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/openai/key_list_response.py rename to src/gradient/types/agents/evaluation_metrics/openai/key_list_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/openai/key_retrieve_response.py b/src/gradient/types/agents/evaluation_metrics/openai/key_retrieve_response.py similarity index 100% rename from 
src/do_gradientai/types/agents/evaluation_metrics/openai/key_retrieve_response.py rename to src/gradient/types/agents/evaluation_metrics/openai/key_retrieve_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/openai/key_update_params.py b/src/gradient/types/agents/evaluation_metrics/openai/key_update_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/openai/key_update_params.py rename to src/gradient/types/agents/evaluation_metrics/openai/key_update_params.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/openai/key_update_response.py b/src/gradient/types/agents/evaluation_metrics/openai/key_update_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/openai/key_update_response.py rename to src/gradient/types/agents/evaluation_metrics/openai/key_update_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/workspace_create_params.py b/src/gradient/types/agents/evaluation_metrics/workspace_create_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/workspace_create_params.py rename to src/gradient/types/agents/evaluation_metrics/workspace_create_params.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/workspace_create_response.py b/src/gradient/types/agents/evaluation_metrics/workspace_create_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/workspace_create_response.py rename to src/gradient/types/agents/evaluation_metrics/workspace_create_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/workspace_delete_response.py b/src/gradient/types/agents/evaluation_metrics/workspace_delete_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/workspace_delete_response.py rename to src/gradient/types/agents/evaluation_metrics/workspace_delete_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/workspace_list_evaluation_test_cases_response.py b/src/gradient/types/agents/evaluation_metrics/workspace_list_evaluation_test_cases_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/workspace_list_evaluation_test_cases_response.py rename to src/gradient/types/agents/evaluation_metrics/workspace_list_evaluation_test_cases_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/workspace_list_response.py b/src/gradient/types/agents/evaluation_metrics/workspace_list_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/workspace_list_response.py rename to src/gradient/types/agents/evaluation_metrics/workspace_list_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/workspace_retrieve_response.py b/src/gradient/types/agents/evaluation_metrics/workspace_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/workspace_retrieve_response.py rename to src/gradient/types/agents/evaluation_metrics/workspace_retrieve_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/workspace_update_params.py b/src/gradient/types/agents/evaluation_metrics/workspace_update_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/workspace_update_params.py rename to src/gradient/types/agents/evaluation_metrics/workspace_update_params.py diff --git 
a/src/do_gradientai/types/agents/evaluation_metrics/workspace_update_response.py b/src/gradient/types/agents/evaluation_metrics/workspace_update_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/workspace_update_response.py rename to src/gradient/types/agents/evaluation_metrics/workspace_update_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/workspaces/__init__.py b/src/gradient/types/agents/evaluation_metrics/workspaces/__init__.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/workspaces/__init__.py rename to src/gradient/types/agents/evaluation_metrics/workspaces/__init__.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/workspaces/agent_list_params.py b/src/gradient/types/agents/evaluation_metrics/workspaces/agent_list_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/workspaces/agent_list_params.py rename to src/gradient/types/agents/evaluation_metrics/workspaces/agent_list_params.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/workspaces/agent_list_response.py b/src/gradient/types/agents/evaluation_metrics/workspaces/agent_list_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/workspaces/agent_list_response.py rename to src/gradient/types/agents/evaluation_metrics/workspaces/agent_list_response.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/workspaces/agent_move_params.py b/src/gradient/types/agents/evaluation_metrics/workspaces/agent_move_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/workspaces/agent_move_params.py rename to src/gradient/types/agents/evaluation_metrics/workspaces/agent_move_params.py diff --git a/src/do_gradientai/types/agents/evaluation_metrics/workspaces/agent_move_response.py b/src/gradient/types/agents/evaluation_metrics/workspaces/agent_move_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_metrics/workspaces/agent_move_response.py rename to src/gradient/types/agents/evaluation_metrics/workspaces/agent_move_response.py diff --git a/src/do_gradientai/types/agents/evaluation_run_create_params.py b/src/gradient/types/agents/evaluation_run_create_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_run_create_params.py rename to src/gradient/types/agents/evaluation_run_create_params.py diff --git a/src/do_gradientai/types/agents/evaluation_run_create_response.py b/src/gradient/types/agents/evaluation_run_create_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_run_create_response.py rename to src/gradient/types/agents/evaluation_run_create_response.py diff --git a/src/do_gradientai/types/agents/evaluation_run_list_results_params.py b/src/gradient/types/agents/evaluation_run_list_results_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_run_list_results_params.py rename to src/gradient/types/agents/evaluation_run_list_results_params.py diff --git a/src/do_gradientai/types/agents/evaluation_run_list_results_response.py b/src/gradient/types/agents/evaluation_run_list_results_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_run_list_results_response.py rename to src/gradient/types/agents/evaluation_run_list_results_response.py diff --git 
a/src/do_gradientai/types/agents/evaluation_run_retrieve_response.py b/src/gradient/types/agents/evaluation_run_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_run_retrieve_response.py rename to src/gradient/types/agents/evaluation_run_retrieve_response.py diff --git a/src/do_gradientai/types/agents/evaluation_run_retrieve_results_response.py b/src/gradient/types/agents/evaluation_run_retrieve_results_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_run_retrieve_results_response.py rename to src/gradient/types/agents/evaluation_run_retrieve_results_response.py diff --git a/src/do_gradientai/types/agents/evaluation_test_case_create_params.py b/src/gradient/types/agents/evaluation_test_case_create_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_test_case_create_params.py rename to src/gradient/types/agents/evaluation_test_case_create_params.py diff --git a/src/do_gradientai/types/agents/evaluation_test_case_create_response.py b/src/gradient/types/agents/evaluation_test_case_create_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_test_case_create_response.py rename to src/gradient/types/agents/evaluation_test_case_create_response.py diff --git a/src/do_gradientai/types/agents/evaluation_test_case_list_evaluation_runs_params.py b/src/gradient/types/agents/evaluation_test_case_list_evaluation_runs_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_test_case_list_evaluation_runs_params.py rename to src/gradient/types/agents/evaluation_test_case_list_evaluation_runs_params.py diff --git a/src/do_gradientai/types/agents/evaluation_test_case_list_evaluation_runs_response.py b/src/gradient/types/agents/evaluation_test_case_list_evaluation_runs_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_test_case_list_evaluation_runs_response.py rename to src/gradient/types/agents/evaluation_test_case_list_evaluation_runs_response.py diff --git a/src/do_gradientai/types/agents/evaluation_test_case_list_response.py b/src/gradient/types/agents/evaluation_test_case_list_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_test_case_list_response.py rename to src/gradient/types/agents/evaluation_test_case_list_response.py diff --git a/src/do_gradientai/types/agents/evaluation_test_case_retrieve_params.py b/src/gradient/types/agents/evaluation_test_case_retrieve_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_test_case_retrieve_params.py rename to src/gradient/types/agents/evaluation_test_case_retrieve_params.py diff --git a/src/do_gradientai/types/agents/evaluation_test_case_retrieve_response.py b/src/gradient/types/agents/evaluation_test_case_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_test_case_retrieve_response.py rename to src/gradient/types/agents/evaluation_test_case_retrieve_response.py diff --git a/src/do_gradientai/types/agents/evaluation_test_case_update_params.py b/src/gradient/types/agents/evaluation_test_case_update_params.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_test_case_update_params.py rename to src/gradient/types/agents/evaluation_test_case_update_params.py diff --git a/src/do_gradientai/types/agents/evaluation_test_case_update_response.py 
b/src/gradient/types/agents/evaluation_test_case_update_response.py similarity index 100% rename from src/do_gradientai/types/agents/evaluation_test_case_update_response.py rename to src/gradient/types/agents/evaluation_test_case_update_response.py diff --git a/src/do_gradientai/types/agents/function_create_params.py b/src/gradient/types/agents/function_create_params.py similarity index 100% rename from src/do_gradientai/types/agents/function_create_params.py rename to src/gradient/types/agents/function_create_params.py diff --git a/src/do_gradientai/types/agents/function_create_response.py b/src/gradient/types/agents/function_create_response.py similarity index 100% rename from src/do_gradientai/types/agents/function_create_response.py rename to src/gradient/types/agents/function_create_response.py diff --git a/src/do_gradientai/types/agents/function_delete_response.py b/src/gradient/types/agents/function_delete_response.py similarity index 100% rename from src/do_gradientai/types/agents/function_delete_response.py rename to src/gradient/types/agents/function_delete_response.py diff --git a/src/do_gradientai/types/agents/function_update_params.py b/src/gradient/types/agents/function_update_params.py similarity index 100% rename from src/do_gradientai/types/agents/function_update_params.py rename to src/gradient/types/agents/function_update_params.py diff --git a/src/do_gradientai/types/agents/function_update_response.py b/src/gradient/types/agents/function_update_response.py similarity index 100% rename from src/do_gradientai/types/agents/function_update_response.py rename to src/gradient/types/agents/function_update_response.py diff --git a/src/do_gradientai/types/agents/knowledge_base_detach_response.py b/src/gradient/types/agents/knowledge_base_detach_response.py similarity index 100% rename from src/do_gradientai/types/agents/knowledge_base_detach_response.py rename to src/gradient/types/agents/knowledge_base_detach_response.py diff --git a/src/do_gradientai/types/agents/route_add_params.py b/src/gradient/types/agents/route_add_params.py similarity index 100% rename from src/do_gradientai/types/agents/route_add_params.py rename to src/gradient/types/agents/route_add_params.py diff --git a/src/do_gradientai/types/agents/route_add_response.py b/src/gradient/types/agents/route_add_response.py similarity index 100% rename from src/do_gradientai/types/agents/route_add_response.py rename to src/gradient/types/agents/route_add_response.py diff --git a/src/do_gradientai/types/agents/route_delete_response.py b/src/gradient/types/agents/route_delete_response.py similarity index 100% rename from src/do_gradientai/types/agents/route_delete_response.py rename to src/gradient/types/agents/route_delete_response.py diff --git a/src/do_gradientai/types/agents/route_update_params.py b/src/gradient/types/agents/route_update_params.py similarity index 100% rename from src/do_gradientai/types/agents/route_update_params.py rename to src/gradient/types/agents/route_update_params.py diff --git a/src/do_gradientai/types/agents/route_update_response.py b/src/gradient/types/agents/route_update_response.py similarity index 100% rename from src/do_gradientai/types/agents/route_update_response.py rename to src/gradient/types/agents/route_update_response.py diff --git a/src/do_gradientai/types/agents/route_view_response.py b/src/gradient/types/agents/route_view_response.py similarity index 100% rename from src/do_gradientai/types/agents/route_view_response.py rename to 
src/gradient/types/agents/route_view_response.py diff --git a/src/do_gradientai/types/agents/version_list_params.py b/src/gradient/types/agents/version_list_params.py similarity index 100% rename from src/do_gradientai/types/agents/version_list_params.py rename to src/gradient/types/agents/version_list_params.py diff --git a/src/do_gradientai/types/agents/version_list_response.py b/src/gradient/types/agents/version_list_response.py similarity index 100% rename from src/do_gradientai/types/agents/version_list_response.py rename to src/gradient/types/agents/version_list_response.py diff --git a/src/do_gradientai/types/agents/version_update_params.py b/src/gradient/types/agents/version_update_params.py similarity index 100% rename from src/do_gradientai/types/agents/version_update_params.py rename to src/gradient/types/agents/version_update_params.py diff --git a/src/do_gradientai/types/agents/version_update_response.py b/src/gradient/types/agents/version_update_response.py similarity index 100% rename from src/do_gradientai/types/agents/version_update_response.py rename to src/gradient/types/agents/version_update_response.py diff --git a/src/do_gradientai/types/api_agent.py b/src/gradient/types/api_agent.py similarity index 100% rename from src/do_gradientai/types/api_agent.py rename to src/gradient/types/api_agent.py diff --git a/src/do_gradientai/types/api_agent_api_key_info.py b/src/gradient/types/api_agent_api_key_info.py similarity index 100% rename from src/do_gradientai/types/api_agent_api_key_info.py rename to src/gradient/types/api_agent_api_key_info.py diff --git a/src/do_gradientai/types/api_agent_model.py b/src/gradient/types/api_agent_model.py similarity index 100% rename from src/do_gradientai/types/api_agent_model.py rename to src/gradient/types/api_agent_model.py diff --git a/src/do_gradientai/types/api_agreement.py b/src/gradient/types/api_agreement.py similarity index 100% rename from src/do_gradientai/types/api_agreement.py rename to src/gradient/types/api_agreement.py diff --git a/src/do_gradientai/types/api_anthropic_api_key_info.py b/src/gradient/types/api_anthropic_api_key_info.py similarity index 100% rename from src/do_gradientai/types/api_anthropic_api_key_info.py rename to src/gradient/types/api_anthropic_api_key_info.py diff --git a/src/do_gradientai/types/api_deployment_visibility.py b/src/gradient/types/api_deployment_visibility.py similarity index 100% rename from src/do_gradientai/types/api_deployment_visibility.py rename to src/gradient/types/api_deployment_visibility.py diff --git a/src/do_gradientai/types/api_knowledge_base.py b/src/gradient/types/api_knowledge_base.py similarity index 100% rename from src/do_gradientai/types/api_knowledge_base.py rename to src/gradient/types/api_knowledge_base.py diff --git a/src/do_gradientai/types/api_model.py b/src/gradient/types/api_model.py similarity index 100% rename from src/do_gradientai/types/api_model.py rename to src/gradient/types/api_model.py diff --git a/src/do_gradientai/types/api_model_version.py b/src/gradient/types/api_model_version.py similarity index 100% rename from src/do_gradientai/types/api_model_version.py rename to src/gradient/types/api_model_version.py diff --git a/src/do_gradientai/types/api_openai_api_key_info.py b/src/gradient/types/api_openai_api_key_info.py similarity index 100% rename from src/do_gradientai/types/api_openai_api_key_info.py rename to src/gradient/types/api_openai_api_key_info.py diff --git a/src/do_gradientai/types/api_retrieval_method.py 
b/src/gradient/types/api_retrieval_method.py similarity index 100% rename from src/do_gradientai/types/api_retrieval_method.py rename to src/gradient/types/api_retrieval_method.py diff --git a/src/do_gradientai/types/api_workspace.py b/src/gradient/types/api_workspace.py similarity index 100% rename from src/do_gradientai/types/api_workspace.py rename to src/gradient/types/api_workspace.py diff --git a/src/do_gradientai/types/chat/__init__.py b/src/gradient/types/chat/__init__.py similarity index 100% rename from src/do_gradientai/types/chat/__init__.py rename to src/gradient/types/chat/__init__.py diff --git a/src/do_gradientai/types/chat/completion_create_params.py b/src/gradient/types/chat/completion_create_params.py similarity index 100% rename from src/do_gradientai/types/chat/completion_create_params.py rename to src/gradient/types/chat/completion_create_params.py diff --git a/src/do_gradientai/types/chat/completion_create_response.py b/src/gradient/types/chat/completion_create_response.py similarity index 100% rename from src/do_gradientai/types/chat/completion_create_response.py rename to src/gradient/types/chat/completion_create_response.py diff --git a/src/do_gradientai/types/databases/__init__.py b/src/gradient/types/databases/__init__.py similarity index 100% rename from src/do_gradientai/types/databases/__init__.py rename to src/gradient/types/databases/__init__.py diff --git a/src/do_gradientai/types/databases/schema_registry/__init__.py b/src/gradient/types/databases/schema_registry/__init__.py similarity index 100% rename from src/do_gradientai/types/databases/schema_registry/__init__.py rename to src/gradient/types/databases/schema_registry/__init__.py diff --git a/src/do_gradientai/types/databases/schema_registry/config_retrieve_response.py b/src/gradient/types/databases/schema_registry/config_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/databases/schema_registry/config_retrieve_response.py rename to src/gradient/types/databases/schema_registry/config_retrieve_response.py diff --git a/src/do_gradientai/types/databases/schema_registry/config_retrieve_subject_response.py b/src/gradient/types/databases/schema_registry/config_retrieve_subject_response.py similarity index 100% rename from src/do_gradientai/types/databases/schema_registry/config_retrieve_subject_response.py rename to src/gradient/types/databases/schema_registry/config_retrieve_subject_response.py diff --git a/src/do_gradientai/types/databases/schema_registry/config_update_params.py b/src/gradient/types/databases/schema_registry/config_update_params.py similarity index 100% rename from src/do_gradientai/types/databases/schema_registry/config_update_params.py rename to src/gradient/types/databases/schema_registry/config_update_params.py diff --git a/src/do_gradientai/types/databases/schema_registry/config_update_response.py b/src/gradient/types/databases/schema_registry/config_update_response.py similarity index 100% rename from src/do_gradientai/types/databases/schema_registry/config_update_response.py rename to src/gradient/types/databases/schema_registry/config_update_response.py diff --git a/src/do_gradientai/types/databases/schema_registry/config_update_subject_params.py b/src/gradient/types/databases/schema_registry/config_update_subject_params.py similarity index 100% rename from src/do_gradientai/types/databases/schema_registry/config_update_subject_params.py rename to src/gradient/types/databases/schema_registry/config_update_subject_params.py diff --git 
a/src/do_gradientai/types/databases/schema_registry/config_update_subject_response.py b/src/gradient/types/databases/schema_registry/config_update_subject_response.py similarity index 100% rename from src/do_gradientai/types/databases/schema_registry/config_update_subject_response.py rename to src/gradient/types/databases/schema_registry/config_update_subject_response.py diff --git a/src/do_gradientai/types/droplet_backup_policy.py b/src/gradient/types/droplet_backup_policy.py similarity index 100% rename from src/do_gradientai/types/droplet_backup_policy.py rename to src/gradient/types/droplet_backup_policy.py diff --git a/src/do_gradientai/types/droplet_backup_policy_param.py b/src/gradient/types/droplet_backup_policy_param.py similarity index 100% rename from src/do_gradientai/types/droplet_backup_policy_param.py rename to src/gradient/types/droplet_backup_policy_param.py diff --git a/src/do_gradientai/types/gpu_droplet_create_params.py b/src/gradient/types/gpu_droplet_create_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplet_create_params.py rename to src/gradient/types/gpu_droplet_create_params.py diff --git a/src/do_gradientai/types/gpu_droplet_create_response.py b/src/gradient/types/gpu_droplet_create_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplet_create_response.py rename to src/gradient/types/gpu_droplet_create_response.py diff --git a/src/do_gradientai/types/gpu_droplet_delete_by_tag_params.py b/src/gradient/types/gpu_droplet_delete_by_tag_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplet_delete_by_tag_params.py rename to src/gradient/types/gpu_droplet_delete_by_tag_params.py diff --git a/src/do_gradientai/types/gpu_droplet_list_firewalls_params.py b/src/gradient/types/gpu_droplet_list_firewalls_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplet_list_firewalls_params.py rename to src/gradient/types/gpu_droplet_list_firewalls_params.py diff --git a/src/do_gradientai/types/gpu_droplet_list_firewalls_response.py b/src/gradient/types/gpu_droplet_list_firewalls_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplet_list_firewalls_response.py rename to src/gradient/types/gpu_droplet_list_firewalls_response.py diff --git a/src/do_gradientai/types/gpu_droplet_list_kernels_params.py b/src/gradient/types/gpu_droplet_list_kernels_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplet_list_kernels_params.py rename to src/gradient/types/gpu_droplet_list_kernels_params.py diff --git a/src/do_gradientai/types/gpu_droplet_list_kernels_response.py b/src/gradient/types/gpu_droplet_list_kernels_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplet_list_kernels_response.py rename to src/gradient/types/gpu_droplet_list_kernels_response.py diff --git a/src/do_gradientai/types/gpu_droplet_list_neighbors_response.py b/src/gradient/types/gpu_droplet_list_neighbors_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplet_list_neighbors_response.py rename to src/gradient/types/gpu_droplet_list_neighbors_response.py diff --git a/src/do_gradientai/types/gpu_droplet_list_params.py b/src/gradient/types/gpu_droplet_list_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplet_list_params.py rename to src/gradient/types/gpu_droplet_list_params.py diff --git a/src/do_gradientai/types/gpu_droplet_list_response.py b/src/gradient/types/gpu_droplet_list_response.py 
similarity index 100% rename from src/do_gradientai/types/gpu_droplet_list_response.py rename to src/gradient/types/gpu_droplet_list_response.py diff --git a/src/do_gradientai/types/gpu_droplet_list_snapshots_params.py b/src/gradient/types/gpu_droplet_list_snapshots_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplet_list_snapshots_params.py rename to src/gradient/types/gpu_droplet_list_snapshots_params.py diff --git a/src/do_gradientai/types/gpu_droplet_list_snapshots_response.py b/src/gradient/types/gpu_droplet_list_snapshots_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplet_list_snapshots_response.py rename to src/gradient/types/gpu_droplet_list_snapshots_response.py diff --git a/src/do_gradientai/types/gpu_droplet_retrieve_response.py b/src/gradient/types/gpu_droplet_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplet_retrieve_response.py rename to src/gradient/types/gpu_droplet_retrieve_response.py diff --git a/src/do_gradientai/types/gpu_droplets/__init__.py b/src/gradient/types/gpu_droplets/__init__.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/__init__.py rename to src/gradient/types/gpu_droplets/__init__.py diff --git a/src/do_gradientai/types/gpu_droplets/account/__init__.py b/src/gradient/types/gpu_droplets/account/__init__.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/account/__init__.py rename to src/gradient/types/gpu_droplets/account/__init__.py diff --git a/src/do_gradientai/types/gpu_droplets/account/key_create_params.py b/src/gradient/types/gpu_droplets/account/key_create_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/account/key_create_params.py rename to src/gradient/types/gpu_droplets/account/key_create_params.py diff --git a/src/do_gradientai/types/gpu_droplets/account/key_create_response.py b/src/gradient/types/gpu_droplets/account/key_create_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/account/key_create_response.py rename to src/gradient/types/gpu_droplets/account/key_create_response.py diff --git a/src/do_gradientai/types/gpu_droplets/account/key_list_params.py b/src/gradient/types/gpu_droplets/account/key_list_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/account/key_list_params.py rename to src/gradient/types/gpu_droplets/account/key_list_params.py diff --git a/src/do_gradientai/types/gpu_droplets/account/key_list_response.py b/src/gradient/types/gpu_droplets/account/key_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/account/key_list_response.py rename to src/gradient/types/gpu_droplets/account/key_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/account/key_retrieve_response.py b/src/gradient/types/gpu_droplets/account/key_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/account/key_retrieve_response.py rename to src/gradient/types/gpu_droplets/account/key_retrieve_response.py diff --git a/src/do_gradientai/types/gpu_droplets/account/key_update_params.py b/src/gradient/types/gpu_droplets/account/key_update_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/account/key_update_params.py rename to src/gradient/types/gpu_droplets/account/key_update_params.py diff --git a/src/do_gradientai/types/gpu_droplets/account/key_update_response.py 
b/src/gradient/types/gpu_droplets/account/key_update_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/account/key_update_response.py rename to src/gradient/types/gpu_droplets/account/key_update_response.py diff --git a/src/do_gradientai/types/gpu_droplets/action_bulk_initiate_params.py b/src/gradient/types/gpu_droplets/action_bulk_initiate_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/action_bulk_initiate_params.py rename to src/gradient/types/gpu_droplets/action_bulk_initiate_params.py diff --git a/src/do_gradientai/types/gpu_droplets/action_bulk_initiate_response.py b/src/gradient/types/gpu_droplets/action_bulk_initiate_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/action_bulk_initiate_response.py rename to src/gradient/types/gpu_droplets/action_bulk_initiate_response.py diff --git a/src/do_gradientai/types/gpu_droplets/action_initiate_params.py b/src/gradient/types/gpu_droplets/action_initiate_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/action_initiate_params.py rename to src/gradient/types/gpu_droplets/action_initiate_params.py diff --git a/src/do_gradientai/types/gpu_droplets/action_initiate_response.py b/src/gradient/types/gpu_droplets/action_initiate_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/action_initiate_response.py rename to src/gradient/types/gpu_droplets/action_initiate_response.py diff --git a/src/do_gradientai/types/gpu_droplets/action_list_params.py b/src/gradient/types/gpu_droplets/action_list_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/action_list_params.py rename to src/gradient/types/gpu_droplets/action_list_params.py diff --git a/src/do_gradientai/types/gpu_droplets/action_list_response.py b/src/gradient/types/gpu_droplets/action_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/action_list_response.py rename to src/gradient/types/gpu_droplets/action_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/action_retrieve_response.py b/src/gradient/types/gpu_droplets/action_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/action_retrieve_response.py rename to src/gradient/types/gpu_droplets/action_retrieve_response.py diff --git a/src/do_gradientai/types/gpu_droplets/associated_resource.py b/src/gradient/types/gpu_droplets/associated_resource.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/associated_resource.py rename to src/gradient/types/gpu_droplets/associated_resource.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_create_params.py b/src/gradient/types/gpu_droplets/autoscale_create_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_create_params.py rename to src/gradient/types/gpu_droplets/autoscale_create_params.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_create_response.py b/src/gradient/types/gpu_droplets/autoscale_create_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_create_response.py rename to src/gradient/types/gpu_droplets/autoscale_create_response.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_list_history_params.py b/src/gradient/types/gpu_droplets/autoscale_list_history_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_list_history_params.py rename 
to src/gradient/types/gpu_droplets/autoscale_list_history_params.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_list_history_response.py b/src/gradient/types/gpu_droplets/autoscale_list_history_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_list_history_response.py rename to src/gradient/types/gpu_droplets/autoscale_list_history_response.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_list_members_params.py b/src/gradient/types/gpu_droplets/autoscale_list_members_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_list_members_params.py rename to src/gradient/types/gpu_droplets/autoscale_list_members_params.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_list_members_response.py b/src/gradient/types/gpu_droplets/autoscale_list_members_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_list_members_response.py rename to src/gradient/types/gpu_droplets/autoscale_list_members_response.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_list_params.py b/src/gradient/types/gpu_droplets/autoscale_list_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_list_params.py rename to src/gradient/types/gpu_droplets/autoscale_list_params.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_list_response.py b/src/gradient/types/gpu_droplets/autoscale_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_list_response.py rename to src/gradient/types/gpu_droplets/autoscale_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_pool.py b/src/gradient/types/gpu_droplets/autoscale_pool.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_pool.py rename to src/gradient/types/gpu_droplets/autoscale_pool.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_pool_droplet_template.py b/src/gradient/types/gpu_droplets/autoscale_pool_droplet_template.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_pool_droplet_template.py rename to src/gradient/types/gpu_droplets/autoscale_pool_droplet_template.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_pool_droplet_template_param.py b/src/gradient/types/gpu_droplets/autoscale_pool_droplet_template_param.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_pool_droplet_template_param.py rename to src/gradient/types/gpu_droplets/autoscale_pool_droplet_template_param.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_pool_dynamic_config.py b/src/gradient/types/gpu_droplets/autoscale_pool_dynamic_config.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_pool_dynamic_config.py rename to src/gradient/types/gpu_droplets/autoscale_pool_dynamic_config.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_pool_dynamic_config_param.py b/src/gradient/types/gpu_droplets/autoscale_pool_dynamic_config_param.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_pool_dynamic_config_param.py rename to src/gradient/types/gpu_droplets/autoscale_pool_dynamic_config_param.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_pool_static_config.py b/src/gradient/types/gpu_droplets/autoscale_pool_static_config.py similarity index 100% rename from 
src/do_gradientai/types/gpu_droplets/autoscale_pool_static_config.py rename to src/gradient/types/gpu_droplets/autoscale_pool_static_config.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_pool_static_config_param.py b/src/gradient/types/gpu_droplets/autoscale_pool_static_config_param.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_pool_static_config_param.py rename to src/gradient/types/gpu_droplets/autoscale_pool_static_config_param.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_retrieve_response.py b/src/gradient/types/gpu_droplets/autoscale_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_retrieve_response.py rename to src/gradient/types/gpu_droplets/autoscale_retrieve_response.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_update_params.py b/src/gradient/types/gpu_droplets/autoscale_update_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_update_params.py rename to src/gradient/types/gpu_droplets/autoscale_update_params.py diff --git a/src/do_gradientai/types/gpu_droplets/autoscale_update_response.py b/src/gradient/types/gpu_droplets/autoscale_update_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/autoscale_update_response.py rename to src/gradient/types/gpu_droplets/autoscale_update_response.py diff --git a/src/do_gradientai/types/gpu_droplets/backup_list_params.py b/src/gradient/types/gpu_droplets/backup_list_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/backup_list_params.py rename to src/gradient/types/gpu_droplets/backup_list_params.py diff --git a/src/do_gradientai/types/gpu_droplets/backup_list_policies_params.py b/src/gradient/types/gpu_droplets/backup_list_policies_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/backup_list_policies_params.py rename to src/gradient/types/gpu_droplets/backup_list_policies_params.py diff --git a/src/do_gradientai/types/gpu_droplets/backup_list_policies_response.py b/src/gradient/types/gpu_droplets/backup_list_policies_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/backup_list_policies_response.py rename to src/gradient/types/gpu_droplets/backup_list_policies_response.py diff --git a/src/do_gradientai/types/gpu_droplets/backup_list_response.py b/src/gradient/types/gpu_droplets/backup_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/backup_list_response.py rename to src/gradient/types/gpu_droplets/backup_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/backup_list_supported_policies_response.py b/src/gradient/types/gpu_droplets/backup_list_supported_policies_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/backup_list_supported_policies_response.py rename to src/gradient/types/gpu_droplets/backup_list_supported_policies_response.py diff --git a/src/do_gradientai/types/gpu_droplets/backup_retrieve_policy_response.py b/src/gradient/types/gpu_droplets/backup_retrieve_policy_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/backup_retrieve_policy_response.py rename to src/gradient/types/gpu_droplets/backup_retrieve_policy_response.py diff --git a/src/do_gradientai/types/gpu_droplets/current_utilization.py b/src/gradient/types/gpu_droplets/current_utilization.py similarity index 100% rename from 
src/do_gradientai/types/gpu_droplets/current_utilization.py rename to src/gradient/types/gpu_droplets/current_utilization.py diff --git a/src/do_gradientai/types/gpu_droplets/destroy_with_associated_resource_check_status_response.py b/src/gradient/types/gpu_droplets/destroy_with_associated_resource_check_status_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/destroy_with_associated_resource_check_status_response.py rename to src/gradient/types/gpu_droplets/destroy_with_associated_resource_check_status_response.py diff --git a/src/do_gradientai/types/gpu_droplets/destroy_with_associated_resource_delete_selective_params.py b/src/gradient/types/gpu_droplets/destroy_with_associated_resource_delete_selective_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/destroy_with_associated_resource_delete_selective_params.py rename to src/gradient/types/gpu_droplets/destroy_with_associated_resource_delete_selective_params.py diff --git a/src/do_gradientai/types/gpu_droplets/destroy_with_associated_resource_list_response.py b/src/gradient/types/gpu_droplets/destroy_with_associated_resource_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/destroy_with_associated_resource_list_response.py rename to src/gradient/types/gpu_droplets/destroy_with_associated_resource_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/destroyed_associated_resource.py b/src/gradient/types/gpu_droplets/destroyed_associated_resource.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/destroyed_associated_resource.py rename to src/gradient/types/gpu_droplets/destroyed_associated_resource.py diff --git a/src/do_gradientai/types/gpu_droplets/domains.py b/src/gradient/types/gpu_droplets/domains.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/domains.py rename to src/gradient/types/gpu_droplets/domains.py diff --git a/src/do_gradientai/types/gpu_droplets/domains_param.py b/src/gradient/types/gpu_droplets/domains_param.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/domains_param.py rename to src/gradient/types/gpu_droplets/domains_param.py diff --git a/src/do_gradientai/types/gpu_droplets/firewall.py b/src/gradient/types/gpu_droplets/firewall.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewall.py rename to src/gradient/types/gpu_droplets/firewall.py diff --git a/src/do_gradientai/types/gpu_droplets/firewall_create_params.py b/src/gradient/types/gpu_droplets/firewall_create_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewall_create_params.py rename to src/gradient/types/gpu_droplets/firewall_create_params.py diff --git a/src/do_gradientai/types/gpu_droplets/firewall_create_response.py b/src/gradient/types/gpu_droplets/firewall_create_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewall_create_response.py rename to src/gradient/types/gpu_droplets/firewall_create_response.py diff --git a/src/do_gradientai/types/gpu_droplets/firewall_list_params.py b/src/gradient/types/gpu_droplets/firewall_list_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewall_list_params.py rename to src/gradient/types/gpu_droplets/firewall_list_params.py diff --git a/src/do_gradientai/types/gpu_droplets/firewall_list_response.py b/src/gradient/types/gpu_droplets/firewall_list_response.py similarity index 100% rename from 
src/do_gradientai/types/gpu_droplets/firewall_list_response.py rename to src/gradient/types/gpu_droplets/firewall_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/firewall_param.py b/src/gradient/types/gpu_droplets/firewall_param.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewall_param.py rename to src/gradient/types/gpu_droplets/firewall_param.py diff --git a/src/do_gradientai/types/gpu_droplets/firewall_retrieve_response.py b/src/gradient/types/gpu_droplets/firewall_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewall_retrieve_response.py rename to src/gradient/types/gpu_droplets/firewall_retrieve_response.py diff --git a/src/do_gradientai/types/gpu_droplets/firewall_update_params.py b/src/gradient/types/gpu_droplets/firewall_update_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewall_update_params.py rename to src/gradient/types/gpu_droplets/firewall_update_params.py diff --git a/src/do_gradientai/types/gpu_droplets/firewall_update_response.py b/src/gradient/types/gpu_droplets/firewall_update_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewall_update_response.py rename to src/gradient/types/gpu_droplets/firewall_update_response.py diff --git a/src/do_gradientai/types/gpu_droplets/firewalls/__init__.py b/src/gradient/types/gpu_droplets/firewalls/__init__.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewalls/__init__.py rename to src/gradient/types/gpu_droplets/firewalls/__init__.py diff --git a/src/do_gradientai/types/gpu_droplets/firewalls/droplet_add_params.py b/src/gradient/types/gpu_droplets/firewalls/droplet_add_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewalls/droplet_add_params.py rename to src/gradient/types/gpu_droplets/firewalls/droplet_add_params.py diff --git a/src/do_gradientai/types/gpu_droplets/firewalls/droplet_remove_params.py b/src/gradient/types/gpu_droplets/firewalls/droplet_remove_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewalls/droplet_remove_params.py rename to src/gradient/types/gpu_droplets/firewalls/droplet_remove_params.py diff --git a/src/do_gradientai/types/gpu_droplets/firewalls/rule_add_params.py b/src/gradient/types/gpu_droplets/firewalls/rule_add_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewalls/rule_add_params.py rename to src/gradient/types/gpu_droplets/firewalls/rule_add_params.py diff --git a/src/do_gradientai/types/gpu_droplets/firewalls/rule_remove_params.py b/src/gradient/types/gpu_droplets/firewalls/rule_remove_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewalls/rule_remove_params.py rename to src/gradient/types/gpu_droplets/firewalls/rule_remove_params.py diff --git a/src/do_gradientai/types/gpu_droplets/firewalls/tag_add_params.py b/src/gradient/types/gpu_droplets/firewalls/tag_add_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewalls/tag_add_params.py rename to src/gradient/types/gpu_droplets/firewalls/tag_add_params.py diff --git a/src/do_gradientai/types/gpu_droplets/firewalls/tag_remove_params.py b/src/gradient/types/gpu_droplets/firewalls/tag_remove_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/firewalls/tag_remove_params.py rename to src/gradient/types/gpu_droplets/firewalls/tag_remove_params.py diff --git 
a/src/do_gradientai/types/gpu_droplets/floating_ip.py b/src/gradient/types/gpu_droplets/floating_ip.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/floating_ip.py rename to src/gradient/types/gpu_droplets/floating_ip.py diff --git a/src/do_gradientai/types/gpu_droplets/floating_ip_create_params.py b/src/gradient/types/gpu_droplets/floating_ip_create_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/floating_ip_create_params.py rename to src/gradient/types/gpu_droplets/floating_ip_create_params.py diff --git a/src/do_gradientai/types/gpu_droplets/floating_ip_create_response.py b/src/gradient/types/gpu_droplets/floating_ip_create_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/floating_ip_create_response.py rename to src/gradient/types/gpu_droplets/floating_ip_create_response.py diff --git a/src/do_gradientai/types/gpu_droplets/floating_ip_list_params.py b/src/gradient/types/gpu_droplets/floating_ip_list_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/floating_ip_list_params.py rename to src/gradient/types/gpu_droplets/floating_ip_list_params.py diff --git a/src/do_gradientai/types/gpu_droplets/floating_ip_list_response.py b/src/gradient/types/gpu_droplets/floating_ip_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/floating_ip_list_response.py rename to src/gradient/types/gpu_droplets/floating_ip_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/floating_ip_retrieve_response.py b/src/gradient/types/gpu_droplets/floating_ip_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/floating_ip_retrieve_response.py rename to src/gradient/types/gpu_droplets/floating_ip_retrieve_response.py diff --git a/src/do_gradientai/types/gpu_droplets/floating_ips/__init__.py b/src/gradient/types/gpu_droplets/floating_ips/__init__.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/floating_ips/__init__.py rename to src/gradient/types/gpu_droplets/floating_ips/__init__.py diff --git a/src/do_gradientai/types/gpu_droplets/floating_ips/action_create_params.py b/src/gradient/types/gpu_droplets/floating_ips/action_create_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/floating_ips/action_create_params.py rename to src/gradient/types/gpu_droplets/floating_ips/action_create_params.py diff --git a/src/do_gradientai/types/gpu_droplets/floating_ips/action_create_response.py b/src/gradient/types/gpu_droplets/floating_ips/action_create_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/floating_ips/action_create_response.py rename to src/gradient/types/gpu_droplets/floating_ips/action_create_response.py diff --git a/src/do_gradientai/types/gpu_droplets/floating_ips/action_list_response.py b/src/gradient/types/gpu_droplets/floating_ips/action_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/floating_ips/action_list_response.py rename to src/gradient/types/gpu_droplets/floating_ips/action_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/floating_ips/action_retrieve_response.py b/src/gradient/types/gpu_droplets/floating_ips/action_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/floating_ips/action_retrieve_response.py rename to src/gradient/types/gpu_droplets/floating_ips/action_retrieve_response.py diff --git 
a/src/do_gradientai/types/gpu_droplets/forwarding_rule.py b/src/gradient/types/gpu_droplets/forwarding_rule.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/forwarding_rule.py rename to src/gradient/types/gpu_droplets/forwarding_rule.py diff --git a/src/do_gradientai/types/gpu_droplets/forwarding_rule_param.py b/src/gradient/types/gpu_droplets/forwarding_rule_param.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/forwarding_rule_param.py rename to src/gradient/types/gpu_droplets/forwarding_rule_param.py diff --git a/src/do_gradientai/types/gpu_droplets/glb_settings.py b/src/gradient/types/gpu_droplets/glb_settings.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/glb_settings.py rename to src/gradient/types/gpu_droplets/glb_settings.py diff --git a/src/do_gradientai/types/gpu_droplets/glb_settings_param.py b/src/gradient/types/gpu_droplets/glb_settings_param.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/glb_settings_param.py rename to src/gradient/types/gpu_droplets/glb_settings_param.py diff --git a/src/do_gradientai/types/gpu_droplets/health_check.py b/src/gradient/types/gpu_droplets/health_check.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/health_check.py rename to src/gradient/types/gpu_droplets/health_check.py diff --git a/src/do_gradientai/types/gpu_droplets/health_check_param.py b/src/gradient/types/gpu_droplets/health_check_param.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/health_check_param.py rename to src/gradient/types/gpu_droplets/health_check_param.py diff --git a/src/do_gradientai/types/gpu_droplets/image_create_params.py b/src/gradient/types/gpu_droplets/image_create_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/image_create_params.py rename to src/gradient/types/gpu_droplets/image_create_params.py diff --git a/src/do_gradientai/types/gpu_droplets/image_create_response.py b/src/gradient/types/gpu_droplets/image_create_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/image_create_response.py rename to src/gradient/types/gpu_droplets/image_create_response.py diff --git a/src/do_gradientai/types/gpu_droplets/image_list_params.py b/src/gradient/types/gpu_droplets/image_list_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/image_list_params.py rename to src/gradient/types/gpu_droplets/image_list_params.py diff --git a/src/do_gradientai/types/gpu_droplets/image_list_response.py b/src/gradient/types/gpu_droplets/image_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/image_list_response.py rename to src/gradient/types/gpu_droplets/image_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/image_retrieve_response.py b/src/gradient/types/gpu_droplets/image_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/image_retrieve_response.py rename to src/gradient/types/gpu_droplets/image_retrieve_response.py diff --git a/src/do_gradientai/types/gpu_droplets/image_update_params.py b/src/gradient/types/gpu_droplets/image_update_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/image_update_params.py rename to src/gradient/types/gpu_droplets/image_update_params.py diff --git a/src/do_gradientai/types/gpu_droplets/image_update_response.py b/src/gradient/types/gpu_droplets/image_update_response.py similarity index 
100% rename from src/do_gradientai/types/gpu_droplets/image_update_response.py rename to src/gradient/types/gpu_droplets/image_update_response.py diff --git a/src/do_gradientai/types/gpu_droplets/images/__init__.py b/src/gradient/types/gpu_droplets/images/__init__.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/images/__init__.py rename to src/gradient/types/gpu_droplets/images/__init__.py diff --git a/src/do_gradientai/types/gpu_droplets/images/action_create_params.py b/src/gradient/types/gpu_droplets/images/action_create_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/images/action_create_params.py rename to src/gradient/types/gpu_droplets/images/action_create_params.py diff --git a/src/do_gradientai/types/gpu_droplets/images/action_list_response.py b/src/gradient/types/gpu_droplets/images/action_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/images/action_list_response.py rename to src/gradient/types/gpu_droplets/images/action_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/lb_firewall.py b/src/gradient/types/gpu_droplets/lb_firewall.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/lb_firewall.py rename to src/gradient/types/gpu_droplets/lb_firewall.py diff --git a/src/do_gradientai/types/gpu_droplets/lb_firewall_param.py b/src/gradient/types/gpu_droplets/lb_firewall_param.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/lb_firewall_param.py rename to src/gradient/types/gpu_droplets/lb_firewall_param.py diff --git a/src/do_gradientai/types/gpu_droplets/load_balancer.py b/src/gradient/types/gpu_droplets/load_balancer.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/load_balancer.py rename to src/gradient/types/gpu_droplets/load_balancer.py diff --git a/src/do_gradientai/types/gpu_droplets/load_balancer_create_params.py b/src/gradient/types/gpu_droplets/load_balancer_create_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/load_balancer_create_params.py rename to src/gradient/types/gpu_droplets/load_balancer_create_params.py diff --git a/src/do_gradientai/types/gpu_droplets/load_balancer_create_response.py b/src/gradient/types/gpu_droplets/load_balancer_create_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/load_balancer_create_response.py rename to src/gradient/types/gpu_droplets/load_balancer_create_response.py diff --git a/src/do_gradientai/types/gpu_droplets/load_balancer_list_params.py b/src/gradient/types/gpu_droplets/load_balancer_list_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/load_balancer_list_params.py rename to src/gradient/types/gpu_droplets/load_balancer_list_params.py diff --git a/src/do_gradientai/types/gpu_droplets/load_balancer_list_response.py b/src/gradient/types/gpu_droplets/load_balancer_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/load_balancer_list_response.py rename to src/gradient/types/gpu_droplets/load_balancer_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/load_balancer_retrieve_response.py b/src/gradient/types/gpu_droplets/load_balancer_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/load_balancer_retrieve_response.py rename to src/gradient/types/gpu_droplets/load_balancer_retrieve_response.py diff --git 
a/src/do_gradientai/types/gpu_droplets/load_balancer_update_params.py b/src/gradient/types/gpu_droplets/load_balancer_update_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/load_balancer_update_params.py rename to src/gradient/types/gpu_droplets/load_balancer_update_params.py diff --git a/src/do_gradientai/types/gpu_droplets/load_balancer_update_response.py b/src/gradient/types/gpu_droplets/load_balancer_update_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/load_balancer_update_response.py rename to src/gradient/types/gpu_droplets/load_balancer_update_response.py diff --git a/src/do_gradientai/types/gpu_droplets/load_balancers/__init__.py b/src/gradient/types/gpu_droplets/load_balancers/__init__.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/load_balancers/__init__.py rename to src/gradient/types/gpu_droplets/load_balancers/__init__.py diff --git a/src/do_gradientai/types/gpu_droplets/load_balancers/droplet_add_params.py b/src/gradient/types/gpu_droplets/load_balancers/droplet_add_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/load_balancers/droplet_add_params.py rename to src/gradient/types/gpu_droplets/load_balancers/droplet_add_params.py diff --git a/src/do_gradientai/types/gpu_droplets/load_balancers/droplet_remove_params.py b/src/gradient/types/gpu_droplets/load_balancers/droplet_remove_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/load_balancers/droplet_remove_params.py rename to src/gradient/types/gpu_droplets/load_balancers/droplet_remove_params.py diff --git a/src/do_gradientai/types/gpu_droplets/load_balancers/forwarding_rule_add_params.py b/src/gradient/types/gpu_droplets/load_balancers/forwarding_rule_add_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/load_balancers/forwarding_rule_add_params.py rename to src/gradient/types/gpu_droplets/load_balancers/forwarding_rule_add_params.py diff --git a/src/do_gradientai/types/gpu_droplets/load_balancers/forwarding_rule_remove_params.py b/src/gradient/types/gpu_droplets/load_balancers/forwarding_rule_remove_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/load_balancers/forwarding_rule_remove_params.py rename to src/gradient/types/gpu_droplets/load_balancers/forwarding_rule_remove_params.py diff --git a/src/do_gradientai/types/gpu_droplets/size_list_params.py b/src/gradient/types/gpu_droplets/size_list_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/size_list_params.py rename to src/gradient/types/gpu_droplets/size_list_params.py diff --git a/src/do_gradientai/types/gpu_droplets/size_list_response.py b/src/gradient/types/gpu_droplets/size_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/size_list_response.py rename to src/gradient/types/gpu_droplets/size_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/snapshot_list_params.py b/src/gradient/types/gpu_droplets/snapshot_list_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/snapshot_list_params.py rename to src/gradient/types/gpu_droplets/snapshot_list_params.py diff --git a/src/do_gradientai/types/gpu_droplets/snapshot_list_response.py b/src/gradient/types/gpu_droplets/snapshot_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/snapshot_list_response.py rename to 
src/gradient/types/gpu_droplets/snapshot_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/snapshot_retrieve_response.py b/src/gradient/types/gpu_droplets/snapshot_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/snapshot_retrieve_response.py rename to src/gradient/types/gpu_droplets/snapshot_retrieve_response.py diff --git a/src/do_gradientai/types/gpu_droplets/sticky_sessions.py b/src/gradient/types/gpu_droplets/sticky_sessions.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/sticky_sessions.py rename to src/gradient/types/gpu_droplets/sticky_sessions.py diff --git a/src/do_gradientai/types/gpu_droplets/sticky_sessions_param.py b/src/gradient/types/gpu_droplets/sticky_sessions_param.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/sticky_sessions_param.py rename to src/gradient/types/gpu_droplets/sticky_sessions_param.py diff --git a/src/do_gradientai/types/gpu_droplets/volume_create_params.py b/src/gradient/types/gpu_droplets/volume_create_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volume_create_params.py rename to src/gradient/types/gpu_droplets/volume_create_params.py diff --git a/src/do_gradientai/types/gpu_droplets/volume_create_response.py b/src/gradient/types/gpu_droplets/volume_create_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volume_create_response.py rename to src/gradient/types/gpu_droplets/volume_create_response.py diff --git a/src/do_gradientai/types/gpu_droplets/volume_delete_by_name_params.py b/src/gradient/types/gpu_droplets/volume_delete_by_name_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volume_delete_by_name_params.py rename to src/gradient/types/gpu_droplets/volume_delete_by_name_params.py diff --git a/src/do_gradientai/types/gpu_droplets/volume_list_params.py b/src/gradient/types/gpu_droplets/volume_list_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volume_list_params.py rename to src/gradient/types/gpu_droplets/volume_list_params.py diff --git a/src/do_gradientai/types/gpu_droplets/volume_list_response.py b/src/gradient/types/gpu_droplets/volume_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volume_list_response.py rename to src/gradient/types/gpu_droplets/volume_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/volume_retrieve_response.py b/src/gradient/types/gpu_droplets/volume_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volume_retrieve_response.py rename to src/gradient/types/gpu_droplets/volume_retrieve_response.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/__init__.py b/src/gradient/types/gpu_droplets/volumes/__init__.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/__init__.py rename to src/gradient/types/gpu_droplets/volumes/__init__.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_id_params.py b/src/gradient/types/gpu_droplets/volumes/action_initiate_by_id_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_id_params.py rename to src/gradient/types/gpu_droplets/volumes/action_initiate_by_id_params.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_id_response.py b/src/gradient/types/gpu_droplets/volumes/action_initiate_by_id_response.py 
similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_id_response.py rename to src/gradient/types/gpu_droplets/volumes/action_initiate_by_id_response.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_name_params.py b/src/gradient/types/gpu_droplets/volumes/action_initiate_by_name_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_name_params.py rename to src/gradient/types/gpu_droplets/volumes/action_initiate_by_name_params.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_name_response.py b/src/gradient/types/gpu_droplets/volumes/action_initiate_by_name_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_name_response.py rename to src/gradient/types/gpu_droplets/volumes/action_initiate_by_name_response.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/action_list_params.py b/src/gradient/types/gpu_droplets/volumes/action_list_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/action_list_params.py rename to src/gradient/types/gpu_droplets/volumes/action_list_params.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/action_list_response.py b/src/gradient/types/gpu_droplets/volumes/action_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/action_list_response.py rename to src/gradient/types/gpu_droplets/volumes/action_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/action_retrieve_params.py b/src/gradient/types/gpu_droplets/volumes/action_retrieve_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/action_retrieve_params.py rename to src/gradient/types/gpu_droplets/volumes/action_retrieve_params.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/action_retrieve_response.py b/src/gradient/types/gpu_droplets/volumes/action_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/action_retrieve_response.py rename to src/gradient/types/gpu_droplets/volumes/action_retrieve_response.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/snapshot_create_params.py b/src/gradient/types/gpu_droplets/volumes/snapshot_create_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/snapshot_create_params.py rename to src/gradient/types/gpu_droplets/volumes/snapshot_create_params.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/snapshot_create_response.py b/src/gradient/types/gpu_droplets/volumes/snapshot_create_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/snapshot_create_response.py rename to src/gradient/types/gpu_droplets/volumes/snapshot_create_response.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/snapshot_list_params.py b/src/gradient/types/gpu_droplets/volumes/snapshot_list_params.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/snapshot_list_params.py rename to src/gradient/types/gpu_droplets/volumes/snapshot_list_params.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/snapshot_list_response.py b/src/gradient/types/gpu_droplets/volumes/snapshot_list_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/snapshot_list_response.py rename to 
src/gradient/types/gpu_droplets/volumes/snapshot_list_response.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/snapshot_retrieve_response.py b/src/gradient/types/gpu_droplets/volumes/snapshot_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/snapshot_retrieve_response.py rename to src/gradient/types/gpu_droplets/volumes/snapshot_retrieve_response.py diff --git a/src/do_gradientai/types/gpu_droplets/volumes/volume_action.py b/src/gradient/types/gpu_droplets/volumes/volume_action.py similarity index 100% rename from src/do_gradientai/types/gpu_droplets/volumes/volume_action.py rename to src/gradient/types/gpu_droplets/volumes/volume_action.py diff --git a/src/do_gradientai/types/inference/__init__.py b/src/gradient/types/inference/__init__.py similarity index 100% rename from src/do_gradientai/types/inference/__init__.py rename to src/gradient/types/inference/__init__.py diff --git a/src/do_gradientai/types/inference/api_key_create_params.py b/src/gradient/types/inference/api_key_create_params.py similarity index 100% rename from src/do_gradientai/types/inference/api_key_create_params.py rename to src/gradient/types/inference/api_key_create_params.py diff --git a/src/do_gradientai/types/inference/api_key_create_response.py b/src/gradient/types/inference/api_key_create_response.py similarity index 100% rename from src/do_gradientai/types/inference/api_key_create_response.py rename to src/gradient/types/inference/api_key_create_response.py diff --git a/src/do_gradientai/types/inference/api_key_delete_response.py b/src/gradient/types/inference/api_key_delete_response.py similarity index 100% rename from src/do_gradientai/types/inference/api_key_delete_response.py rename to src/gradient/types/inference/api_key_delete_response.py diff --git a/src/do_gradientai/types/inference/api_key_list_params.py b/src/gradient/types/inference/api_key_list_params.py similarity index 100% rename from src/do_gradientai/types/inference/api_key_list_params.py rename to src/gradient/types/inference/api_key_list_params.py diff --git a/src/do_gradientai/types/inference/api_key_list_response.py b/src/gradient/types/inference/api_key_list_response.py similarity index 100% rename from src/do_gradientai/types/inference/api_key_list_response.py rename to src/gradient/types/inference/api_key_list_response.py diff --git a/src/do_gradientai/types/inference/api_key_update_params.py b/src/gradient/types/inference/api_key_update_params.py similarity index 100% rename from src/do_gradientai/types/inference/api_key_update_params.py rename to src/gradient/types/inference/api_key_update_params.py diff --git a/src/do_gradientai/types/inference/api_key_update_regenerate_response.py b/src/gradient/types/inference/api_key_update_regenerate_response.py similarity index 100% rename from src/do_gradientai/types/inference/api_key_update_regenerate_response.py rename to src/gradient/types/inference/api_key_update_regenerate_response.py diff --git a/src/do_gradientai/types/inference/api_key_update_response.py b/src/gradient/types/inference/api_key_update_response.py similarity index 100% rename from src/do_gradientai/types/inference/api_key_update_response.py rename to src/gradient/types/inference/api_key_update_response.py diff --git a/src/do_gradientai/types/inference/api_model_api_key_info.py b/src/gradient/types/inference/api_model_api_key_info.py similarity index 100% rename from src/do_gradientai/types/inference/api_model_api_key_info.py rename to 
src/gradient/types/inference/api_model_api_key_info.py diff --git a/src/do_gradientai/types/knowledge_base_create_params.py b/src/gradient/types/knowledge_base_create_params.py similarity index 100% rename from src/do_gradientai/types/knowledge_base_create_params.py rename to src/gradient/types/knowledge_base_create_params.py diff --git a/src/do_gradientai/types/knowledge_base_create_response.py b/src/gradient/types/knowledge_base_create_response.py similarity index 100% rename from src/do_gradientai/types/knowledge_base_create_response.py rename to src/gradient/types/knowledge_base_create_response.py diff --git a/src/do_gradientai/types/knowledge_base_delete_response.py b/src/gradient/types/knowledge_base_delete_response.py similarity index 100% rename from src/do_gradientai/types/knowledge_base_delete_response.py rename to src/gradient/types/knowledge_base_delete_response.py diff --git a/src/do_gradientai/types/knowledge_base_list_params.py b/src/gradient/types/knowledge_base_list_params.py similarity index 100% rename from src/do_gradientai/types/knowledge_base_list_params.py rename to src/gradient/types/knowledge_base_list_params.py diff --git a/src/do_gradientai/types/knowledge_base_list_response.py b/src/gradient/types/knowledge_base_list_response.py similarity index 100% rename from src/do_gradientai/types/knowledge_base_list_response.py rename to src/gradient/types/knowledge_base_list_response.py diff --git a/src/do_gradientai/types/knowledge_base_retrieve_response.py b/src/gradient/types/knowledge_base_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/knowledge_base_retrieve_response.py rename to src/gradient/types/knowledge_base_retrieve_response.py diff --git a/src/do_gradientai/types/knowledge_base_update_params.py b/src/gradient/types/knowledge_base_update_params.py similarity index 100% rename from src/do_gradientai/types/knowledge_base_update_params.py rename to src/gradient/types/knowledge_base_update_params.py diff --git a/src/do_gradientai/types/knowledge_base_update_response.py b/src/gradient/types/knowledge_base_update_response.py similarity index 100% rename from src/do_gradientai/types/knowledge_base_update_response.py rename to src/gradient/types/knowledge_base_update_response.py diff --git a/src/do_gradientai/types/knowledge_bases/__init__.py b/src/gradient/types/knowledge_bases/__init__.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/__init__.py rename to src/gradient/types/knowledge_bases/__init__.py diff --git a/src/do_gradientai/types/knowledge_bases/api_file_upload_data_source.py b/src/gradient/types/knowledge_bases/api_file_upload_data_source.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/api_file_upload_data_source.py rename to src/gradient/types/knowledge_bases/api_file_upload_data_source.py diff --git a/src/do_gradientai/types/knowledge_bases/api_file_upload_data_source_param.py b/src/gradient/types/knowledge_bases/api_file_upload_data_source_param.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/api_file_upload_data_source_param.py rename to src/gradient/types/knowledge_bases/api_file_upload_data_source_param.py diff --git a/src/do_gradientai/types/knowledge_bases/api_indexed_data_source.py b/src/gradient/types/knowledge_bases/api_indexed_data_source.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/api_indexed_data_source.py rename to src/gradient/types/knowledge_bases/api_indexed_data_source.py diff --git 
a/src/do_gradientai/types/knowledge_bases/api_indexing_job.py b/src/gradient/types/knowledge_bases/api_indexing_job.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/api_indexing_job.py rename to src/gradient/types/knowledge_bases/api_indexing_job.py diff --git a/src/do_gradientai/types/knowledge_bases/api_knowledge_base_data_source.py b/src/gradient/types/knowledge_bases/api_knowledge_base_data_source.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/api_knowledge_base_data_source.py rename to src/gradient/types/knowledge_bases/api_knowledge_base_data_source.py diff --git a/src/do_gradientai/types/knowledge_bases/api_spaces_data_source.py b/src/gradient/types/knowledge_bases/api_spaces_data_source.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/api_spaces_data_source.py rename to src/gradient/types/knowledge_bases/api_spaces_data_source.py diff --git a/src/do_gradientai/types/knowledge_bases/api_spaces_data_source_param.py b/src/gradient/types/knowledge_bases/api_spaces_data_source_param.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/api_spaces_data_source_param.py rename to src/gradient/types/knowledge_bases/api_spaces_data_source_param.py diff --git a/src/do_gradientai/types/knowledge_bases/api_web_crawler_data_source.py b/src/gradient/types/knowledge_bases/api_web_crawler_data_source.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/api_web_crawler_data_source.py rename to src/gradient/types/knowledge_bases/api_web_crawler_data_source.py diff --git a/src/do_gradientai/types/knowledge_bases/api_web_crawler_data_source_param.py b/src/gradient/types/knowledge_bases/api_web_crawler_data_source_param.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/api_web_crawler_data_source_param.py rename to src/gradient/types/knowledge_bases/api_web_crawler_data_source_param.py diff --git a/src/do_gradientai/types/knowledge_bases/aws_data_source_param.py b/src/gradient/types/knowledge_bases/aws_data_source_param.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/aws_data_source_param.py rename to src/gradient/types/knowledge_bases/aws_data_source_param.py diff --git a/src/do_gradientai/types/knowledge_bases/data_source_create_params.py b/src/gradient/types/knowledge_bases/data_source_create_params.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/data_source_create_params.py rename to src/gradient/types/knowledge_bases/data_source_create_params.py diff --git a/src/do_gradientai/types/knowledge_bases/data_source_create_response.py b/src/gradient/types/knowledge_bases/data_source_create_response.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/data_source_create_response.py rename to src/gradient/types/knowledge_bases/data_source_create_response.py diff --git a/src/do_gradientai/types/knowledge_bases/data_source_delete_response.py b/src/gradient/types/knowledge_bases/data_source_delete_response.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/data_source_delete_response.py rename to src/gradient/types/knowledge_bases/data_source_delete_response.py diff --git a/src/do_gradientai/types/knowledge_bases/data_source_list_params.py b/src/gradient/types/knowledge_bases/data_source_list_params.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/data_source_list_params.py rename to 
src/gradient/types/knowledge_bases/data_source_list_params.py diff --git a/src/do_gradientai/types/knowledge_bases/data_source_list_response.py b/src/gradient/types/knowledge_bases/data_source_list_response.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/data_source_list_response.py rename to src/gradient/types/knowledge_bases/data_source_list_response.py diff --git a/src/do_gradientai/types/knowledge_bases/indexing_job_create_params.py b/src/gradient/types/knowledge_bases/indexing_job_create_params.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/indexing_job_create_params.py rename to src/gradient/types/knowledge_bases/indexing_job_create_params.py diff --git a/src/do_gradientai/types/knowledge_bases/indexing_job_create_response.py b/src/gradient/types/knowledge_bases/indexing_job_create_response.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/indexing_job_create_response.py rename to src/gradient/types/knowledge_bases/indexing_job_create_response.py diff --git a/src/do_gradientai/types/knowledge_bases/indexing_job_list_params.py b/src/gradient/types/knowledge_bases/indexing_job_list_params.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/indexing_job_list_params.py rename to src/gradient/types/knowledge_bases/indexing_job_list_params.py diff --git a/src/do_gradientai/types/knowledge_bases/indexing_job_list_response.py b/src/gradient/types/knowledge_bases/indexing_job_list_response.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/indexing_job_list_response.py rename to src/gradient/types/knowledge_bases/indexing_job_list_response.py diff --git a/src/do_gradientai/types/knowledge_bases/indexing_job_retrieve_data_sources_response.py b/src/gradient/types/knowledge_bases/indexing_job_retrieve_data_sources_response.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/indexing_job_retrieve_data_sources_response.py rename to src/gradient/types/knowledge_bases/indexing_job_retrieve_data_sources_response.py diff --git a/src/do_gradientai/types/knowledge_bases/indexing_job_retrieve_response.py b/src/gradient/types/knowledge_bases/indexing_job_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/indexing_job_retrieve_response.py rename to src/gradient/types/knowledge_bases/indexing_job_retrieve_response.py diff --git a/src/do_gradientai/types/knowledge_bases/indexing_job_update_cancel_params.py b/src/gradient/types/knowledge_bases/indexing_job_update_cancel_params.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/indexing_job_update_cancel_params.py rename to src/gradient/types/knowledge_bases/indexing_job_update_cancel_params.py diff --git a/src/do_gradientai/types/knowledge_bases/indexing_job_update_cancel_response.py b/src/gradient/types/knowledge_bases/indexing_job_update_cancel_response.py similarity index 100% rename from src/do_gradientai/types/knowledge_bases/indexing_job_update_cancel_response.py rename to src/gradient/types/knowledge_bases/indexing_job_update_cancel_response.py diff --git a/src/do_gradientai/types/model_list_params.py b/src/gradient/types/model_list_params.py similarity index 100% rename from src/do_gradientai/types/model_list_params.py rename to src/gradient/types/model_list_params.py diff --git a/src/do_gradientai/types/model_list_response.py b/src/gradient/types/model_list_response.py similarity index 100% rename from 
src/do_gradientai/types/model_list_response.py rename to src/gradient/types/model_list_response.py diff --git a/src/do_gradientai/types/models/__init__.py b/src/gradient/types/models/__init__.py similarity index 100% rename from src/do_gradientai/types/models/__init__.py rename to src/gradient/types/models/__init__.py diff --git a/src/do_gradientai/types/models/providers/__init__.py b/src/gradient/types/models/providers/__init__.py similarity index 100% rename from src/do_gradientai/types/models/providers/__init__.py rename to src/gradient/types/models/providers/__init__.py diff --git a/src/do_gradientai/types/models/providers/anthropic_create_params.py b/src/gradient/types/models/providers/anthropic_create_params.py similarity index 100% rename from src/do_gradientai/types/models/providers/anthropic_create_params.py rename to src/gradient/types/models/providers/anthropic_create_params.py diff --git a/src/do_gradientai/types/models/providers/anthropic_create_response.py b/src/gradient/types/models/providers/anthropic_create_response.py similarity index 100% rename from src/do_gradientai/types/models/providers/anthropic_create_response.py rename to src/gradient/types/models/providers/anthropic_create_response.py diff --git a/src/do_gradientai/types/models/providers/anthropic_delete_response.py b/src/gradient/types/models/providers/anthropic_delete_response.py similarity index 100% rename from src/do_gradientai/types/models/providers/anthropic_delete_response.py rename to src/gradient/types/models/providers/anthropic_delete_response.py diff --git a/src/do_gradientai/types/models/providers/anthropic_list_agents_params.py b/src/gradient/types/models/providers/anthropic_list_agents_params.py similarity index 100% rename from src/do_gradientai/types/models/providers/anthropic_list_agents_params.py rename to src/gradient/types/models/providers/anthropic_list_agents_params.py diff --git a/src/do_gradientai/types/models/providers/anthropic_list_agents_response.py b/src/gradient/types/models/providers/anthropic_list_agents_response.py similarity index 100% rename from src/do_gradientai/types/models/providers/anthropic_list_agents_response.py rename to src/gradient/types/models/providers/anthropic_list_agents_response.py diff --git a/src/do_gradientai/types/models/providers/anthropic_list_params.py b/src/gradient/types/models/providers/anthropic_list_params.py similarity index 100% rename from src/do_gradientai/types/models/providers/anthropic_list_params.py rename to src/gradient/types/models/providers/anthropic_list_params.py diff --git a/src/do_gradientai/types/models/providers/anthropic_list_response.py b/src/gradient/types/models/providers/anthropic_list_response.py similarity index 100% rename from src/do_gradientai/types/models/providers/anthropic_list_response.py rename to src/gradient/types/models/providers/anthropic_list_response.py diff --git a/src/do_gradientai/types/models/providers/anthropic_retrieve_response.py b/src/gradient/types/models/providers/anthropic_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/models/providers/anthropic_retrieve_response.py rename to src/gradient/types/models/providers/anthropic_retrieve_response.py diff --git a/src/do_gradientai/types/models/providers/anthropic_update_params.py b/src/gradient/types/models/providers/anthropic_update_params.py similarity index 100% rename from src/do_gradientai/types/models/providers/anthropic_update_params.py rename to src/gradient/types/models/providers/anthropic_update_params.py diff --git 
a/src/do_gradientai/types/models/providers/anthropic_update_response.py b/src/gradient/types/models/providers/anthropic_update_response.py similarity index 100% rename from src/do_gradientai/types/models/providers/anthropic_update_response.py rename to src/gradient/types/models/providers/anthropic_update_response.py diff --git a/src/do_gradientai/types/models/providers/openai_create_params.py b/src/gradient/types/models/providers/openai_create_params.py similarity index 100% rename from src/do_gradientai/types/models/providers/openai_create_params.py rename to src/gradient/types/models/providers/openai_create_params.py diff --git a/src/do_gradientai/types/models/providers/openai_create_response.py b/src/gradient/types/models/providers/openai_create_response.py similarity index 100% rename from src/do_gradientai/types/models/providers/openai_create_response.py rename to src/gradient/types/models/providers/openai_create_response.py diff --git a/src/do_gradientai/types/models/providers/openai_delete_response.py b/src/gradient/types/models/providers/openai_delete_response.py similarity index 100% rename from src/do_gradientai/types/models/providers/openai_delete_response.py rename to src/gradient/types/models/providers/openai_delete_response.py diff --git a/src/do_gradientai/types/models/providers/openai_list_params.py b/src/gradient/types/models/providers/openai_list_params.py similarity index 100% rename from src/do_gradientai/types/models/providers/openai_list_params.py rename to src/gradient/types/models/providers/openai_list_params.py diff --git a/src/do_gradientai/types/models/providers/openai_list_response.py b/src/gradient/types/models/providers/openai_list_response.py similarity index 100% rename from src/do_gradientai/types/models/providers/openai_list_response.py rename to src/gradient/types/models/providers/openai_list_response.py diff --git a/src/do_gradientai/types/models/providers/openai_retrieve_agents_params.py b/src/gradient/types/models/providers/openai_retrieve_agents_params.py similarity index 100% rename from src/do_gradientai/types/models/providers/openai_retrieve_agents_params.py rename to src/gradient/types/models/providers/openai_retrieve_agents_params.py diff --git a/src/do_gradientai/types/models/providers/openai_retrieve_agents_response.py b/src/gradient/types/models/providers/openai_retrieve_agents_response.py similarity index 100% rename from src/do_gradientai/types/models/providers/openai_retrieve_agents_response.py rename to src/gradient/types/models/providers/openai_retrieve_agents_response.py diff --git a/src/do_gradientai/types/models/providers/openai_retrieve_response.py b/src/gradient/types/models/providers/openai_retrieve_response.py similarity index 100% rename from src/do_gradientai/types/models/providers/openai_retrieve_response.py rename to src/gradient/types/models/providers/openai_retrieve_response.py diff --git a/src/do_gradientai/types/models/providers/openai_update_params.py b/src/gradient/types/models/providers/openai_update_params.py similarity index 100% rename from src/do_gradientai/types/models/providers/openai_update_params.py rename to src/gradient/types/models/providers/openai_update_params.py diff --git a/src/do_gradientai/types/models/providers/openai_update_response.py b/src/gradient/types/models/providers/openai_update_response.py similarity index 100% rename from src/do_gradientai/types/models/providers/openai_update_response.py rename to src/gradient/types/models/providers/openai_update_response.py diff --git 
a/src/do_gradientai/types/region_list_params.py b/src/gradient/types/region_list_params.py similarity index 100% rename from src/do_gradientai/types/region_list_params.py rename to src/gradient/types/region_list_params.py diff --git a/src/do_gradientai/types/region_list_response.py b/src/gradient/types/region_list_response.py similarity index 100% rename from src/do_gradientai/types/region_list_response.py rename to src/gradient/types/region_list_response.py diff --git a/src/do_gradientai/types/shared/__init__.py b/src/gradient/types/shared/__init__.py similarity index 100% rename from src/do_gradientai/types/shared/__init__.py rename to src/gradient/types/shared/__init__.py diff --git a/src/do_gradientai/types/shared/action.py b/src/gradient/types/shared/action.py similarity index 100% rename from src/do_gradientai/types/shared/action.py rename to src/gradient/types/shared/action.py diff --git a/src/do_gradientai/types/shared/action_link.py b/src/gradient/types/shared/action_link.py similarity index 100% rename from src/do_gradientai/types/shared/action_link.py rename to src/gradient/types/shared/action_link.py diff --git a/src/do_gradientai/types/shared/api_links.py b/src/gradient/types/shared/api_links.py similarity index 100% rename from src/do_gradientai/types/shared/api_links.py rename to src/gradient/types/shared/api_links.py diff --git a/src/do_gradientai/types/shared/api_meta.py b/src/gradient/types/shared/api_meta.py similarity index 100% rename from src/do_gradientai/types/shared/api_meta.py rename to src/gradient/types/shared/api_meta.py diff --git a/src/do_gradientai/types/shared/backward_links.py b/src/gradient/types/shared/backward_links.py similarity index 100% rename from src/do_gradientai/types/shared/backward_links.py rename to src/gradient/types/shared/backward_links.py diff --git a/src/do_gradientai/types/shared/chat_completion_chunk.py b/src/gradient/types/shared/chat_completion_chunk.py similarity index 100% rename from src/do_gradientai/types/shared/chat_completion_chunk.py rename to src/gradient/types/shared/chat_completion_chunk.py diff --git a/src/do_gradientai/types/shared/chat_completion_token_logprob.py b/src/gradient/types/shared/chat_completion_token_logprob.py similarity index 100% rename from src/do_gradientai/types/shared/chat_completion_token_logprob.py rename to src/gradient/types/shared/chat_completion_token_logprob.py diff --git a/src/do_gradientai/types/shared/completion_usage.py b/src/gradient/types/shared/completion_usage.py similarity index 100% rename from src/do_gradientai/types/shared/completion_usage.py rename to src/gradient/types/shared/completion_usage.py diff --git a/src/do_gradientai/types/shared/disk_info.py b/src/gradient/types/shared/disk_info.py similarity index 100% rename from src/do_gradientai/types/shared/disk_info.py rename to src/gradient/types/shared/disk_info.py diff --git a/src/do_gradientai/types/shared/droplet.py b/src/gradient/types/shared/droplet.py similarity index 100% rename from src/do_gradientai/types/shared/droplet.py rename to src/gradient/types/shared/droplet.py diff --git a/src/do_gradientai/types/shared/droplet_next_backup_window.py b/src/gradient/types/shared/droplet_next_backup_window.py similarity index 100% rename from src/do_gradientai/types/shared/droplet_next_backup_window.py rename to src/gradient/types/shared/droplet_next_backup_window.py diff --git a/src/do_gradientai/types/shared/firewall_rule_target.py b/src/gradient/types/shared/firewall_rule_target.py similarity index 100% rename from 
src/do_gradientai/types/shared/firewall_rule_target.py rename to src/gradient/types/shared/firewall_rule_target.py diff --git a/src/do_gradientai/types/shared/forward_links.py b/src/gradient/types/shared/forward_links.py similarity index 100% rename from src/do_gradientai/types/shared/forward_links.py rename to src/gradient/types/shared/forward_links.py diff --git a/src/do_gradientai/types/shared/garbage_collection.py b/src/gradient/types/shared/garbage_collection.py similarity index 100% rename from src/do_gradientai/types/shared/garbage_collection.py rename to src/gradient/types/shared/garbage_collection.py diff --git a/src/do_gradientai/types/shared/gpu_info.py b/src/gradient/types/shared/gpu_info.py similarity index 100% rename from src/do_gradientai/types/shared/gpu_info.py rename to src/gradient/types/shared/gpu_info.py diff --git a/src/do_gradientai/types/shared/image.py b/src/gradient/types/shared/image.py similarity index 100% rename from src/do_gradientai/types/shared/image.py rename to src/gradient/types/shared/image.py diff --git a/src/do_gradientai/types/shared/kernel.py b/src/gradient/types/shared/kernel.py similarity index 100% rename from src/do_gradientai/types/shared/kernel.py rename to src/gradient/types/shared/kernel.py diff --git a/src/do_gradientai/types/shared/meta_properties.py b/src/gradient/types/shared/meta_properties.py similarity index 100% rename from src/do_gradientai/types/shared/meta_properties.py rename to src/gradient/types/shared/meta_properties.py diff --git a/src/do_gradientai/types/shared/network_v4.py b/src/gradient/types/shared/network_v4.py similarity index 100% rename from src/do_gradientai/types/shared/network_v4.py rename to src/gradient/types/shared/network_v4.py diff --git a/src/do_gradientai/types/shared/network_v6.py b/src/gradient/types/shared/network_v6.py similarity index 100% rename from src/do_gradientai/types/shared/network_v6.py rename to src/gradient/types/shared/network_v6.py diff --git a/src/do_gradientai/types/shared/page_links.py b/src/gradient/types/shared/page_links.py similarity index 100% rename from src/do_gradientai/types/shared/page_links.py rename to src/gradient/types/shared/page_links.py diff --git a/src/do_gradientai/types/shared/region.py b/src/gradient/types/shared/region.py similarity index 100% rename from src/do_gradientai/types/shared/region.py rename to src/gradient/types/shared/region.py diff --git a/src/do_gradientai/types/shared/size.py b/src/gradient/types/shared/size.py similarity index 100% rename from src/do_gradientai/types/shared/size.py rename to src/gradient/types/shared/size.py diff --git a/src/do_gradientai/types/shared/snapshots.py b/src/gradient/types/shared/snapshots.py similarity index 100% rename from src/do_gradientai/types/shared/snapshots.py rename to src/gradient/types/shared/snapshots.py diff --git a/src/do_gradientai/types/shared/subscription.py b/src/gradient/types/shared/subscription.py similarity index 100% rename from src/do_gradientai/types/shared/subscription.py rename to src/gradient/types/shared/subscription.py diff --git a/src/do_gradientai/types/shared/subscription_tier_base.py b/src/gradient/types/shared/subscription_tier_base.py similarity index 100% rename from src/do_gradientai/types/shared/subscription_tier_base.py rename to src/gradient/types/shared/subscription_tier_base.py diff --git a/src/do_gradientai/types/shared/vpc_peering.py b/src/gradient/types/shared/vpc_peering.py similarity index 100% rename from src/do_gradientai/types/shared/vpc_peering.py rename to 
src/gradient/types/shared/vpc_peering.py
diff --git a/src/do_gradientai/types/shared_params/__init__.py b/src/gradient/types/shared_params/__init__.py
similarity index 100%
rename from src/do_gradientai/types/shared_params/__init__.py
rename to src/gradient/types/shared_params/__init__.py
diff --git a/src/do_gradientai/types/shared_params/firewall_rule_target.py b/src/gradient/types/shared_params/firewall_rule_target.py
similarity index 100%
rename from src/do_gradientai/types/shared_params/firewall_rule_target.py
rename to src/gradient/types/shared_params/firewall_rule_target.py
diff --git a/tests/api_resources/agents/chat/test_completions.py b/tests/api_resources/agents/chat/test_completions.py
index de43cc34..474c11c9 100644
--- a/tests/api_resources/agents/chat/test_completions.py
+++ b/tests/api_resources/agents/chat/test_completions.py
@@ -7,9 +7,9 @@
 
 import pytest
 
+from gradient import Gradient, AsyncGradient
 from tests.utils import assert_matches_type
-from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types.agents.chat import CompletionCreateResponse
+from gradient.types.agents.chat import CompletionCreateResponse
 
 base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
 
@@ -19,7 +19,7 @@ class TestCompletions:
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_create_overload_1(self, client: GradientAI) -> None:
+    def test_method_create_overload_1(self, client: Gradient) -> None:
         completion = client.agents.chat.completions.create(
             messages=[
                 {
@@ -33,7 +33,7 @@ def test_method_create_overload_1(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> None:
+    def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None:
         completion = client.agents.chat.completions.create(
             messages=[
                 {
@@ -73,7 +73,7 @@ def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> N
 
     @pytest.mark.skip()
     @parametrize
-    def test_raw_response_create_overload_1(self, client: GradientAI) -> None:
+    def test_raw_response_create_overload_1(self, client: Gradient) -> None:
         response = client.agents.chat.completions.with_raw_response.create(
             messages=[
                 {
@@ -91,7 +91,7 @@ def test_raw_response_create_overload_1(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_streaming_response_create_overload_1(self, client: GradientAI) -> None:
+    def test_streaming_response_create_overload_1(self, client: Gradient) -> None:
         with client.agents.chat.completions.with_streaming_response.create(
             messages=[
                 {
@@ -111,7 +111,7 @@ def test_streaming_response_create_overload_1(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_create_overload_2(self, client: GradientAI) -> None:
+    def test_method_create_overload_2(self, client: Gradient) -> None:
         completion_stream = client.agents.chat.completions.create(
             messages=[
                 {
@@ -126,7 +126,7 @@ def test_method_create_overload_2(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> None:
+    def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None:
         completion_stream = client.agents.chat.completions.create(
             messages=[
                 {
@@ -166,7 +166,7 @@ def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> N
 
     @pytest.mark.skip()
     @parametrize
-    def test_raw_response_create_overload_2(self, client: GradientAI) -> None:
+    def test_raw_response_create_overload_2(self, client: Gradient) -> None:
         response = client.agents.chat.completions.with_raw_response.create(
             messages=[
                 {
@@ -184,7 +184,7 @@ def test_raw_response_create_overload_2(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_streaming_response_create_overload_2(self, client: GradientAI) -> None:
+    def test_streaming_response_create_overload_2(self, client: Gradient) -> None:
         with client.agents.chat.completions.with_streaming_response.create(
             messages=[
                 {
@@ -214,7 +214,7 @@ class TestAsyncCompletions:
 
     @pytest.mark.skip()
     @parametrize
-    async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None:
+    async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None:
         completion = await async_client.agents.chat.completions.create(
             messages=[
                 {
@@ -228,7 +228,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradientAI) ->
 
     @pytest.mark.skip()
     @parametrize
-    async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None:
+    async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None:
         completion = await async_client.agents.chat.completions.create(
             messages=[
                 {
@@ -268,7 +268,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
 
     @pytest.mark.skip()
     @parametrize
-    async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
+    async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None:
         response = await async_client.agents.chat.completions.with_raw_response.create(
             messages=[
                 {
@@ -286,7 +286,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientA
 
     @pytest.mark.skip()
     @parametrize
-    async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
+    async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None:
         async with async_client.agents.chat.completions.with_streaming_response.create(
             messages=[
                 {
@@ -306,7 +306,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
 
     @pytest.mark.skip()
     @parametrize
-    async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None:
+    async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None:
         completion_stream = await async_client.agents.chat.completions.create(
             messages=[
                 {
@@ -321,7 +321,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradientAI) ->
 
     @pytest.mark.skip()
     @parametrize
-    async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None:
+    async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None:
         completion_stream = await async_client.agents.chat.completions.create(
             messages=[
                 {
@@ -361,7 +361,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
 
     @pytest.mark.skip()
     @parametrize
-    async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
+    async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None:
         response = await async_client.agents.chat.completions.with_raw_response.create(
             messages=[
                 {
@@ -379,7 +379,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientA
 
     @pytest.mark.skip()
     @parametrize
-    async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
+    async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None:
         async with async_client.agents.chat.completions.with_streaming_response.create(
             messages=[
                 {
diff --git a/tests/api_resources/agents/evaluation_metrics/anthropic/test_keys.py b/tests/api_resources/agents/evaluation_metrics/anthropic/test_keys.py
index aff153a6..a8ca5724 100644
--- a/tests/api_resources/agents/evaluation_metrics/anthropic/test_keys.py
+++ b/tests/api_resources/agents/evaluation_metrics/anthropic/test_keys.py
@@ -7,9 +7,9 @@
 
 import pytest
 
+from gradient import Gradient, AsyncGradient
 from tests.utils import assert_matches_type
-from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types.agents.evaluation_metrics.anthropic import (
+from gradient.types.agents.evaluation_metrics.anthropic import (
     KeyListResponse,
     KeyCreateResponse,
     KeyDeleteResponse,
@@ -26,13 +26,13 @@ class TestKeys:
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_create(self, client: GradientAI) -> None:
+    def test_method_create(self, client: Gradient) -> None:
         key = client.agents.evaluation_metrics.anthropic.keys.create()
         assert_matches_type(KeyCreateResponse, key, path=["response"])
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_create_with_all_params(self, client: GradientAI) -> None:
+    def test_method_create_with_all_params(self, client: Gradient) -> None:
         key = client.agents.evaluation_metrics.anthropic.keys.create(
             api_key='"sk-ant-12345678901234567890123456789012"',
             name='"Production Key"',
@@ -41,7 +41,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_raw_response_create(self, client: GradientAI) -> None:
+    def test_raw_response_create(self, client: Gradient) -> None:
         response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.create()
 
         assert response.is_closed is True
@@ -51,7 +51,7 @@ def test_raw_response_create(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_streaming_response_create(self, client: GradientAI) -> None:
+    def test_streaming_response_create(self, client: Gradient) -> None:
         with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.create() as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -63,7 +63,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_retrieve(self, client: GradientAI) -> None:
+    def test_method_retrieve(self, client: Gradient) -> None:
         key = client.agents.evaluation_metrics.anthropic.keys.retrieve(
             "api_key_uuid",
         )
@@ -71,7 +71,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_raw_response_retrieve(self, client: GradientAI) -> None:
+    def test_raw_response_retrieve(self, client: Gradient) -> None:
         response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.retrieve(
             "api_key_uuid",
         )
@@ -83,7 +83,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_streaming_response_retrieve(self, client: GradientAI) -> None:
+    def test_streaming_response_retrieve(self, client: Gradient) -> None:
         with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.retrieve(
             "api_key_uuid",
         ) as response:
@@ -97,7 +97,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
@parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): client.agents.evaluation_metrics.anthropic.keys.with_raw_response.retrieve( "", @@ -105,7 +105,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -113,7 +113,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', api_key='"sk-ant-12345678901234567890123456789012"', @@ -124,7 +124,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -136,7 +136,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -150,7 +150,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): client.agents.evaluation_metrics.anthropic.keys.with_raw_response.update( path_api_key_uuid="", @@ -158,13 +158,13 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.list( page=0, per_page=0, @@ -173,7 +173,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.list() assert response.is_closed is True @@ -183,7 +183,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: 
GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -195,7 +195,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.delete( "api_key_uuid", ) @@ -203,7 +203,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.delete( "api_key_uuid", ) @@ -215,7 +215,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.delete( "api_key_uuid", ) as response: @@ -229,7 +229,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): client.agents.evaluation_metrics.anthropic.keys.with_raw_response.delete( "", @@ -237,7 +237,7 @@ def test_path_params_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_agents(self, client: GradientAI) -> None: + def test_method_list_agents(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -245,7 +245,7 @@ def test_method_list_agents(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_agents_with_all_params(self, client: GradientAI) -> None: + def test_method_list_agents_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -255,7 +255,7 @@ def test_method_list_agents_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list_agents(self, client: GradientAI) -> None: + def test_raw_response_list_agents(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -267,7 +267,7 @@ def test_raw_response_list_agents(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_agents(self, client: GradientAI) -> None: + def test_streaming_response_list_agents(self, client: Gradient) -> None: with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -281,7 +281,7 @@ def test_streaming_response_list_agents(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list_agents(self, client: GradientAI) -> None: + 
def test_path_params_list_agents(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.agents.evaluation_metrics.anthropic.keys.with_raw_response.list_agents( uuid="", @@ -295,13 +295,13 @@ class TestAsyncKeys: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.create() assert_matches_type(KeyCreateResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.create( api_key='"sk-ant-12345678901234567890123456789012"', name='"Production Key"', @@ -310,7 +310,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.create() assert response.is_closed is True @@ -320,7 +320,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -332,7 +332,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.retrieve( "api_key_uuid", ) @@ -340,7 +340,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.retrieve( "api_key_uuid", ) @@ -352,7 +352,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.retrieve( "api_key_uuid", ) as response: @@ -366,7 +366,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for 
`api_key_uuid` but received ''"): await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.retrieve( "", @@ -374,7 +374,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -382,7 +382,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', api_key='"sk-ant-12345678901234567890123456789012"', @@ -393,7 +393,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -405,7 +405,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -419,7 +419,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.update( path_api_key_uuid="", @@ -427,13 +427,13 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.list( page=0, per_page=0, @@ -442,7 +442,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await 
async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.list() assert response.is_closed is True @@ -452,7 +452,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -464,7 +464,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.delete( "api_key_uuid", ) @@ -472,7 +472,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.delete( "api_key_uuid", ) @@ -484,7 +484,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.delete( "api_key_uuid", ) as response: @@ -498,7 +498,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.delete( "", @@ -506,7 +506,7 @@ async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_agents(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_agents(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -514,7 +514,7 @@ async def test_method_list_agents(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_agents_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_agents_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -524,7 +524,7 @@ async def test_method_list_agents_with_all_params(self, async_client: AsyncGradi @pytest.mark.skip() @parametrize - async def test_raw_response_list_agents(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> None: response = await 
async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -536,7 +536,7 @@ async def test_raw_response_list_agents(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_streaming_response_list_agents(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_agents(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -550,7 +550,7 @@ async def test_streaming_response_list_agents(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_path_params_list_agents(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list_agents(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.list_agents( uuid="", diff --git a/tests/api_resources/agents/evaluation_metrics/openai/test_keys.py b/tests/api_resources/agents/evaluation_metrics/openai/test_keys.py index 08404acc..5a22b1bc 100644 --- a/tests/api_resources/agents/evaluation_metrics/openai/test_keys.py +++ b/tests/api_resources/agents/evaluation_metrics/openai/test_keys.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.agents.evaluation_metrics.openai import ( +from gradient.types.agents.evaluation_metrics.openai import ( KeyListResponse, KeyCreateResponse, KeyDeleteResponse, @@ -26,13 +26,13 @@ class TestKeys: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.create() assert_matches_type(KeyCreateResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def test_method_create_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.create( api_key='"sk-proj--123456789098765432123456789"', name='"Production Key"', @@ -41,7 +41,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.openai.keys.with_raw_response.create() assert response.is_closed is True @@ -51,7 +51,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.evaluation_metrics.openai.keys.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -63,7 +63,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: key = 
client.agents.evaluation_metrics.openai.keys.retrieve( "api_key_uuid", ) @@ -71,7 +71,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.openai.keys.with_raw_response.retrieve( "api_key_uuid", ) @@ -83,7 +83,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.agents.evaluation_metrics.openai.keys.with_streaming_response.retrieve( "api_key_uuid", ) as response: @@ -97,7 +97,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): client.agents.evaluation_metrics.openai.keys.with_raw_response.retrieve( "", @@ -105,7 +105,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -113,7 +113,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', api_key='"sk-ant-12345678901234567890123456789012"', @@ -124,7 +124,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.openai.keys.with_raw_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -136,7 +136,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.evaluation_metrics.openai.keys.with_streaming_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -150,7 +150,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): client.agents.evaluation_metrics.openai.keys.with_raw_response.update( path_api_key_uuid="", @@ -158,13 +158,13 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: key = 
client.agents.evaluation_metrics.openai.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.list( page=0, per_page=0, @@ -173,7 +173,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.openai.keys.with_raw_response.list() assert response.is_closed is True @@ -183,7 +183,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.evaluation_metrics.openai.keys.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -195,7 +195,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.delete( "api_key_uuid", ) @@ -203,7 +203,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.openai.keys.with_raw_response.delete( "api_key_uuid", ) @@ -215,7 +215,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.agents.evaluation_metrics.openai.keys.with_streaming_response.delete( "api_key_uuid", ) as response: @@ -229,7 +229,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): client.agents.evaluation_metrics.openai.keys.with_raw_response.delete( "", @@ -237,7 +237,7 @@ def test_path_params_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_agents(self, client: GradientAI) -> None: + def test_method_list_agents(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -245,7 +245,7 @@ def test_method_list_agents(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_agents_with_all_params(self, client: GradientAI) -> None: + def test_method_list_agents_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -255,7 +255,7 @@ def test_method_list_agents_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def 
test_raw_response_list_agents(self, client: GradientAI) -> None: + def test_raw_response_list_agents(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.openai.keys.with_raw_response.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -267,7 +267,7 @@ def test_raw_response_list_agents(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_agents(self, client: GradientAI) -> None: + def test_streaming_response_list_agents(self, client: Gradient) -> None: with client.agents.evaluation_metrics.openai.keys.with_streaming_response.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -281,7 +281,7 @@ def test_streaming_response_list_agents(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list_agents(self, client: GradientAI) -> None: + def test_path_params_list_agents(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.agents.evaluation_metrics.openai.keys.with_raw_response.list_agents( uuid="", @@ -295,13 +295,13 @@ class TestAsyncKeys: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.create() assert_matches_type(KeyCreateResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.create( api_key='"sk-proj--123456789098765432123456789"', name='"Production Key"', @@ -310,7 +310,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.create() assert response.is_closed is True @@ -320,7 +320,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -332,7 +332,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.retrieve( "api_key_uuid", ) @@ -340,7 +340,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await 
async_client.agents.evaluation_metrics.openai.keys.with_raw_response.retrieve( "api_key_uuid", ) @@ -352,7 +352,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.retrieve( "api_key_uuid", ) as response: @@ -366,7 +366,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.retrieve( "", @@ -374,7 +374,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -382,7 +382,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', api_key='"sk-ant-12345678901234567890123456789012"', @@ -393,7 +393,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -405,7 +405,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -419,7 +419,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.update( path_api_key_uuid="", @@ -427,13 +427,13 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def 
test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.list( page=0, per_page=0, @@ -442,7 +442,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.list() assert response.is_closed is True @@ -452,7 +452,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -464,7 +464,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.delete( "api_key_uuid", ) @@ -472,7 +472,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.delete( "api_key_uuid", ) @@ -484,7 +484,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.delete( "api_key_uuid", ) as response: @@ -498,7 +498,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.delete( "", @@ -506,7 +506,7 @@ async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_agents(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_agents(self, async_client: AsyncGradient) -> None: key = await 
async_client.agents.evaluation_metrics.openai.keys.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -514,7 +514,7 @@ async def test_method_list_agents(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_agents_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_agents_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -524,7 +524,7 @@ async def test_method_list_agents_with_all_params(self, async_client: AsyncGradi @pytest.mark.skip() @parametrize - async def test_raw_response_list_agents(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -536,7 +536,7 @@ async def test_raw_response_list_agents(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_streaming_response_list_agents(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_agents(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -550,7 +550,7 @@ async def test_streaming_response_list_agents(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_path_params_list_agents(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list_agents(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.list_agents( uuid="", diff --git a/tests/api_resources/agents/evaluation_metrics/test_models.py b/tests/api_resources/agents/evaluation_metrics/test_models.py index 27ab4a27..624e5288 100644 --- a/tests/api_resources/agents/evaluation_metrics/test_models.py +++ b/tests/api_resources/agents/evaluation_metrics/test_models.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.agents.evaluation_metrics import ModelListResponse +from gradient.types.agents.evaluation_metrics import ModelListResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -19,13 +19,13 @@ class TestModels: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: model = client.agents.evaluation_metrics.models.list() assert_matches_type(ModelListResponse, model, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: model = client.agents.evaluation_metrics.models.list( page=0, per_page=0, @@ -36,7 +36,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = 
client.agents.evaluation_metrics.models.with_raw_response.list() assert response.is_closed is True @@ -46,7 +46,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.evaluation_metrics.models.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -64,13 +64,13 @@ class TestAsyncModels: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: model = await async_client.agents.evaluation_metrics.models.list() assert_matches_type(ModelListResponse, model, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: model = await async_client.agents.evaluation_metrics.models.list( page=0, per_page=0, @@ -81,7 +81,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.models.with_raw_response.list() assert response.is_closed is True @@ -91,7 +91,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.models.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/api_resources/agents/evaluation_metrics/test_workspaces.py b/tests/api_resources/agents/evaluation_metrics/test_workspaces.py index 2728393e..608406bf 100644 --- a/tests/api_resources/agents/evaluation_metrics/test_workspaces.py +++ b/tests/api_resources/agents/evaluation_metrics/test_workspaces.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.agents.evaluation_metrics import ( +from gradient.types.agents.evaluation_metrics import ( WorkspaceListResponse, WorkspaceCreateResponse, WorkspaceDeleteResponse, @@ -26,13 +26,13 @@ class TestWorkspaces: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.create() assert_matches_type(WorkspaceCreateResponse, workspace, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def test_method_create_with_all_params(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.create( agent_uuids=["example string"], description='"example string"', @@ -42,7 +42,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None: 
@pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.with_raw_response.create() assert response.is_closed is True @@ -52,7 +52,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -64,7 +64,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.retrieve( "workspace_uuid", ) @@ -72,7 +72,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.with_raw_response.retrieve( "workspace_uuid", ) @@ -84,7 +84,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.with_streaming_response.retrieve( "workspace_uuid", ) as response: @@ -98,7 +98,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): client.agents.evaluation_metrics.workspaces.with_raw_response.retrieve( "", @@ -106,7 +106,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.update( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -114,7 +114,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.update( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', description='"example string"', @@ -125,7 +125,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.with_raw_response.update( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -137,7 +137,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def 
test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.with_streaming_response.update( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -151,7 +151,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_workspace_uuid` but received ''"): client.agents.evaluation_metrics.workspaces.with_raw_response.update( path_workspace_uuid="", @@ -159,13 +159,13 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.list() assert_matches_type(WorkspaceListResponse, workspace, path=["response"]) @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.with_raw_response.list() assert response.is_closed is True @@ -175,7 +175,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -187,7 +187,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.delete( "workspace_uuid", ) @@ -195,7 +195,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.with_raw_response.delete( "workspace_uuid", ) @@ -207,7 +207,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.with_streaming_response.delete( "workspace_uuid", ) as response: @@ -221,7 +221,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): client.agents.evaluation_metrics.workspaces.with_raw_response.delete( "", @@ -229,7 +229,7 @@ def test_path_params_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_evaluation_test_cases(self, client: GradientAI) -> None: + def 
test_method_list_evaluation_test_cases(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.list_evaluation_test_cases( "workspace_uuid", ) @@ -237,7 +237,7 @@ def test_method_list_evaluation_test_cases(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list_evaluation_test_cases(self, client: GradientAI) -> None: + def test_raw_response_list_evaluation_test_cases(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.with_raw_response.list_evaluation_test_cases( "workspace_uuid", ) @@ -249,7 +249,7 @@ def test_raw_response_list_evaluation_test_cases(self, client: GradientAI) -> No @pytest.mark.skip() @parametrize - def test_streaming_response_list_evaluation_test_cases(self, client: GradientAI) -> None: + def test_streaming_response_list_evaluation_test_cases(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.with_streaming_response.list_evaluation_test_cases( "workspace_uuid", ) as response: @@ -263,7 +263,7 @@ def test_streaming_response_list_evaluation_test_cases(self, client: GradientAI) @pytest.mark.skip() @parametrize - def test_path_params_list_evaluation_test_cases(self, client: GradientAI) -> None: + def test_path_params_list_evaluation_test_cases(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): client.agents.evaluation_metrics.workspaces.with_raw_response.list_evaluation_test_cases( "", @@ -277,13 +277,13 @@ class TestAsyncWorkspaces: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: workspace = await async_client.agents.evaluation_metrics.workspaces.create() assert_matches_type(WorkspaceCreateResponse, workspace, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: workspace = await async_client.agents.evaluation_metrics.workspaces.create( agent_uuids=["example string"], description='"example string"', @@ -293,7 +293,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.create() assert response.is_closed is True @@ -303,7 +303,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -315,7 +315,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: workspace = await 
async_client.agents.evaluation_metrics.workspaces.retrieve( "workspace_uuid", ) @@ -323,7 +323,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.retrieve( "workspace_uuid", ) @@ -335,7 +335,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.retrieve( "workspace_uuid", ) as response: @@ -349,7 +349,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): await async_client.agents.evaluation_metrics.workspaces.with_raw_response.retrieve( "", @@ -357,7 +357,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: workspace = await async_client.agents.evaluation_metrics.workspaces.update( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -365,7 +365,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: workspace = await async_client.agents.evaluation_metrics.workspaces.update( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', description='"example string"', @@ -376,7 +376,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.update( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -388,7 +388,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.update( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -402,7 +402,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: 
AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_workspace_uuid` but received ''"): await async_client.agents.evaluation_metrics.workspaces.with_raw_response.update( path_workspace_uuid="", @@ -410,13 +410,13 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: workspace = await async_client.agents.evaluation_metrics.workspaces.list() assert_matches_type(WorkspaceListResponse, workspace, path=["response"]) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.list() assert response.is_closed is True @@ -426,7 +426,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -438,7 +438,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: workspace = await async_client.agents.evaluation_metrics.workspaces.delete( "workspace_uuid", ) @@ -446,7 +446,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.delete( "workspace_uuid", ) @@ -458,7 +458,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.delete( "workspace_uuid", ) as response: @@ -472,7 +472,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): await async_client.agents.evaluation_metrics.workspaces.with_raw_response.delete( "", @@ -480,7 +480,7 @@ async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_evaluation_test_cases(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_evaluation_test_cases(self, async_client: AsyncGradient) -> None: workspace = 
await async_client.agents.evaluation_metrics.workspaces.list_evaluation_test_cases( "workspace_uuid", ) @@ -488,7 +488,7 @@ async def test_method_list_evaluation_test_cases(self, async_client: AsyncGradie @pytest.mark.skip() @parametrize - async def test_raw_response_list_evaluation_test_cases(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_evaluation_test_cases(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.list_evaluation_test_cases( "workspace_uuid", ) @@ -500,7 +500,7 @@ async def test_raw_response_list_evaluation_test_cases(self, async_client: Async @pytest.mark.skip() @parametrize - async def test_streaming_response_list_evaluation_test_cases(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_evaluation_test_cases(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.list_evaluation_test_cases( "workspace_uuid", ) as response: @@ -514,7 +514,7 @@ async def test_streaming_response_list_evaluation_test_cases(self, async_client: @pytest.mark.skip() @parametrize - async def test_path_params_list_evaluation_test_cases(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list_evaluation_test_cases(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): await async_client.agents.evaluation_metrics.workspaces.with_raw_response.list_evaluation_test_cases( "", diff --git a/tests/api_resources/agents/evaluation_metrics/workspaces/test_agents.py b/tests/api_resources/agents/evaluation_metrics/workspaces/test_agents.py index 37d39018..b70f9c58 100644 --- a/tests/api_resources/agents/evaluation_metrics/workspaces/test_agents.py +++ b/tests/api_resources/agents/evaluation_metrics/workspaces/test_agents.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.agents.evaluation_metrics.workspaces import ( +from gradient.types.agents.evaluation_metrics.workspaces import ( AgentListResponse, AgentMoveResponse, ) @@ -22,7 +22,7 @@ class TestAgents: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: agent = client.agents.evaluation_metrics.workspaces.agents.list( workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -30,7 +30,7 @@ def test_method_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: agent = client.agents.evaluation_metrics.workspaces.agents.list( workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', only_deployed=True, @@ -41,7 +41,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.agents.with_raw_response.list( workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -53,7 +53,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def 
test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.agents.with_streaming_response.list( workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -67,7 +67,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list(self, client: GradientAI) -> None: + def test_path_params_list(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): client.agents.evaluation_metrics.workspaces.agents.with_raw_response.list( workspace_uuid="", @@ -75,7 +75,7 @@ def test_path_params_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_move(self, client: GradientAI) -> None: + def test_method_move(self, client: Gradient) -> None: agent = client.agents.evaluation_metrics.workspaces.agents.move( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -83,7 +83,7 @@ def test_method_move(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_move_with_all_params(self, client: GradientAI) -> None: + def test_method_move_with_all_params(self, client: Gradient) -> None: agent = client.agents.evaluation_metrics.workspaces.agents.move( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuids=["example string"], @@ -93,7 +93,7 @@ def test_method_move_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_move(self, client: GradientAI) -> None: + def test_raw_response_move(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.agents.with_raw_response.move( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -105,7 +105,7 @@ def test_raw_response_move(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_move(self, client: GradientAI) -> None: + def test_streaming_response_move(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.agents.with_streaming_response.move( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -119,7 +119,7 @@ def test_streaming_response_move(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_move(self, client: GradientAI) -> None: + def test_path_params_move(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_workspace_uuid` but received ''"): client.agents.evaluation_metrics.workspaces.agents.with_raw_response.move( path_workspace_uuid="", @@ -133,7 +133,7 @@ class TestAsyncAgents: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.evaluation_metrics.workspaces.agents.list( workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -141,7 +141,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.evaluation_metrics.workspaces.agents.list( 
workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', only_deployed=True, @@ -152,7 +152,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.agents.with_raw_response.list( workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -164,7 +164,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.agents.with_streaming_response.list( workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -178,7 +178,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): await async_client.agents.evaluation_metrics.workspaces.agents.with_raw_response.list( workspace_uuid="", @@ -186,7 +186,7 @@ async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_move(self, async_client: AsyncGradientAI) -> None: + async def test_method_move(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.evaluation_metrics.workspaces.agents.move( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -194,7 +194,7 @@ async def test_method_move(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_move_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_move_with_all_params(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.evaluation_metrics.workspaces.agents.move( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuids=["example string"], @@ -204,7 +204,7 @@ async def test_method_move_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_move(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_move(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.agents.with_raw_response.move( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -216,7 +216,7 @@ async def test_raw_response_move(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_move(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_move(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.agents.with_streaming_response.move( path_workspace_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -230,7 +230,7 @@ async def test_streaming_response_move(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_path_params_move(self, async_client: AsyncGradientAI) -> None: + 
async def test_path_params_move(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_workspace_uuid` but received ''"): await async_client.agents.evaluation_metrics.workspaces.agents.with_raw_response.move( path_workspace_uuid="", diff --git a/tests/api_resources/agents/test_api_keys.py b/tests/api_resources/agents/test_api_keys.py index 1e5275fe..4b80fc54 100644 --- a/tests/api_resources/agents/test_api_keys.py +++ b/tests/api_resources/agents/test_api_keys.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.agents import ( +from gradient.types.agents import ( APIKeyListResponse, APIKeyCreateResponse, APIKeyDeleteResponse, @@ -25,7 +25,7 @@ class TestAPIKeys: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: api_key = client.agents.api_keys.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -33,7 +33,7 @@ def test_method_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def test_method_create_with_all_params(self, client: Gradient) -> None: api_key = client.agents.api_keys.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', body_agent_uuid='"12345678-1234-1234-1234-123456789012"', @@ -43,7 +43,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.api_keys.with_raw_response.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -55,7 +55,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.api_keys.with_streaming_response.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -69,7 +69,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_create(self, client: GradientAI) -> None: + def test_path_params_create(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): client.agents.api_keys.with_raw_response.create( path_agent_uuid="", @@ -77,7 +77,7 @@ def test_path_params_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: api_key = client.agents.api_keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -86,7 +86,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: api_key = client.agents.api_keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', 
@@ -98,7 +98,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.api_keys.with_raw_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -111,7 +111,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.api_keys.with_streaming_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -126,7 +126,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): client.agents.api_keys.with_raw_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -141,7 +141,7 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: api_key = client.agents.api_keys.list( agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -149,7 +149,7 @@ def test_method_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: api_key = client.agents.api_keys.list( agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -159,7 +159,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.api_keys.with_raw_response.list( agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -171,7 +171,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.api_keys.with_streaming_response.list( agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -185,7 +185,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list(self, client: GradientAI) -> None: + def test_path_params_list(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): client.agents.api_keys.with_raw_response.list( agent_uuid="", @@ -193,7 +193,7 @@ def test_path_params_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: api_key = client.agents.api_keys.delete( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ 
-202,7 +202,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.agents.api_keys.with_raw_response.delete( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -215,7 +215,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.agents.api_keys.with_streaming_response.delete( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -230,7 +230,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): client.agents.api_keys.with_raw_response.delete( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -245,7 +245,7 @@ def test_path_params_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_regenerate(self, client: GradientAI) -> None: + def test_method_regenerate(self, client: Gradient) -> None: api_key = client.agents.api_keys.regenerate( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -254,7 +254,7 @@ def test_method_regenerate(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_regenerate(self, client: GradientAI) -> None: + def test_raw_response_regenerate(self, client: Gradient) -> None: response = client.agents.api_keys.with_raw_response.regenerate( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -267,7 +267,7 @@ def test_raw_response_regenerate(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_regenerate(self, client: GradientAI) -> None: + def test_streaming_response_regenerate(self, client: Gradient) -> None: with client.agents.api_keys.with_streaming_response.regenerate( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -282,7 +282,7 @@ def test_streaming_response_regenerate(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_regenerate(self, client: GradientAI) -> None: + def test_path_params_regenerate(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): client.agents.api_keys.with_raw_response.regenerate( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -303,7 +303,7 @@ class TestAsyncAPIKeys: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -311,7 +311,7 @@ async def test_method_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, 
async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', body_agent_uuid='"12345678-1234-1234-1234-123456789012"', @@ -321,7 +321,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.api_keys.with_raw_response.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -333,7 +333,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.api_keys.with_streaming_response.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -347,7 +347,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_create(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_create(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): await async_client.agents.api_keys.with_raw_response.create( path_agent_uuid="", @@ -355,7 +355,7 @@ async def test_path_params_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -364,7 +364,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -376,7 +376,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.api_keys.with_raw_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -389,7 +389,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.api_keys.with_streaming_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', 
path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -404,7 +404,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): await async_client.agents.api_keys.with_raw_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -419,7 +419,7 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.list( agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -427,7 +427,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.list( agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -437,7 +437,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.api_keys.with_raw_response.list( agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -449,7 +449,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.api_keys.with_streaming_response.list( agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -463,7 +463,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): await async_client.agents.api_keys.with_raw_response.list( agent_uuid="", @@ -471,7 +471,7 @@ async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.delete( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -480,7 +480,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.agents.api_keys.with_raw_response.delete( 
api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -493,7 +493,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.agents.api_keys.with_streaming_response.delete( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -508,7 +508,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): await async_client.agents.api_keys.with_raw_response.delete( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -523,7 +523,7 @@ async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_regenerate(self, async_client: AsyncGradientAI) -> None: + async def test_method_regenerate(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.regenerate( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -532,7 +532,7 @@ async def test_method_regenerate(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_regenerate(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_regenerate(self, async_client: AsyncGradient) -> None: response = await async_client.agents.api_keys.with_raw_response.regenerate( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -545,7 +545,7 @@ async def test_raw_response_regenerate(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_streaming_response_regenerate(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_regenerate(self, async_client: AsyncGradient) -> None: async with async_client.agents.api_keys.with_streaming_response.regenerate( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -560,7 +560,7 @@ async def test_streaming_response_regenerate(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_path_params_regenerate(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_regenerate(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): await async_client.agents.api_keys.with_raw_response.regenerate( api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', diff --git a/tests/api_resources/agents/test_evaluation_datasets.py b/tests/api_resources/agents/test_evaluation_datasets.py index 56edd598..f60c4720 100644 --- a/tests/api_resources/agents/test_evaluation_datasets.py +++ b/tests/api_resources/agents/test_evaluation_datasets.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from 
do_gradientai.types.agents import ( +from gradient.types.agents import ( EvaluationDatasetCreateResponse, EvaluationDatasetCreateFileUploadPresignedURLsResponse, ) @@ -22,13 +22,13 @@ class TestEvaluationDatasets: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: evaluation_dataset = client.agents.evaluation_datasets.create() assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def test_method_create_with_all_params(self, client: Gradient) -> None: evaluation_dataset = client.agents.evaluation_datasets.create( file_upload_dataset={ "original_file_name": '"example name"', @@ -41,7 +41,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.evaluation_datasets.with_raw_response.create() assert response.is_closed is True @@ -51,7 +51,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.evaluation_datasets.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -63,7 +63,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_file_upload_presigned_urls(self, client: GradientAI) -> None: + def test_method_create_file_upload_presigned_urls(self, client: Gradient) -> None: evaluation_dataset = client.agents.evaluation_datasets.create_file_upload_presigned_urls() assert_matches_type( EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] @@ -71,7 +71,7 @@ def test_method_create_file_upload_presigned_urls(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_method_create_file_upload_presigned_urls_with_all_params(self, client: GradientAI) -> None: + def test_method_create_file_upload_presigned_urls_with_all_params(self, client: Gradient) -> None: evaluation_dataset = client.agents.evaluation_datasets.create_file_upload_presigned_urls( files=[ { @@ -86,7 +86,7 @@ def test_method_create_file_upload_presigned_urls_with_all_params(self, client: @pytest.mark.skip() @parametrize - def test_raw_response_create_file_upload_presigned_urls(self, client: GradientAI) -> None: + def test_raw_response_create_file_upload_presigned_urls(self, client: Gradient) -> None: response = client.agents.evaluation_datasets.with_raw_response.create_file_upload_presigned_urls() assert response.is_closed is True @@ -98,7 +98,7 @@ def test_raw_response_create_file_upload_presigned_urls(self, client: GradientAI @pytest.mark.skip() @parametrize - def test_streaming_response_create_file_upload_presigned_urls(self, client: GradientAI) -> None: + def test_streaming_response_create_file_upload_presigned_urls(self, client: Gradient) -> None: with client.agents.evaluation_datasets.with_streaming_response.create_file_upload_presigned_urls() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == 
"python" @@ -118,13 +118,13 @@ class TestAsyncEvaluationDatasets: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: evaluation_dataset = await async_client.agents.evaluation_datasets.create() assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_dataset = await async_client.agents.evaluation_datasets.create( file_upload_dataset={ "original_file_name": '"example name"', @@ -137,7 +137,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_datasets.with_raw_response.create() assert response.is_closed is True @@ -147,7 +147,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_datasets.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -159,7 +159,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_create_file_upload_presigned_urls(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_file_upload_presigned_urls(self, async_client: AsyncGradient) -> None: evaluation_dataset = await async_client.agents.evaluation_datasets.create_file_upload_presigned_urls() assert_matches_type( EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] @@ -167,9 +167,7 @@ async def test_method_create_file_upload_presigned_urls(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_method_create_file_upload_presigned_urls_with_all_params( - self, async_client: AsyncGradientAI - ) -> None: + async def test_method_create_file_upload_presigned_urls_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_dataset = await async_client.agents.evaluation_datasets.create_file_upload_presigned_urls( files=[ { @@ -184,7 +182,7 @@ async def test_method_create_file_upload_presigned_urls_with_all_params( @pytest.mark.skip() @parametrize - async def test_raw_response_create_file_upload_presigned_urls(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create_file_upload_presigned_urls(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_datasets.with_raw_response.create_file_upload_presigned_urls() assert response.is_closed is True @@ -196,7 +194,7 @@ async def test_raw_response_create_file_upload_presigned_urls(self, async_client @pytest.mark.skip() @parametrize - async def test_streaming_response_create_file_upload_presigned_urls(self, async_client: AsyncGradientAI) -> None: + async def 
test_streaming_response_create_file_upload_presigned_urls(self, async_client: AsyncGradient) -> None: async with ( async_client.agents.evaluation_datasets.with_streaming_response.create_file_upload_presigned_urls() ) as response: diff --git a/tests/api_resources/agents/test_evaluation_metrics.py b/tests/api_resources/agents/test_evaluation_metrics.py index 303d85d6..612f4228 100644 --- a/tests/api_resources/agents/test_evaluation_metrics.py +++ b/tests/api_resources/agents/test_evaluation_metrics.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.agents import ( +from gradient.types.agents import ( EvaluationMetricListResponse, EvaluationMetricListRegionsResponse, ) @@ -22,13 +22,13 @@ class TestEvaluationMetrics: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: evaluation_metric = client.agents.evaluation_metrics.list() assert_matches_type(EvaluationMetricListResponse, evaluation_metric, path=["response"]) @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.with_raw_response.list() assert response.is_closed is True @@ -38,7 +38,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.evaluation_metrics.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -50,13 +50,13 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_regions(self, client: GradientAI) -> None: + def test_method_list_regions(self, client: Gradient) -> None: evaluation_metric = client.agents.evaluation_metrics.list_regions() assert_matches_type(EvaluationMetricListRegionsResponse, evaluation_metric, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_regions_with_all_params(self, client: GradientAI) -> None: + def test_method_list_regions_with_all_params(self, client: Gradient) -> None: evaluation_metric = client.agents.evaluation_metrics.list_regions( serves_batch=True, serves_inference=True, @@ -65,7 +65,7 @@ def test_method_list_regions_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list_regions(self, client: GradientAI) -> None: + def test_raw_response_list_regions(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.with_raw_response.list_regions() assert response.is_closed is True @@ -75,7 +75,7 @@ def test_raw_response_list_regions(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_regions(self, client: GradientAI) -> None: + def test_streaming_response_list_regions(self, client: Gradient) -> None: with client.agents.evaluation_metrics.with_streaming_response.list_regions() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -93,13 +93,13 @@ class TestAsyncEvaluationMetrics: @pytest.mark.skip() @parametrize - async def 
test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: evaluation_metric = await async_client.agents.evaluation_metrics.list() assert_matches_type(EvaluationMetricListResponse, evaluation_metric, path=["response"]) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.with_raw_response.list() assert response.is_closed is True @@ -109,7 +109,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -121,13 +121,13 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_list_regions(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_regions(self, async_client: AsyncGradient) -> None: evaluation_metric = await async_client.agents.evaluation_metrics.list_regions() assert_matches_type(EvaluationMetricListRegionsResponse, evaluation_metric, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_regions_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_regions_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_metric = await async_client.agents.evaluation_metrics.list_regions( serves_batch=True, serves_inference=True, @@ -136,7 +136,7 @@ async def test_method_list_regions_with_all_params(self, async_client: AsyncGrad @pytest.mark.skip() @parametrize - async def test_raw_response_list_regions(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_regions(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.with_raw_response.list_regions() assert response.is_closed is True @@ -146,7 +146,7 @@ async def test_raw_response_list_regions(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_streaming_response_list_regions(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_regions(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.with_streaming_response.list_regions() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/api_resources/agents/test_evaluation_runs.py b/tests/api_resources/agents/test_evaluation_runs.py index 9d443f16..be842cbc 100644 --- a/tests/api_resources/agents/test_evaluation_runs.py +++ b/tests/api_resources/agents/test_evaluation_runs.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.agents import ( +from gradient.types.agents import ( EvaluationRunCreateResponse, EvaluationRunRetrieveResponse, EvaluationRunListResultsResponse, @@ -24,13 +24,13 @@ class TestEvaluationRuns: 
@pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: evaluation_run = client.agents.evaluation_runs.create() assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def test_method_create_with_all_params(self, client: Gradient) -> None: evaluation_run = client.agents.evaluation_runs.create( agent_uuids=["example string"], run_name="Evaluation Run Name", @@ -40,7 +40,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.evaluation_runs.with_raw_response.create() assert response.is_closed is True @@ -50,7 +50,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.evaluation_runs.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -62,7 +62,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: evaluation_run = client.agents.evaluation_runs.retrieve( "evaluation_run_uuid", ) @@ -70,7 +70,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.agents.evaluation_runs.with_raw_response.retrieve( "evaluation_run_uuid", ) @@ -82,7 +82,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.agents.evaluation_runs.with_streaming_response.retrieve( "evaluation_run_uuid", ) as response: @@ -96,7 +96,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): client.agents.evaluation_runs.with_raw_response.retrieve( "", @@ -104,7 +104,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_results(self, client: GradientAI) -> None: + def test_method_list_results(self, client: Gradient) -> None: evaluation_run = client.agents.evaluation_runs.list_results( evaluation_run_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -112,7 +112,7 @@ def test_method_list_results(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_results_with_all_params(self, client: GradientAI) -> None: + def test_method_list_results_with_all_params(self, client: Gradient) -> None: evaluation_run = 
client.agents.evaluation_runs.list_results( evaluation_run_uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -122,7 +122,7 @@ def test_method_list_results_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list_results(self, client: GradientAI) -> None: + def test_raw_response_list_results(self, client: Gradient) -> None: response = client.agents.evaluation_runs.with_raw_response.list_results( evaluation_run_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -134,7 +134,7 @@ def test_raw_response_list_results(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_results(self, client: GradientAI) -> None: + def test_streaming_response_list_results(self, client: Gradient) -> None: with client.agents.evaluation_runs.with_streaming_response.list_results( evaluation_run_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -148,7 +148,7 @@ def test_streaming_response_list_results(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list_results(self, client: GradientAI) -> None: + def test_path_params_list_results(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): client.agents.evaluation_runs.with_raw_response.list_results( evaluation_run_uuid="", @@ -156,7 +156,7 @@ def test_path_params_list_results(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve_results(self, client: GradientAI) -> None: + def test_method_retrieve_results(self, client: Gradient) -> None: evaluation_run = client.agents.evaluation_runs.retrieve_results( prompt_id=1, evaluation_run_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -165,7 +165,7 @@ def test_method_retrieve_results(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve_results(self, client: GradientAI) -> None: + def test_raw_response_retrieve_results(self, client: Gradient) -> None: response = client.agents.evaluation_runs.with_raw_response.retrieve_results( prompt_id=1, evaluation_run_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -178,7 +178,7 @@ def test_raw_response_retrieve_results(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve_results(self, client: GradientAI) -> None: + def test_streaming_response_retrieve_results(self, client: Gradient) -> None: with client.agents.evaluation_runs.with_streaming_response.retrieve_results( prompt_id=1, evaluation_run_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -193,7 +193,7 @@ def test_streaming_response_retrieve_results(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve_results(self, client: GradientAI) -> None: + def test_path_params_retrieve_results(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): client.agents.evaluation_runs.with_raw_response.retrieve_results( prompt_id=1, @@ -208,13 +208,13 @@ class TestAsyncEvaluationRuns: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: evaluation_run = await async_client.agents.evaluation_runs.create() assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) 
@pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_run = await async_client.agents.evaluation_runs.create( agent_uuids=["example string"], run_name="Evaluation Run Name", @@ -224,7 +224,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_runs.with_raw_response.create() assert response.is_closed is True @@ -234,7 +234,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_runs.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -246,7 +246,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: evaluation_run = await async_client.agents.evaluation_runs.retrieve( "evaluation_run_uuid", ) @@ -254,7 +254,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_runs.with_raw_response.retrieve( "evaluation_run_uuid", ) @@ -266,7 +266,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_runs.with_streaming_response.retrieve( "evaluation_run_uuid", ) as response: @@ -280,7 +280,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): await async_client.agents.evaluation_runs.with_raw_response.retrieve( "", @@ -288,7 +288,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_list_results(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_results(self, async_client: AsyncGradient) -> None: evaluation_run = await async_client.agents.evaluation_runs.list_results( evaluation_run_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -296,7 +296,7 @@ async def test_method_list_results(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def 
test_method_list_results_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_results_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_run = await async_client.agents.evaluation_runs.list_results( evaluation_run_uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -306,7 +306,7 @@ async def test_method_list_results_with_all_params(self, async_client: AsyncGrad @pytest.mark.skip() @parametrize - async def test_raw_response_list_results(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_results(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_runs.with_raw_response.list_results( evaluation_run_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -318,7 +318,7 @@ async def test_raw_response_list_results(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_streaming_response_list_results(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_results(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_runs.with_streaming_response.list_results( evaluation_run_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -332,7 +332,7 @@ async def test_streaming_response_list_results(self, async_client: AsyncGradient @pytest.mark.skip() @parametrize - async def test_path_params_list_results(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list_results(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): await async_client.agents.evaluation_runs.with_raw_response.list_results( evaluation_run_uuid="", @@ -340,7 +340,7 @@ async def test_path_params_list_results(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve_results(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve_results(self, async_client: AsyncGradient) -> None: evaluation_run = await async_client.agents.evaluation_runs.retrieve_results( prompt_id=1, evaluation_run_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -349,7 +349,7 @@ async def test_method_retrieve_results(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve_results(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve_results(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_runs.with_raw_response.retrieve_results( prompt_id=1, evaluation_run_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -362,7 +362,7 @@ async def test_raw_response_retrieve_results(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve_results(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve_results(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_runs.with_streaming_response.retrieve_results( prompt_id=1, evaluation_run_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -377,7 +377,7 @@ async def test_streaming_response_retrieve_results(self, async_client: AsyncGrad @pytest.mark.skip() @parametrize - async def test_path_params_retrieve_results(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve_results(self, async_client: AsyncGradient) -> None: with 
pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): await async_client.agents.evaluation_runs.with_raw_response.retrieve_results( prompt_id=1, diff --git a/tests/api_resources/agents/test_evaluation_test_cases.py b/tests/api_resources/agents/test_evaluation_test_cases.py index ae986abc..2860aa2c 100644 --- a/tests/api_resources/agents/test_evaluation_test_cases.py +++ b/tests/api_resources/agents/test_evaluation_test_cases.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.agents import ( +from gradient.types.agents import ( EvaluationTestCaseListResponse, EvaluationTestCaseCreateResponse, EvaluationTestCaseUpdateResponse, @@ -25,13 +25,13 @@ class TestEvaluationTestCases: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.create() assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def test_method_create_with_all_params(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.create( dataset_uuid='"123e4567-e89b-12d3-a456-426614174000"', description='"example string"', @@ -49,7 +49,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.evaluation_test_cases.with_raw_response.create() assert response.is_closed is True @@ -59,7 +59,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.evaluation_test_cases.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -71,7 +71,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.retrieve( test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -79,7 +79,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve_with_all_params(self, client: GradientAI) -> None: + def test_method_retrieve_with_all_params(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.retrieve( test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', evaluation_test_case_version=0, @@ -88,7 +88,7 @@ def test_method_retrieve_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.agents.evaluation_test_cases.with_raw_response.retrieve( test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -100,7 
+100,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.agents.evaluation_test_cases.with_streaming_response.retrieve( test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -114,7 +114,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `test_case_uuid` but received ''"): client.agents.evaluation_test_cases.with_raw_response.retrieve( test_case_uuid="", @@ -122,7 +122,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.update( path_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -130,7 +130,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.update( path_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', dataset_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -149,7 +149,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.evaluation_test_cases.with_raw_response.update( path_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -161,7 +161,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.evaluation_test_cases.with_streaming_response.update( path_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -175,7 +175,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_test_case_uuid` but received ''"): client.agents.evaluation_test_cases.with_raw_response.update( path_test_case_uuid="", @@ -183,13 +183,13 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.list() assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"]) @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.evaluation_test_cases.with_raw_response.list() assert 
response.is_closed is True @@ -199,7 +199,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.evaluation_test_cases.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -211,7 +211,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_evaluation_runs(self, client: GradientAI) -> None: + def test_method_list_evaluation_runs(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.list_evaluation_runs( evaluation_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -219,7 +219,7 @@ def test_method_list_evaluation_runs(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_evaluation_runs_with_all_params(self, client: GradientAI) -> None: + def test_method_list_evaluation_runs_with_all_params(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.list_evaluation_runs( evaluation_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', evaluation_test_case_version=0, @@ -228,7 +228,7 @@ def test_method_list_evaluation_runs_with_all_params(self, client: GradientAI) - @pytest.mark.skip() @parametrize - def test_raw_response_list_evaluation_runs(self, client: GradientAI) -> None: + def test_raw_response_list_evaluation_runs(self, client: Gradient) -> None: response = client.agents.evaluation_test_cases.with_raw_response.list_evaluation_runs( evaluation_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -240,7 +240,7 @@ def test_raw_response_list_evaluation_runs(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_evaluation_runs(self, client: GradientAI) -> None: + def test_streaming_response_list_evaluation_runs(self, client: Gradient) -> None: with client.agents.evaluation_test_cases.with_streaming_response.list_evaluation_runs( evaluation_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -254,7 +254,7 @@ def test_streaming_response_list_evaluation_runs(self, client: GradientAI) -> No @pytest.mark.skip() @parametrize - def test_path_params_list_evaluation_runs(self, client: GradientAI) -> None: + def test_path_params_list_evaluation_runs(self, client: Gradient) -> None: with pytest.raises( ValueError, match=r"Expected a non-empty value for `evaluation_test_case_uuid` but received ''" ): @@ -270,13 +270,13 @@ class TestAsyncEvaluationTestCases: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.create() assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.create( dataset_uuid='"123e4567-e89b-12d3-a456-426614174000"', description='"example string"', @@ -294,7 +294,7 @@ async def 
test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_test_cases.with_raw_response.create() assert response.is_closed is True @@ -304,7 +304,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_test_cases.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -316,7 +316,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.retrieve( test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -324,7 +324,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_retrieve_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.retrieve( test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', evaluation_test_case_version=0, @@ -333,7 +333,7 @@ async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_test_cases.with_raw_response.retrieve( test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -345,7 +345,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_test_cases.with_streaming_response.retrieve( test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -359,7 +359,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `test_case_uuid` but received ''"): await async_client.agents.evaluation_test_cases.with_raw_response.retrieve( test_case_uuid="", @@ -367,7 +367,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: evaluation_test_case = await 
async_client.agents.evaluation_test_cases.update( path_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -375,7 +375,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.update( path_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', dataset_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -394,7 +394,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_test_cases.with_raw_response.update( path_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -406,7 +406,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_test_cases.with_streaming_response.update( path_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -420,7 +420,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_test_case_uuid` but received ''"): await async_client.agents.evaluation_test_cases.with_raw_response.update( path_test_case_uuid="", @@ -428,13 +428,13 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.list() assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"]) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_test_cases.with_raw_response.list() assert response.is_closed is True @@ -444,7 +444,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_test_cases.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -456,7 +456,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_list_evaluation_runs(self, async_client: AsyncGradientAI) -> None: + async def 
test_method_list_evaluation_runs(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.list_evaluation_runs( evaluation_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -464,7 +464,7 @@ async def test_method_list_evaluation_runs(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_method_list_evaluation_runs_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_evaluation_runs_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.list_evaluation_runs( evaluation_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', evaluation_test_case_version=0, @@ -473,7 +473,7 @@ async def test_method_list_evaluation_runs_with_all_params(self, async_client: A @pytest.mark.skip() @parametrize - async def test_raw_response_list_evaluation_runs(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_evaluation_runs(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_test_cases.with_raw_response.list_evaluation_runs( evaluation_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -485,7 +485,7 @@ async def test_raw_response_list_evaluation_runs(self, async_client: AsyncGradie @pytest.mark.skip() @parametrize - async def test_streaming_response_list_evaluation_runs(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_evaluation_runs(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_test_cases.with_streaming_response.list_evaluation_runs( evaluation_test_case_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -499,7 +499,7 @@ async def test_streaming_response_list_evaluation_runs(self, async_client: Async @pytest.mark.skip() @parametrize - async def test_path_params_list_evaluation_runs(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list_evaluation_runs(self, async_client: AsyncGradient) -> None: with pytest.raises( ValueError, match=r"Expected a non-empty value for `evaluation_test_case_uuid` but received ''" ): diff --git a/tests/api_resources/agents/test_functions.py b/tests/api_resources/agents/test_functions.py index 624446e0..0ba54432 100644 --- a/tests/api_resources/agents/test_functions.py +++ b/tests/api_resources/agents/test_functions.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.agents import ( +from gradient.types.agents import ( FunctionCreateResponse, FunctionDeleteResponse, FunctionUpdateResponse, @@ -23,7 +23,7 @@ class TestFunctions: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: function = client.agents.functions.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -31,7 +31,7 @@ def test_method_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def test_method_create_with_all_params(self, client: Gradient) -> None: function = client.agents.functions.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', body_agent_uuid='"12345678-1234-1234-1234-123456789012"', @@ -46,7 +46,7 @@ def 
test_method_create_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.functions.with_raw_response.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -58,7 +58,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.functions.with_streaming_response.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -72,7 +72,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_create(self, client: GradientAI) -> None: + def test_path_params_create(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): client.agents.functions.with_raw_response.create( path_agent_uuid="", @@ -80,7 +80,7 @@ def test_path_params_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: function = client.agents.functions.update( path_function_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -89,7 +89,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: function = client.agents.functions.update( path_function_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -106,7 +106,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.functions.with_raw_response.update( path_function_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -119,7 +119,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.functions.with_streaming_response.update( path_function_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -134,7 +134,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): client.agents.functions.with_raw_response.update( path_function_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -149,7 +149,7 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: 
Gradient) -> None: function = client.agents.functions.delete( function_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -158,7 +158,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.agents.functions.with_raw_response.delete( function_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -171,7 +171,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.agents.functions.with_streaming_response.delete( function_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -186,7 +186,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): client.agents.functions.with_raw_response.delete( function_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -207,7 +207,7 @@ class TestAsyncFunctions: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: function = await async_client.agents.functions.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -215,7 +215,7 @@ async def test_method_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: function = await async_client.agents.functions.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', body_agent_uuid='"12345678-1234-1234-1234-123456789012"', @@ -230,7 +230,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.functions.with_raw_response.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -242,7 +242,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.functions.with_streaming_response.create( path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -256,7 +256,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_create(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_create(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for 
`path_agent_uuid` but received ''"): await async_client.agents.functions.with_raw_response.create( path_agent_uuid="", @@ -264,7 +264,7 @@ async def test_path_params_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: function = await async_client.agents.functions.update( path_function_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -273,7 +273,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: function = await async_client.agents.functions.update( path_function_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -290,7 +290,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.functions.with_raw_response.update( path_function_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -303,7 +303,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.functions.with_streaming_response.update( path_function_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -318,7 +318,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): await async_client.agents.functions.with_raw_response.update( path_function_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -333,7 +333,7 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: function = await async_client.agents.functions.delete( function_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -342,7 +342,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.agents.functions.with_raw_response.delete( function_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -355,7 +355,7 @@ async def 
test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.agents.functions.with_streaming_response.delete( function_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -370,7 +370,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): await async_client.agents.functions.with_raw_response.delete( function_uuid='"123e4567-e89b-12d3-a456-426614174000"', diff --git a/tests/api_resources/agents/test_knowledge_bases.py b/tests/api_resources/agents/test_knowledge_bases.py index 7ac99316..dd35e5f4 100644 --- a/tests/api_resources/agents/test_knowledge_bases.py +++ b/tests/api_resources/agents/test_knowledge_bases.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.agents import APILinkKnowledgeBaseOutput, KnowledgeBaseDetachResponse +from gradient.types.agents import APILinkKnowledgeBaseOutput, KnowledgeBaseDetachResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -19,7 +19,7 @@ class TestKnowledgeBases: @pytest.mark.skip() @parametrize - def test_method_attach(self, client: GradientAI) -> None: + def test_method_attach(self, client: Gradient) -> None: knowledge_base = client.agents.knowledge_bases.attach( "agent_uuid", ) @@ -27,7 +27,7 @@ def test_method_attach(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_attach(self, client: GradientAI) -> None: + def test_raw_response_attach(self, client: Gradient) -> None: response = client.agents.knowledge_bases.with_raw_response.attach( "agent_uuid", ) @@ -39,7 +39,7 @@ def test_raw_response_attach(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_attach(self, client: GradientAI) -> None: + def test_streaming_response_attach(self, client: Gradient) -> None: with client.agents.knowledge_bases.with_streaming_response.attach( "agent_uuid", ) as response: @@ -53,7 +53,7 @@ def test_streaming_response_attach(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_attach(self, client: GradientAI) -> None: + def test_path_params_attach(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): client.agents.knowledge_bases.with_raw_response.attach( "", @@ -61,7 +61,7 @@ def test_path_params_attach(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_attach_single(self, client: GradientAI) -> None: + def test_method_attach_single(self, client: Gradient) -> None: knowledge_base = client.agents.knowledge_bases.attach_single( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -70,7 +70,7 @@ def test_method_attach_single(self, client: GradientAI) -> None: @pytest.mark.skip() 
@parametrize - def test_raw_response_attach_single(self, client: GradientAI) -> None: + def test_raw_response_attach_single(self, client: Gradient) -> None: response = client.agents.knowledge_bases.with_raw_response.attach_single( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -83,7 +83,7 @@ def test_raw_response_attach_single(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_attach_single(self, client: GradientAI) -> None: + def test_streaming_response_attach_single(self, client: Gradient) -> None: with client.agents.knowledge_bases.with_streaming_response.attach_single( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -98,7 +98,7 @@ def test_streaming_response_attach_single(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_attach_single(self, client: GradientAI) -> None: + def test_path_params_attach_single(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): client.agents.knowledge_bases.with_raw_response.attach_single( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -113,7 +113,7 @@ def test_path_params_attach_single(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_detach(self, client: GradientAI) -> None: + def test_method_detach(self, client: Gradient) -> None: knowledge_base = client.agents.knowledge_bases.detach( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -122,7 +122,7 @@ def test_method_detach(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_detach(self, client: GradientAI) -> None: + def test_raw_response_detach(self, client: Gradient) -> None: response = client.agents.knowledge_bases.with_raw_response.detach( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -135,7 +135,7 @@ def test_raw_response_detach(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_detach(self, client: GradientAI) -> None: + def test_streaming_response_detach(self, client: Gradient) -> None: with client.agents.knowledge_bases.with_streaming_response.detach( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -150,7 +150,7 @@ def test_streaming_response_detach(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_detach(self, client: GradientAI) -> None: + def test_path_params_detach(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): client.agents.knowledge_bases.with_raw_response.detach( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -171,7 +171,7 @@ class TestAsyncKnowledgeBases: @pytest.mark.skip() @parametrize - async def test_method_attach(self, async_client: AsyncGradientAI) -> None: + async def test_method_attach(self, async_client: AsyncGradient) -> None: knowledge_base = await async_client.agents.knowledge_bases.attach( "agent_uuid", ) @@ -179,7 +179,7 @@ async def test_method_attach(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_attach(self, 
async_client: AsyncGradientAI) -> None: + async def test_raw_response_attach(self, async_client: AsyncGradient) -> None: response = await async_client.agents.knowledge_bases.with_raw_response.attach( "agent_uuid", ) @@ -191,7 +191,7 @@ async def test_raw_response_attach(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_attach(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_attach(self, async_client: AsyncGradient) -> None: async with async_client.agents.knowledge_bases.with_streaming_response.attach( "agent_uuid", ) as response: @@ -205,7 +205,7 @@ async def test_streaming_response_attach(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_attach(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_attach(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): await async_client.agents.knowledge_bases.with_raw_response.attach( "", @@ -213,7 +213,7 @@ async def test_path_params_attach(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_attach_single(self, async_client: AsyncGradientAI) -> None: + async def test_method_attach_single(self, async_client: AsyncGradient) -> None: knowledge_base = await async_client.agents.knowledge_bases.attach_single( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -222,7 +222,7 @@ async def test_method_attach_single(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_raw_response_attach_single(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_attach_single(self, async_client: AsyncGradient) -> None: response = await async_client.agents.knowledge_bases.with_raw_response.attach_single( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -235,7 +235,7 @@ async def test_raw_response_attach_single(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def test_streaming_response_attach_single(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_attach_single(self, async_client: AsyncGradient) -> None: async with async_client.agents.knowledge_bases.with_streaming_response.attach_single( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -250,7 +250,7 @@ async def test_streaming_response_attach_single(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_path_params_attach_single(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_attach_single(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): await async_client.agents.knowledge_bases.with_raw_response.attach_single( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -265,7 +265,7 @@ async def test_path_params_attach_single(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_detach(self, async_client: AsyncGradientAI) -> None: + async def test_method_detach(self, async_client: AsyncGradient) -> None: knowledge_base = await async_client.agents.knowledge_bases.detach( 
knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -274,7 +274,7 @@ async def test_method_detach(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_detach(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_detach(self, async_client: AsyncGradient) -> None: response = await async_client.agents.knowledge_bases.with_raw_response.detach( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -287,7 +287,7 @@ async def test_raw_response_detach(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_detach(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_detach(self, async_client: AsyncGradient) -> None: async with async_client.agents.knowledge_bases.with_streaming_response.detach( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -302,7 +302,7 @@ async def test_streaming_response_detach(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_detach(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_detach(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): await async_client.agents.knowledge_bases.with_raw_response.detach( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', diff --git a/tests/api_resources/agents/test_routes.py b/tests/api_resources/agents/test_routes.py index 256a4757..294fa853 100644 --- a/tests/api_resources/agents/test_routes.py +++ b/tests/api_resources/agents/test_routes.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.agents import ( +from gradient.types.agents import ( RouteAddResponse, RouteViewResponse, RouteDeleteResponse, @@ -24,7 +24,7 @@ class TestRoutes: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: route = client.agents.routes.update( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -33,7 +33,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: route = client.agents.routes.update( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -47,7 +47,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.routes.with_raw_response.update( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -60,7 +60,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, 
client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.routes.with_streaming_response.update( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -75,7 +75,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises( ValueError, match=r"Expected a non-empty value for `path_parent_agent_uuid` but received ''" ): @@ -92,7 +92,7 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: route = client.agents.routes.delete( child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -101,7 +101,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.agents.routes.with_raw_response.delete( child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -114,7 +114,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.agents.routes.with_streaming_response.delete( child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -129,7 +129,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `parent_agent_uuid` but received ''"): client.agents.routes.with_raw_response.delete( child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -144,7 +144,7 @@ def test_path_params_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_add(self, client: GradientAI) -> None: + def test_method_add(self, client: Gradient) -> None: route = client.agents.routes.add( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -153,7 +153,7 @@ def test_method_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_add_with_all_params(self, client: GradientAI) -> None: + def test_method_add_with_all_params(self, client: Gradient) -> None: route = client.agents.routes.add( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -166,7 +166,7 @@ def test_method_add_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_add(self, client: GradientAI) -> None: + def test_raw_response_add(self, client: Gradient) -> None: response = client.agents.routes.with_raw_response.add( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', 
path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -179,7 +179,7 @@ def test_raw_response_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_add(self, client: GradientAI) -> None: + def test_streaming_response_add(self, client: Gradient) -> None: with client.agents.routes.with_streaming_response.add( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -194,7 +194,7 @@ def test_streaming_response_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_add(self, client: GradientAI) -> None: + def test_path_params_add(self, client: Gradient) -> None: with pytest.raises( ValueError, match=r"Expected a non-empty value for `path_parent_agent_uuid` but received ''" ): @@ -211,7 +211,7 @@ def test_path_params_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_view(self, client: GradientAI) -> None: + def test_method_view(self, client: Gradient) -> None: route = client.agents.routes.view( "uuid", ) @@ -219,7 +219,7 @@ def test_method_view(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_view(self, client: GradientAI) -> None: + def test_raw_response_view(self, client: Gradient) -> None: response = client.agents.routes.with_raw_response.view( "uuid", ) @@ -231,7 +231,7 @@ def test_raw_response_view(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_view(self, client: GradientAI) -> None: + def test_streaming_response_view(self, client: Gradient) -> None: with client.agents.routes.with_streaming_response.view( "uuid", ) as response: @@ -245,7 +245,7 @@ def test_streaming_response_view(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_view(self, client: GradientAI) -> None: + def test_path_params_view(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.agents.routes.with_raw_response.view( "", @@ -259,7 +259,7 @@ class TestAsyncRoutes: @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: route = await async_client.agents.routes.update( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -268,7 +268,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: route = await async_client.agents.routes.update( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -282,7 +282,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.routes.with_raw_response.update( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -295,7 
+295,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.routes.with_streaming_response.update( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -310,7 +310,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises( ValueError, match=r"Expected a non-empty value for `path_parent_agent_uuid` but received ''" ): @@ -327,7 +327,7 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: route = await async_client.agents.routes.delete( child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -336,7 +336,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.agents.routes.with_raw_response.delete( child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -349,7 +349,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.agents.routes.with_streaming_response.delete( child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -364,7 +364,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `parent_agent_uuid` but received ''"): await async_client.agents.routes.with_raw_response.delete( child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -379,7 +379,7 @@ async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_add(self, async_client: AsyncGradientAI) -> None: + async def test_method_add(self, async_client: AsyncGradient) -> None: route = await async_client.agents.routes.add( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -388,7 +388,7 @@ async def test_method_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_add_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def 
test_method_add_with_all_params(self, async_client: AsyncGradient) -> None: route = await async_client.agents.routes.add( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -401,7 +401,7 @@ async def test_method_add_with_all_params(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_add(self, async_client: AsyncGradient) -> None: response = await async_client.agents.routes.with_raw_response.add( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -414,7 +414,7 @@ async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_add(self, async_client: AsyncGradient) -> None: async with async_client.agents.routes.with_streaming_response.add( path_child_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -429,7 +429,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> No @pytest.mark.skip() @parametrize - async def test_path_params_add(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_add(self, async_client: AsyncGradient) -> None: with pytest.raises( ValueError, match=r"Expected a non-empty value for `path_parent_agent_uuid` but received ''" ): @@ -446,7 +446,7 @@ async def test_path_params_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_view(self, async_client: AsyncGradientAI) -> None: + async def test_method_view(self, async_client: AsyncGradient) -> None: route = await async_client.agents.routes.view( "uuid", ) @@ -454,7 +454,7 @@ async def test_method_view(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_view(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_view(self, async_client: AsyncGradient) -> None: response = await async_client.agents.routes.with_raw_response.view( "uuid", ) @@ -466,7 +466,7 @@ async def test_raw_response_view(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_view(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_view(self, async_client: AsyncGradient) -> None: async with async_client.agents.routes.with_streaming_response.view( "uuid", ) as response: @@ -480,7 +480,7 @@ async def test_streaming_response_view(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_path_params_view(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_view(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.agents.routes.with_raw_response.view( "", diff --git a/tests/api_resources/agents/test_versions.py b/tests/api_resources/agents/test_versions.py index 158856ed..4b45edf7 100644 --- a/tests/api_resources/agents/test_versions.py +++ b/tests/api_resources/agents/test_versions.py @@ -7,12 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from 
do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.agents import ( - VersionListResponse, - VersionUpdateResponse, -) +from gradient.types.agents import VersionListResponse, VersionUpdateResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -22,7 +19,7 @@ class TestVersions: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: version = client.agents.versions.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -30,7 +27,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: version = client.agents.versions.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', body_uuid='"12345678-1234-1234-1234-123456789012"', @@ -40,7 +37,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.versions.with_raw_response.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -52,7 +49,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.versions.with_streaming_response.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -66,7 +63,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): client.agents.versions.with_raw_response.update( path_uuid="", @@ -74,7 +71,7 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: version = client.agents.versions.list( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -82,7 +79,7 @@ def test_method_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: version = client.agents.versions.list( uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -92,7 +89,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.versions.with_raw_response.list( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -104,7 +101,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.versions.with_streaming_response.list( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -118,7 +115,7 
@@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list(self, client: GradientAI) -> None: + def test_path_params_list(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.agents.versions.with_raw_response.list( uuid="", @@ -132,7 +129,7 @@ class TestAsyncVersions: @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: version = await async_client.agents.versions.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -140,7 +137,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: version = await async_client.agents.versions.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', body_uuid='"12345678-1234-1234-1234-123456789012"', @@ -150,7 +147,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.versions.with_raw_response.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -162,7 +159,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.versions.with_streaming_response.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -176,7 +173,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): await async_client.agents.versions.with_raw_response.update( path_uuid="", @@ -184,7 +181,7 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: version = await async_client.agents.versions.list( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -192,7 +189,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: version = await async_client.agents.versions.list( uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -202,7 +199,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def 
test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.versions.with_raw_response.list( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -214,7 +211,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.versions.with_streaming_response.list( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -228,7 +225,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.agents.versions.with_raw_response.list( uuid="", diff --git a/tests/api_resources/chat/test_completions.py b/tests/api_resources/chat/test_completions.py index 95b02106..a25fd3c4 100644 --- a/tests/api_resources/chat/test_completions.py +++ b/tests/api_resources/chat/test_completions.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.chat import CompletionCreateResponse +from gradient.types.chat import CompletionCreateResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -19,7 +19,7 @@ class TestCompletions: @pytest.mark.skip() @parametrize - def test_method_create_overload_1(self, client: GradientAI) -> None: + def test_method_create_overload_1(self, client: Gradient) -> None: completion = client.chat.completions.create( messages=[ { @@ -33,7 +33,7 @@ def test_method_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> None: + def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None: completion = client.chat.completions.create( messages=[ { @@ -73,7 +73,7 @@ def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_raw_response_create_overload_1(self, client: GradientAI) -> None: + def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.chat.completions.with_raw_response.create( messages=[ { @@ -91,7 +91,7 @@ def test_raw_response_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create_overload_1(self, client: GradientAI) -> None: + def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.chat.completions.with_streaming_response.create( messages=[ { @@ -111,7 +111,7 @@ def test_streaming_response_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_overload_2(self, client: GradientAI) -> None: + def test_method_create_overload_2(self, client: Gradient) -> None: completion_stream = client.chat.completions.create( messages=[ { @@ -126,7 +126,7 @@ def test_method_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params_overload_2(self, 
client: GradientAI) -> None: + def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None: completion_stream = client.chat.completions.create( messages=[ { @@ -166,7 +166,7 @@ def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_raw_response_create_overload_2(self, client: GradientAI) -> None: + def test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.chat.completions.with_raw_response.create( messages=[ { @@ -184,7 +184,7 @@ def test_raw_response_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create_overload_2(self, client: GradientAI) -> None: + def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.chat.completions.with_streaming_response.create( messages=[ { @@ -211,7 +211,7 @@ class TestAsyncCompletions: @pytest.mark.skip() @parametrize - async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: completion = await async_client.chat.completions.create( messages=[ { @@ -225,7 +225,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: completion = await async_client.chat.completions.create( messages=[ { @@ -265,7 +265,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.chat.completions.with_raw_response.create( messages=[ { @@ -283,7 +283,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.chat.completions.with_streaming_response.create( messages=[ { @@ -303,7 +303,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: completion_stream = await async_client.chat.completions.create( messages=[ { @@ -318,7 +318,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: completion_stream = await async_client.chat.completions.create( messages=[ { @@ -358,7 +358,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create_overload_2(self, 
async_client: AsyncGradient) -> None: response = await async_client.chat.completions.with_raw_response.create( messages=[ { @@ -376,7 +376,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.chat.completions.with_streaming_response.create( messages=[ { diff --git a/tests/api_resources/databases/schema_registry/test_config.py b/tests/api_resources/databases/schema_registry/test_config.py index f63d62c0..024d8b0a 100644 --- a/tests/api_resources/databases/schema_registry/test_config.py +++ b/tests/api_resources/databases/schema_registry/test_config.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.databases.schema_registry import ( +from gradient.types.databases.schema_registry import ( ConfigUpdateResponse, ConfigRetrieveResponse, ConfigUpdateSubjectResponse, @@ -24,7 +24,7 @@ class TestConfig: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: config = client.databases.schema_registry.config.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -32,7 +32,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.databases.schema_registry.config.with_raw_response.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -44,7 +44,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.databases.schema_registry.config.with_streaming_response.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) as response: @@ -58,7 +58,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): client.databases.schema_registry.config.with_raw_response.retrieve( "", @@ -66,7 +66,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: config = client.databases.schema_registry.config.update( database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", compatibility_level="BACKWARD", @@ -75,7 +75,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.databases.schema_registry.config.with_raw_response.update( database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", compatibility_level="BACKWARD", @@ -88,7 +88,7 @@ def test_raw_response_update(self, 
client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.databases.schema_registry.config.with_streaming_response.update( database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", compatibility_level="BACKWARD", @@ -103,7 +103,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): client.databases.schema_registry.config.with_raw_response.update( database_cluster_uuid="", @@ -112,7 +112,7 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve_subject(self, client: GradientAI) -> None: + def test_method_retrieve_subject(self, client: Gradient) -> None: config = client.databases.schema_registry.config.retrieve_subject( subject_name="customer-schema", database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", @@ -121,7 +121,7 @@ def test_method_retrieve_subject(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve_subject(self, client: GradientAI) -> None: + def test_raw_response_retrieve_subject(self, client: Gradient) -> None: response = client.databases.schema_registry.config.with_raw_response.retrieve_subject( subject_name="customer-schema", database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", @@ -134,7 +134,7 @@ def test_raw_response_retrieve_subject(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve_subject(self, client: GradientAI) -> None: + def test_streaming_response_retrieve_subject(self, client: Gradient) -> None: with client.databases.schema_registry.config.with_streaming_response.retrieve_subject( subject_name="customer-schema", database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", @@ -149,7 +149,7 @@ def test_streaming_response_retrieve_subject(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve_subject(self, client: GradientAI) -> None: + def test_path_params_retrieve_subject(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): client.databases.schema_registry.config.with_raw_response.retrieve_subject( subject_name="customer-schema", @@ -164,7 +164,7 @@ def test_path_params_retrieve_subject(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_subject(self, client: GradientAI) -> None: + def test_method_update_subject(self, client: Gradient) -> None: config = client.databases.schema_registry.config.update_subject( subject_name="customer-schema", database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", @@ -174,7 +174,7 @@ def test_method_update_subject(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update_subject(self, client: GradientAI) -> None: + def test_raw_response_update_subject(self, client: Gradient) -> None: response = client.databases.schema_registry.config.with_raw_response.update_subject( subject_name="customer-schema", database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", @@ -188,7 +188,7 @@ 
def test_raw_response_update_subject(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update_subject(self, client: GradientAI) -> None: + def test_streaming_response_update_subject(self, client: Gradient) -> None: with client.databases.schema_registry.config.with_streaming_response.update_subject( subject_name="customer-schema", database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", @@ -204,7 +204,7 @@ def test_streaming_response_update_subject(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update_subject(self, client: GradientAI) -> None: + def test_path_params_update_subject(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): client.databases.schema_registry.config.with_raw_response.update_subject( subject_name="customer-schema", @@ -227,7 +227,7 @@ class TestAsyncConfig: @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: config = await async_client.databases.schema_registry.config.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -235,7 +235,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.databases.schema_registry.config.with_raw_response.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -247,7 +247,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.databases.schema_registry.config.with_streaming_response.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) as response: @@ -261,7 +261,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): await async_client.databases.schema_registry.config.with_raw_response.retrieve( "", @@ -269,7 +269,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: config = await async_client.databases.schema_registry.config.update( database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", compatibility_level="BACKWARD", @@ -278,7 +278,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.databases.schema_registry.config.with_raw_response.update( 
database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", compatibility_level="BACKWARD", @@ -291,7 +291,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.databases.schema_registry.config.with_streaming_response.update( database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", compatibility_level="BACKWARD", @@ -306,7 +306,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): await async_client.databases.schema_registry.config.with_raw_response.update( database_cluster_uuid="", @@ -315,7 +315,7 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_retrieve_subject(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve_subject(self, async_client: AsyncGradient) -> None: config = await async_client.databases.schema_registry.config.retrieve_subject( subject_name="customer-schema", database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", @@ -324,7 +324,7 @@ async def test_method_retrieve_subject(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve_subject(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve_subject(self, async_client: AsyncGradient) -> None: response = await async_client.databases.schema_registry.config.with_raw_response.retrieve_subject( subject_name="customer-schema", database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", @@ -337,7 +337,7 @@ async def test_raw_response_retrieve_subject(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve_subject(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve_subject(self, async_client: AsyncGradient) -> None: async with async_client.databases.schema_registry.config.with_streaming_response.retrieve_subject( subject_name="customer-schema", database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", @@ -352,7 +352,7 @@ async def test_streaming_response_retrieve_subject(self, async_client: AsyncGrad @pytest.mark.skip() @parametrize - async def test_path_params_retrieve_subject(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve_subject(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): await async_client.databases.schema_registry.config.with_raw_response.retrieve_subject( subject_name="customer-schema", @@ -367,7 +367,7 @@ async def test_path_params_retrieve_subject(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_method_update_subject(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_subject(self, async_client: AsyncGradient) -> None: config = await async_client.databases.schema_registry.config.update_subject( 
subject_name="customer-schema", database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", @@ -377,7 +377,7 @@ async def test_method_update_subject(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_raw_response_update_subject(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update_subject(self, async_client: AsyncGradient) -> None: response = await async_client.databases.schema_registry.config.with_raw_response.update_subject( subject_name="customer-schema", database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", @@ -391,7 +391,7 @@ async def test_raw_response_update_subject(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_streaming_response_update_subject(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update_subject(self, async_client: AsyncGradient) -> None: async with async_client.databases.schema_registry.config.with_streaming_response.update_subject( subject_name="customer-schema", database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", @@ -407,7 +407,7 @@ async def test_streaming_response_update_subject(self, async_client: AsyncGradie @pytest.mark.skip() @parametrize - async def test_path_params_update_subject(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update_subject(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): await async_client.databases.schema_registry.config.with_raw_response.update_subject( subject_name="customer-schema", diff --git a/tests/api_resources/gpu_droplets/account/test_keys.py b/tests/api_resources/gpu_droplets/account/test_keys.py index cf168f61..5a63c275 100644 --- a/tests/api_resources/gpu_droplets/account/test_keys.py +++ b/tests/api_resources/gpu_droplets/account/test_keys.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.gpu_droplets.account import ( +from gradient.types.gpu_droplets.account import ( KeyListResponse, KeyCreateResponse, KeyUpdateResponse, @@ -24,7 +24,7 @@ class TestKeys: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: key = client.gpu_droplets.account.keys.create( name="My SSH Public Key", public_key="ssh-rsa AEXAMPLEaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example", @@ -33,7 +33,7 @@ def test_method_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.gpu_droplets.account.keys.with_raw_response.create( name="My SSH Public Key", public_key="ssh-rsa AEXAMPLEaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example", @@ -46,7 +46,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.gpu_droplets.account.keys.with_streaming_response.create( name="My SSH Public Key", public_key="ssh-rsa 
AEXAMPLEaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example", @@ -61,7 +61,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: key = client.gpu_droplets.account.keys.retrieve( 512189, ) @@ -69,7 +69,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.account.keys.with_raw_response.retrieve( 512189, ) @@ -81,7 +81,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.account.keys.with_streaming_response.retrieve( 512189, ) as response: @@ -95,7 +95,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: key = client.gpu_droplets.account.keys.update( ssh_key_identifier=512189, ) @@ -103,7 +103,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: key = client.gpu_droplets.account.keys.update( ssh_key_identifier=512189, name="My SSH Public Key", @@ -112,7 +112,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.gpu_droplets.account.keys.with_raw_response.update( ssh_key_identifier=512189, ) @@ -124,7 +124,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.gpu_droplets.account.keys.with_streaming_response.update( ssh_key_identifier=512189, ) as response: @@ -138,13 +138,13 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: key = client.gpu_droplets.account.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: key = client.gpu_droplets.account.keys.list( page=1, per_page=1, @@ -153,7 +153,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.account.keys.with_raw_response.list() assert response.is_closed is True @@ -163,7 +163,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: 
@pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.account.keys.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -175,7 +175,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: key = client.gpu_droplets.account.keys.delete( 512189, ) @@ -183,7 +183,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.account.keys.with_raw_response.delete( 512189, ) @@ -195,7 +195,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.account.keys.with_streaming_response.delete( 512189, ) as response: @@ -215,7 +215,7 @@ class TestAsyncKeys: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: key = await async_client.gpu_droplets.account.keys.create( name="My SSH Public Key", public_key="ssh-rsa AEXAMPLEaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example", @@ -224,7 +224,7 @@ async def test_method_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.account.keys.with_raw_response.create( name="My SSH Public Key", public_key="ssh-rsa AEXAMPLEaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example", @@ -237,7 +237,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.account.keys.with_streaming_response.create( name="My SSH Public Key", public_key="ssh-rsa AEXAMPLEaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example", @@ -252,7 +252,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: key = await async_client.gpu_droplets.account.keys.retrieve( 512189, ) @@ -260,7 +260,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: 
response = await async_client.gpu_droplets.account.keys.with_raw_response.retrieve( 512189, ) @@ -272,7 +272,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.account.keys.with_streaming_response.retrieve( 512189, ) as response: @@ -286,7 +286,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: key = await async_client.gpu_droplets.account.keys.update( ssh_key_identifier=512189, ) @@ -294,7 +294,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.gpu_droplets.account.keys.update( ssh_key_identifier=512189, name="My SSH Public Key", @@ -303,7 +303,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.account.keys.with_raw_response.update( ssh_key_identifier=512189, ) @@ -315,7 +315,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.account.keys.with_streaming_response.update( ssh_key_identifier=512189, ) as response: @@ -329,13 +329,13 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: key = await async_client.gpu_droplets.account.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.gpu_droplets.account.keys.list( page=1, per_page=1, @@ -344,7 +344,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.account.keys.with_raw_response.list() assert response.is_closed is True @@ -354,7 +354,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> 
None: async with async_client.gpu_droplets.account.keys.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -366,7 +366,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: key = await async_client.gpu_droplets.account.keys.delete( 512189, ) @@ -374,7 +374,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.account.keys.with_raw_response.delete( 512189, ) @@ -386,7 +386,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.account.keys.with_streaming_response.delete( 512189, ) as response: diff --git a/tests/api_resources/gpu_droplets/firewalls/test_droplets.py b/tests/api_resources/gpu_droplets/firewalls/test_droplets.py index 819a5e6e..8f39a064 100644 --- a/tests/api_resources/gpu_droplets/firewalls/test_droplets.py +++ b/tests/api_resources/gpu_droplets/firewalls/test_droplets.py @@ -7,7 +7,7 @@ import pytest -from do_gradientai import GradientAI, AsyncGradientAI +from gradient import Gradient, AsyncGradient base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -17,7 +17,7 @@ class TestDroplets: @pytest.mark.skip() @parametrize - def test_method_add(self, client: GradientAI) -> None: + def test_method_add(self, client: Gradient) -> None: droplet = client.gpu_droplets.firewalls.droplets.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", droplet_ids=[49696269], @@ -26,7 +26,7 @@ def test_method_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_add(self, client: GradientAI) -> None: + def test_raw_response_add(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.droplets.with_raw_response.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", droplet_ids=[49696269], @@ -39,7 +39,7 @@ def test_raw_response_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_add(self, client: GradientAI) -> None: + def test_streaming_response_add(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.droplets.with_streaming_response.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", droplet_ids=[49696269], @@ -54,7 +54,7 @@ def test_streaming_response_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_add(self, client: GradientAI) -> None: + def test_path_params_add(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): client.gpu_droplets.firewalls.droplets.with_raw_response.add( firewall_id="", @@ -63,7 +63,7 @@ def test_path_params_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_remove(self, client: GradientAI) -> None: + def 
test_method_remove(self, client: Gradient) -> None: droplet = client.gpu_droplets.firewalls.droplets.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", droplet_ids=[49696269], @@ -72,7 +72,7 @@ def test_method_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_remove(self, client: GradientAI) -> None: + def test_raw_response_remove(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.droplets.with_raw_response.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", droplet_ids=[49696269], @@ -85,7 +85,7 @@ def test_raw_response_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_remove(self, client: GradientAI) -> None: + def test_streaming_response_remove(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.droplets.with_streaming_response.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", droplet_ids=[49696269], @@ -100,7 +100,7 @@ def test_streaming_response_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_remove(self, client: GradientAI) -> None: + def test_path_params_remove(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): client.gpu_droplets.firewalls.droplets.with_raw_response.remove( firewall_id="", @@ -115,7 +115,7 @@ class TestAsyncDroplets: @pytest.mark.skip() @parametrize - async def test_method_add(self, async_client: AsyncGradientAI) -> None: + async def test_method_add(self, async_client: AsyncGradient) -> None: droplet = await async_client.gpu_droplets.firewalls.droplets.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", droplet_ids=[49696269], @@ -124,7 +124,7 @@ async def test_method_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_add(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.droplets.with_raw_response.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", droplet_ids=[49696269], @@ -137,7 +137,7 @@ async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_add(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.droplets.with_streaming_response.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", droplet_ids=[49696269], @@ -152,7 +152,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> No @pytest.mark.skip() @parametrize - async def test_path_params_add(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_add(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): await async_client.gpu_droplets.firewalls.droplets.with_raw_response.add( firewall_id="", @@ -161,7 +161,7 @@ async def test_path_params_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_remove(self, async_client: AsyncGradientAI) -> None: + async def test_method_remove(self, async_client: AsyncGradient) -> None: droplet = await async_client.gpu_droplets.firewalls.droplets.remove( 
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", droplet_ids=[49696269], @@ -170,7 +170,7 @@ async def test_method_remove(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.droplets.with_raw_response.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", droplet_ids=[49696269], @@ -183,7 +183,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.droplets.with_streaming_response.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", droplet_ids=[49696269], @@ -198,7 +198,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_remove(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_remove(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): await async_client.gpu_droplets.firewalls.droplets.with_raw_response.remove( firewall_id="", diff --git a/tests/api_resources/gpu_droplets/firewalls/test_rules.py b/tests/api_resources/gpu_droplets/firewalls/test_rules.py index b2eab40c..2bd74228 100644 --- a/tests/api_resources/gpu_droplets/firewalls/test_rules.py +++ b/tests/api_resources/gpu_droplets/firewalls/test_rules.py @@ -7,7 +7,7 @@ import pytest -from do_gradientai import GradientAI, AsyncGradientAI +from gradient import Gradient, AsyncGradient base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -17,7 +17,7 @@ class TestRules: @pytest.mark.skip() @parametrize - def test_method_add(self, client: GradientAI) -> None: + def test_method_add(self, client: Gradient) -> None: rule = client.gpu_droplets.firewalls.rules.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", ) @@ -25,7 +25,7 @@ def test_method_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_add_with_all_params(self, client: GradientAI) -> None: + def test_method_add_with_all_params(self, client: Gradient) -> None: rule = client.gpu_droplets.firewalls.rules.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", inbound_rules=[ @@ -59,7 +59,7 @@ def test_method_add_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_add(self, client: GradientAI) -> None: + def test_raw_response_add(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.rules.with_raw_response.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", ) @@ -71,7 +71,7 @@ def test_raw_response_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_add(self, client: GradientAI) -> None: + def test_streaming_response_add(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.rules.with_streaming_response.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", ) as response: @@ -85,7 +85,7 @@ def test_streaming_response_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_add(self, client: 
GradientAI) -> None: + def test_path_params_add(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): client.gpu_droplets.firewalls.rules.with_raw_response.add( firewall_id="", @@ -93,7 +93,7 @@ def test_path_params_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_remove(self, client: GradientAI) -> None: + def test_method_remove(self, client: Gradient) -> None: rule = client.gpu_droplets.firewalls.rules.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", ) @@ -101,7 +101,7 @@ def test_method_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_remove_with_all_params(self, client: GradientAI) -> None: + def test_method_remove_with_all_params(self, client: Gradient) -> None: rule = client.gpu_droplets.firewalls.rules.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", inbound_rules=[ @@ -135,7 +135,7 @@ def test_method_remove_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_remove(self, client: GradientAI) -> None: + def test_raw_response_remove(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.rules.with_raw_response.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", ) @@ -147,7 +147,7 @@ def test_raw_response_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_remove(self, client: GradientAI) -> None: + def test_streaming_response_remove(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.rules.with_streaming_response.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", ) as response: @@ -161,7 +161,7 @@ def test_streaming_response_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_remove(self, client: GradientAI) -> None: + def test_path_params_remove(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): client.gpu_droplets.firewalls.rules.with_raw_response.remove( firewall_id="", @@ -175,7 +175,7 @@ class TestAsyncRules: @pytest.mark.skip() @parametrize - async def test_method_add(self, async_client: AsyncGradientAI) -> None: + async def test_method_add(self, async_client: AsyncGradient) -> None: rule = await async_client.gpu_droplets.firewalls.rules.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", ) @@ -183,7 +183,7 @@ async def test_method_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_add_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_add_with_all_params(self, async_client: AsyncGradient) -> None: rule = await async_client.gpu_droplets.firewalls.rules.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", inbound_rules=[ @@ -217,7 +217,7 @@ async def test_method_add_with_all_params(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_add(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.rules.with_raw_response.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", ) @@ -229,7 +229,7 @@ async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def 
test_streaming_response_add(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_add(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.rules.with_streaming_response.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", ) as response: @@ -243,7 +243,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> No @pytest.mark.skip() @parametrize - async def test_path_params_add(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_add(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): await async_client.gpu_droplets.firewalls.rules.with_raw_response.add( firewall_id="", @@ -251,7 +251,7 @@ async def test_path_params_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_remove(self, async_client: AsyncGradientAI) -> None: + async def test_method_remove(self, async_client: AsyncGradient) -> None: rule = await async_client.gpu_droplets.firewalls.rules.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", ) @@ -259,7 +259,7 @@ async def test_method_remove(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_remove_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_remove_with_all_params(self, async_client: AsyncGradient) -> None: rule = await async_client.gpu_droplets.firewalls.rules.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", inbound_rules=[ @@ -293,7 +293,7 @@ async def test_method_remove_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.rules.with_raw_response.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", ) @@ -305,7 +305,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.rules.with_streaming_response.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", ) as response: @@ -319,7 +319,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_remove(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_remove(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): await async_client.gpu_droplets.firewalls.rules.with_raw_response.remove( firewall_id="", diff --git a/tests/api_resources/gpu_droplets/firewalls/test_tags.py b/tests/api_resources/gpu_droplets/firewalls/test_tags.py index 25c9362b..cbd86f65 100644 --- a/tests/api_resources/gpu_droplets/firewalls/test_tags.py +++ b/tests/api_resources/gpu_droplets/firewalls/test_tags.py @@ -7,7 +7,7 @@ import pytest -from do_gradientai import GradientAI, AsyncGradientAI +from gradient import Gradient, AsyncGradient base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -17,7 +17,7 @@ class TestTags: 
@pytest.mark.skip() @parametrize - def test_method_add(self, client: GradientAI) -> None: + def test_method_add(self, client: Gradient) -> None: tag = client.gpu_droplets.firewalls.tags.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", tags=["frontend"], @@ -26,7 +26,7 @@ def test_method_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_add(self, client: GradientAI) -> None: + def test_raw_response_add(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.tags.with_raw_response.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", tags=["frontend"], @@ -39,7 +39,7 @@ def test_raw_response_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_add(self, client: GradientAI) -> None: + def test_streaming_response_add(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.tags.with_streaming_response.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", tags=["frontend"], @@ -54,7 +54,7 @@ def test_streaming_response_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_add(self, client: GradientAI) -> None: + def test_path_params_add(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): client.gpu_droplets.firewalls.tags.with_raw_response.add( firewall_id="", @@ -63,7 +63,7 @@ def test_path_params_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_remove(self, client: GradientAI) -> None: + def test_method_remove(self, client: Gradient) -> None: tag = client.gpu_droplets.firewalls.tags.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", tags=["frontend"], @@ -72,7 +72,7 @@ def test_method_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_remove(self, client: GradientAI) -> None: + def test_raw_response_remove(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.tags.with_raw_response.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", tags=["frontend"], @@ -85,7 +85,7 @@ def test_raw_response_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_remove(self, client: GradientAI) -> None: + def test_streaming_response_remove(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.tags.with_streaming_response.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", tags=["frontend"], @@ -100,7 +100,7 @@ def test_streaming_response_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_remove(self, client: GradientAI) -> None: + def test_path_params_remove(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): client.gpu_droplets.firewalls.tags.with_raw_response.remove( firewall_id="", @@ -115,7 +115,7 @@ class TestAsyncTags: @pytest.mark.skip() @parametrize - async def test_method_add(self, async_client: AsyncGradientAI) -> None: + async def test_method_add(self, async_client: AsyncGradient) -> None: tag = await async_client.gpu_droplets.firewalls.tags.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", tags=["frontend"], @@ -124,7 +124,7 @@ async def test_method_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None: + 
async def test_raw_response_add(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.tags.with_raw_response.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", tags=["frontend"], @@ -137,7 +137,7 @@ async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_add(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.tags.with_streaming_response.add( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", tags=["frontend"], @@ -152,7 +152,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> No @pytest.mark.skip() @parametrize - async def test_path_params_add(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_add(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): await async_client.gpu_droplets.firewalls.tags.with_raw_response.add( firewall_id="", @@ -161,7 +161,7 @@ async def test_path_params_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_remove(self, async_client: AsyncGradientAI) -> None: + async def test_method_remove(self, async_client: AsyncGradient) -> None: tag = await async_client.gpu_droplets.firewalls.tags.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", tags=["frontend"], @@ -170,7 +170,7 @@ async def test_method_remove(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.tags.with_raw_response.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", tags=["frontend"], @@ -183,7 +183,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.tags.with_streaming_response.remove( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", tags=["frontend"], @@ -198,7 +198,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_remove(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_remove(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): await async_client.gpu_droplets.firewalls.tags.with_raw_response.remove( firewall_id="", diff --git a/tests/api_resources/gpu_droplets/floating_ips/test_actions.py b/tests/api_resources/gpu_droplets/floating_ips/test_actions.py index ad26db8a..9417a880 100644 --- a/tests/api_resources/gpu_droplets/floating_ips/test_actions.py +++ b/tests/api_resources/gpu_droplets/floating_ips/test_actions.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.gpu_droplets.floating_ips import ( +from 
gradient.types.gpu_droplets.floating_ips import ( ActionListResponse, ActionCreateResponse, ActionRetrieveResponse, @@ -23,7 +23,7 @@ class TestActions: @pytest.mark.skip() @parametrize - def test_method_create_overload_1(self, client: GradientAI) -> None: + def test_method_create_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.floating_ips.actions.create( floating_ip="45.55.96.47", type="assign", @@ -32,7 +32,7 @@ def test_method_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create_overload_1(self, client: GradientAI) -> None: + def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.actions.with_raw_response.create( floating_ip="45.55.96.47", type="assign", @@ -45,7 +45,7 @@ def test_raw_response_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create_overload_1(self, client: GradientAI) -> None: + def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.actions.with_streaming_response.create( floating_ip="45.55.96.47", type="assign", @@ -60,7 +60,7 @@ def test_streaming_response_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_create_overload_1(self, client: GradientAI) -> None: + def test_path_params_create_overload_1(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): client.gpu_droplets.floating_ips.actions.with_raw_response.create( floating_ip="", @@ -69,7 +69,7 @@ def test_path_params_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_overload_2(self, client: GradientAI) -> None: + def test_method_create_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.floating_ips.actions.create( floating_ip="45.55.96.47", droplet_id=758604968, @@ -79,7 +79,7 @@ def test_method_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create_overload_2(self, client: GradientAI) -> None: + def test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.actions.with_raw_response.create( floating_ip="45.55.96.47", droplet_id=758604968, @@ -93,7 +93,7 @@ def test_raw_response_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create_overload_2(self, client: GradientAI) -> None: + def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.actions.with_streaming_response.create( floating_ip="45.55.96.47", droplet_id=758604968, @@ -109,7 +109,7 @@ def test_streaming_response_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_create_overload_2(self, client: GradientAI) -> None: + def test_path_params_create_overload_2(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): client.gpu_droplets.floating_ips.actions.with_raw_response.create( floating_ip="", @@ -119,7 +119,7 @@ def test_path_params_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def 
test_method_retrieve(self, client: Gradient) -> None: action = client.gpu_droplets.floating_ips.actions.retrieve( action_id=36804636, floating_ip="45.55.96.47", @@ -128,7 +128,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.actions.with_raw_response.retrieve( action_id=36804636, floating_ip="45.55.96.47", @@ -141,7 +141,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.actions.with_streaming_response.retrieve( action_id=36804636, floating_ip="45.55.96.47", @@ -156,7 +156,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): client.gpu_droplets.floating_ips.actions.with_raw_response.retrieve( action_id=36804636, @@ -165,7 +165,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: action = client.gpu_droplets.floating_ips.actions.list( "192.168.1.1", ) @@ -173,7 +173,7 @@ def test_method_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.actions.with_raw_response.list( "192.168.1.1", ) @@ -185,7 +185,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.actions.with_streaming_response.list( "192.168.1.1", ) as response: @@ -199,7 +199,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list(self, client: GradientAI) -> None: + def test_path_params_list(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): client.gpu_droplets.floating_ips.actions.with_raw_response.list( "", @@ -213,7 +213,7 @@ class TestAsyncActions: @pytest.mark.skip() @parametrize - async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.floating_ips.actions.create( floating_ip="45.55.96.47", type="assign", @@ -222,7 +222,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None: response = await 
async_client.gpu_droplets.floating_ips.actions.with_raw_response.create( floating_ip="45.55.96.47", type="assign", @@ -235,7 +235,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.create( floating_ip="45.55.96.47", type="assign", @@ -250,7 +250,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_path_params_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_create_overload_1(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): await async_client.gpu_droplets.floating_ips.actions.with_raw_response.create( floating_ip="", @@ -259,7 +259,7 @@ async def test_path_params_create_overload_1(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.floating_ips.actions.create( floating_ip="45.55.96.47", droplet_id=758604968, @@ -269,7 +269,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.create( floating_ip="45.55.96.47", droplet_id=758604968, @@ -283,7 +283,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.create( floating_ip="45.55.96.47", droplet_id=758604968, @@ -299,7 +299,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_path_params_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_create_overload_2(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): await async_client.gpu_droplets.floating_ips.actions.with_raw_response.create( floating_ip="", @@ -309,7 +309,7 @@ async def test_path_params_create_overload_2(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.floating_ips.actions.retrieve( action_id=36804636, floating_ip="45.55.96.47", @@ -318,7 +318,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, 
async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.retrieve( action_id=36804636, floating_ip="45.55.96.47", @@ -331,7 +331,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.retrieve( action_id=36804636, floating_ip="45.55.96.47", @@ -346,7 +346,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): await async_client.gpu_droplets.floating_ips.actions.with_raw_response.retrieve( action_id=36804636, @@ -355,7 +355,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.floating_ips.actions.list( "192.168.1.1", ) @@ -363,7 +363,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.list( "192.168.1.1", ) @@ -375,7 +375,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.list( "192.168.1.1", ) as response: @@ -389,7 +389,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): await async_client.gpu_droplets.floating_ips.actions.with_raw_response.list( "", diff --git a/tests/api_resources/gpu_droplets/images/test_actions.py b/tests/api_resources/gpu_droplets/images/test_actions.py index 35861bcb..f59e3986 100644 --- a/tests/api_resources/gpu_droplets/images/test_actions.py +++ b/tests/api_resources/gpu_droplets/images/test_actions.py @@ -7,10 +7,10 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.shared import Action -from do_gradientai.types.gpu_droplets.images import ActionListResponse +from gradient.types.shared import Action +from gradient.types.gpu_droplets.images import 
ActionListResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -20,7 +20,7 @@ class TestActions: @pytest.mark.skip() @parametrize - def test_method_create_overload_1(self, client: GradientAI) -> None: + def test_method_create_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.images.actions.create( image_id=62137902, type="convert", @@ -29,7 +29,7 @@ def test_method_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create_overload_1(self, client: GradientAI) -> None: + def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.images.actions.with_raw_response.create( image_id=62137902, type="convert", @@ -42,7 +42,7 @@ def test_raw_response_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create_overload_1(self, client: GradientAI) -> None: + def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.images.actions.with_streaming_response.create( image_id=62137902, type="convert", @@ -57,7 +57,7 @@ def test_streaming_response_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_overload_2(self, client: GradientAI) -> None: + def test_method_create_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.images.actions.create( image_id=62137902, region="nyc3", @@ -67,7 +67,7 @@ def test_method_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create_overload_2(self, client: GradientAI) -> None: + def test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.images.actions.with_raw_response.create( image_id=62137902, region="nyc3", @@ -81,7 +81,7 @@ def test_raw_response_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create_overload_2(self, client: GradientAI) -> None: + def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.images.actions.with_streaming_response.create( image_id=62137902, region="nyc3", @@ -97,7 +97,7 @@ def test_streaming_response_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: action = client.gpu_droplets.images.actions.retrieve( action_id=36804636, image_id=62137902, @@ -106,7 +106,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.images.actions.with_raw_response.retrieve( action_id=36804636, image_id=62137902, @@ -119,7 +119,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.images.actions.with_streaming_response.retrieve( action_id=36804636, image_id=62137902, @@ -134,7 +134,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) 
-> None: + def test_method_list(self, client: Gradient) -> None: action = client.gpu_droplets.images.actions.list( 0, ) @@ -142,7 +142,7 @@ def test_method_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.images.actions.with_raw_response.list( 0, ) @@ -154,7 +154,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.images.actions.with_streaming_response.list( 0, ) as response: @@ -174,7 +174,7 @@ class TestAsyncActions: @pytest.mark.skip() @parametrize - async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.images.actions.create( image_id=62137902, type="convert", @@ -183,7 +183,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.actions.with_raw_response.create( image_id=62137902, type="convert", @@ -196,7 +196,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.actions.with_streaming_response.create( image_id=62137902, type="convert", @@ -211,7 +211,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.images.actions.create( image_id=62137902, region="nyc3", @@ -221,7 +221,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.actions.with_raw_response.create( image_id=62137902, region="nyc3", @@ -235,7 +235,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.actions.with_streaming_response.create( image_id=62137902, region="nyc3", @@ -251,7 +251,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, 
async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.images.actions.retrieve( action_id=36804636, image_id=62137902, @@ -260,7 +260,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.actions.with_raw_response.retrieve( action_id=36804636, image_id=62137902, @@ -273,7 +273,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.actions.with_streaming_response.retrieve( action_id=36804636, image_id=62137902, @@ -288,7 +288,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.images.actions.list( 0, ) @@ -296,7 +296,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.actions.with_raw_response.list( 0, ) @@ -308,7 +308,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.actions.with_streaming_response.list( 0, ) as response: diff --git a/tests/api_resources/gpu_droplets/load_balancers/test_droplets.py b/tests/api_resources/gpu_droplets/load_balancers/test_droplets.py index f22213e2..200dad39 100644 --- a/tests/api_resources/gpu_droplets/load_balancers/test_droplets.py +++ b/tests/api_resources/gpu_droplets/load_balancers/test_droplets.py @@ -7,7 +7,7 @@ import pytest -from do_gradientai import GradientAI, AsyncGradientAI +from gradient import Gradient, AsyncGradient base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -17,7 +17,7 @@ class TestDroplets: @pytest.mark.skip() @parametrize - def test_method_add(self, client: GradientAI) -> None: + def test_method_add(self, client: Gradient) -> None: droplet = client.gpu_droplets.load_balancers.droplets.add( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", droplet_ids=[3164444, 3164445], @@ -26,7 +26,7 @@ def test_method_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_add(self, client: GradientAI) -> None: + def test_raw_response_add(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.droplets.with_raw_response.add( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", droplet_ids=[3164444, 3164445], @@ -39,7 +39,7 @@ def test_raw_response_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_add(self, client: GradientAI) -> None: + def 
test_streaming_response_add(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.droplets.with_streaming_response.add( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", droplet_ids=[3164444, 3164445], @@ -54,7 +54,7 @@ def test_streaming_response_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_add(self, client: GradientAI) -> None: + def test_path_params_add(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): client.gpu_droplets.load_balancers.droplets.with_raw_response.add( lb_id="", @@ -63,7 +63,7 @@ def test_path_params_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_remove(self, client: GradientAI) -> None: + def test_method_remove(self, client: Gradient) -> None: droplet = client.gpu_droplets.load_balancers.droplets.remove( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", droplet_ids=[3164444, 3164445], @@ -72,7 +72,7 @@ def test_method_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_remove(self, client: GradientAI) -> None: + def test_raw_response_remove(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.droplets.with_raw_response.remove( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", droplet_ids=[3164444, 3164445], @@ -85,7 +85,7 @@ def test_raw_response_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_remove(self, client: GradientAI) -> None: + def test_streaming_response_remove(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.droplets.with_streaming_response.remove( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", droplet_ids=[3164444, 3164445], @@ -100,7 +100,7 @@ def test_streaming_response_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_remove(self, client: GradientAI) -> None: + def test_path_params_remove(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): client.gpu_droplets.load_balancers.droplets.with_raw_response.remove( lb_id="", @@ -115,7 +115,7 @@ class TestAsyncDroplets: @pytest.mark.skip() @parametrize - async def test_method_add(self, async_client: AsyncGradientAI) -> None: + async def test_method_add(self, async_client: AsyncGradient) -> None: droplet = await async_client.gpu_droplets.load_balancers.droplets.add( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", droplet_ids=[3164444, 3164445], @@ -124,7 +124,7 @@ async def test_method_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_add(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.droplets.with_raw_response.add( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", droplet_ids=[3164444, 3164445], @@ -137,7 +137,7 @@ async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_add(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.droplets.with_streaming_response.add( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", droplet_ids=[3164444, 3164445], @@ -152,7 +152,7 @@ 
async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> No @pytest.mark.skip() @parametrize - async def test_path_params_add(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_add(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): await async_client.gpu_droplets.load_balancers.droplets.with_raw_response.add( lb_id="", @@ -161,7 +161,7 @@ async def test_path_params_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_remove(self, async_client: AsyncGradientAI) -> None: + async def test_method_remove(self, async_client: AsyncGradient) -> None: droplet = await async_client.gpu_droplets.load_balancers.droplets.remove( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", droplet_ids=[3164444, 3164445], @@ -170,7 +170,7 @@ async def test_method_remove(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.droplets.with_raw_response.remove( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", droplet_ids=[3164444, 3164445], @@ -183,7 +183,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.droplets.with_streaming_response.remove( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", droplet_ids=[3164444, 3164445], @@ -198,7 +198,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_remove(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_remove(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): await async_client.gpu_droplets.load_balancers.droplets.with_raw_response.remove( lb_id="", diff --git a/tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py b/tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py index d53bd0db..4f1decdf 100644 --- a/tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py +++ b/tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py @@ -7,7 +7,7 @@ import pytest -from do_gradientai import GradientAI, AsyncGradientAI +from gradient import Gradient, AsyncGradient base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -17,7 +17,7 @@ class TestForwardingRules: @pytest.mark.skip() @parametrize - def test_method_add(self, client: GradientAI) -> None: + def test_method_add(self, client: Gradient) -> None: forwarding_rule = client.gpu_droplets.load_balancers.forwarding_rules.add( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -33,7 +33,7 @@ def test_method_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_add(self, client: GradientAI) -> None: + def test_raw_response_add(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.add( 
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -53,7 +53,7 @@ def test_raw_response_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_add(self, client: GradientAI) -> None: + def test_streaming_response_add(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.add( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -75,7 +75,7 @@ def test_streaming_response_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_add(self, client: GradientAI) -> None: + def test_path_params_add(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.add( lb_id="", @@ -91,7 +91,7 @@ def test_path_params_add(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_remove(self, client: GradientAI) -> None: + def test_method_remove(self, client: Gradient) -> None: forwarding_rule = client.gpu_droplets.load_balancers.forwarding_rules.remove( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -107,7 +107,7 @@ def test_method_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_remove(self, client: GradientAI) -> None: + def test_raw_response_remove(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.remove( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -127,7 +127,7 @@ def test_raw_response_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_remove(self, client: GradientAI) -> None: + def test_streaming_response_remove(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.remove( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -149,7 +149,7 @@ def test_streaming_response_remove(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_remove(self, client: GradientAI) -> None: + def test_path_params_remove(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.remove( lb_id="", @@ -171,7 +171,7 @@ class TestAsyncForwardingRules: @pytest.mark.skip() @parametrize - async def test_method_add(self, async_client: AsyncGradientAI) -> None: + async def test_method_add(self, async_client: AsyncGradient) -> None: forwarding_rule = await async_client.gpu_droplets.load_balancers.forwarding_rules.add( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -187,7 +187,7 @@ async def test_method_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_add(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.add( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -207,7 +207,7 @@ async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> None: + 
async def test_streaming_response_add(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.add( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -229,7 +229,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> No @pytest.mark.skip() @parametrize - async def test_path_params_add(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_add(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): await async_client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.add( lb_id="", @@ -245,7 +245,7 @@ async def test_path_params_add(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_remove(self, async_client: AsyncGradientAI) -> None: + async def test_method_remove(self, async_client: AsyncGradient) -> None: forwarding_rule = await async_client.gpu_droplets.load_balancers.forwarding_rules.remove( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -261,7 +261,7 @@ async def test_method_remove(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.remove( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -281,7 +281,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.remove( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -303,7 +303,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_remove(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_remove(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): await async_client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.remove( lb_id="", diff --git a/tests/api_resources/gpu_droplets/test_actions.py b/tests/api_resources/gpu_droplets/test_actions.py index 74e45b44..7a52c608 100644 --- a/tests/api_resources/gpu_droplets/test_actions.py +++ b/tests/api_resources/gpu_droplets/test_actions.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( ActionListResponse, ActionInitiateResponse, ActionRetrieveResponse, @@ -24,7 +24,7 @@ class TestActions: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: action = client.gpu_droplets.actions.retrieve( action_id=36804636, droplet_id=3164444, @@ -33,7 +33,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: 
@pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.retrieve( action_id=36804636, droplet_id=3164444, @@ -46,7 +46,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.retrieve( action_id=36804636, droplet_id=3164444, @@ -61,7 +61,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: action = client.gpu_droplets.actions.list( droplet_id=3164444, ) @@ -69,7 +69,7 @@ def test_method_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: action = client.gpu_droplets.actions.list( droplet_id=3164444, page=1, @@ -79,7 +79,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.list( droplet_id=3164444, ) @@ -91,7 +91,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.list( droplet_id=3164444, ) as response: @@ -105,7 +105,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_bulk_initiate_overload_1(self, client: GradientAI) -> None: + def test_method_bulk_initiate_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.actions.bulk_initiate( type="reboot", ) @@ -113,7 +113,7 @@ def test_method_bulk_initiate_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_bulk_initiate_with_all_params_overload_1(self, client: GradientAI) -> None: + def test_method_bulk_initiate_with_all_params_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.actions.bulk_initiate( type="reboot", tag_name="tag_name", @@ -122,7 +122,7 @@ def test_method_bulk_initiate_with_all_params_overload_1(self, client: GradientA @pytest.mark.skip() @parametrize - def test_raw_response_bulk_initiate_overload_1(self, client: GradientAI) -> None: + def test_raw_response_bulk_initiate_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.bulk_initiate( type="reboot", ) @@ -134,7 +134,7 @@ def test_raw_response_bulk_initiate_overload_1(self, client: GradientAI) -> None @pytest.mark.skip() @parametrize - def test_streaming_response_bulk_initiate_overload_1(self, client: GradientAI) -> None: + def test_streaming_response_bulk_initiate_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.bulk_initiate( type="reboot", ) as response: @@ -148,7 +148,7 @@ def 
test_streaming_response_bulk_initiate_overload_1(self, client: GradientAI) - @pytest.mark.skip() @parametrize - def test_method_bulk_initiate_overload_2(self, client: GradientAI) -> None: + def test_method_bulk_initiate_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.actions.bulk_initiate( type="reboot", ) @@ -156,7 +156,7 @@ def test_method_bulk_initiate_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_bulk_initiate_with_all_params_overload_2(self, client: GradientAI) -> None: + def test_method_bulk_initiate_with_all_params_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.actions.bulk_initiate( type="reboot", tag_name="tag_name", @@ -166,7 +166,7 @@ def test_method_bulk_initiate_with_all_params_overload_2(self, client: GradientA @pytest.mark.skip() @parametrize - def test_raw_response_bulk_initiate_overload_2(self, client: GradientAI) -> None: + def test_raw_response_bulk_initiate_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.bulk_initiate( type="reboot", ) @@ -178,7 +178,7 @@ def test_raw_response_bulk_initiate_overload_2(self, client: GradientAI) -> None @pytest.mark.skip() @parametrize - def test_streaming_response_bulk_initiate_overload_2(self, client: GradientAI) -> None: + def test_streaming_response_bulk_initiate_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.bulk_initiate( type="reboot", ) as response: @@ -192,7 +192,7 @@ def test_streaming_response_bulk_initiate_overload_2(self, client: GradientAI) - @pytest.mark.skip() @parametrize - def test_method_initiate_overload_1(self, client: GradientAI) -> None: + def test_method_initiate_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -201,7 +201,7 @@ def test_method_initiate_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_initiate_overload_1(self, client: GradientAI) -> None: + def test_raw_response_initiate_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="reboot", @@ -214,7 +214,7 @@ def test_raw_response_initiate_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_initiate_overload_1(self, client: GradientAI) -> None: + def test_streaming_response_initiate_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="reboot", @@ -229,7 +229,7 @@ def test_streaming_response_initiate_overload_1(self, client: GradientAI) -> Non @pytest.mark.skip() @parametrize - def test_method_initiate_overload_2(self, client: GradientAI) -> None: + def test_method_initiate_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="enable_backups", @@ -238,7 +238,7 @@ def test_method_initiate_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_initiate_with_all_params_overload_2(self, client: GradientAI) -> None: + def test_method_initiate_with_all_params_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="enable_backups", @@ -252,7 +252,7 @@ def test_method_initiate_with_all_params_overload_2(self, client: GradientAI) -> 
@pytest.mark.skip() @parametrize - def test_raw_response_initiate_overload_2(self, client: GradientAI) -> None: + def test_raw_response_initiate_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="enable_backups", @@ -265,7 +265,7 @@ def test_raw_response_initiate_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_initiate_overload_2(self, client: GradientAI) -> None: + def test_streaming_response_initiate_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="enable_backups", @@ -280,7 +280,7 @@ def test_streaming_response_initiate_overload_2(self, client: GradientAI) -> Non @pytest.mark.skip() @parametrize - def test_method_initiate_overload_3(self, client: GradientAI) -> None: + def test_method_initiate_overload_3(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="enable_backups", @@ -289,7 +289,7 @@ def test_method_initiate_overload_3(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_initiate_with_all_params_overload_3(self, client: GradientAI) -> None: + def test_method_initiate_with_all_params_overload_3(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="enable_backups", @@ -303,7 +303,7 @@ def test_method_initiate_with_all_params_overload_3(self, client: GradientAI) -> @pytest.mark.skip() @parametrize - def test_raw_response_initiate_overload_3(self, client: GradientAI) -> None: + def test_raw_response_initiate_overload_3(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="enable_backups", @@ -316,7 +316,7 @@ def test_raw_response_initiate_overload_3(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_initiate_overload_3(self, client: GradientAI) -> None: + def test_streaming_response_initiate_overload_3(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="enable_backups", @@ -331,7 +331,7 @@ def test_streaming_response_initiate_overload_3(self, client: GradientAI) -> Non @pytest.mark.skip() @parametrize - def test_method_initiate_overload_4(self, client: GradientAI) -> None: + def test_method_initiate_overload_4(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -340,7 +340,7 @@ def test_method_initiate_overload_4(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_initiate_with_all_params_overload_4(self, client: GradientAI) -> None: + def test_method_initiate_with_all_params_overload_4(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -350,7 +350,7 @@ def test_method_initiate_with_all_params_overload_4(self, client: GradientAI) -> @pytest.mark.skip() @parametrize - def test_raw_response_initiate_overload_4(self, client: GradientAI) -> None: + def test_raw_response_initiate_overload_4(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="reboot", @@ -363,7 +363,7 @@ def test_raw_response_initiate_overload_4(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def 
test_streaming_response_initiate_overload_4(self, client: GradientAI) -> None: + def test_streaming_response_initiate_overload_4(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="reboot", @@ -378,7 +378,7 @@ def test_streaming_response_initiate_overload_4(self, client: GradientAI) -> Non @pytest.mark.skip() @parametrize - def test_method_initiate_overload_5(self, client: GradientAI) -> None: + def test_method_initiate_overload_5(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -387,7 +387,7 @@ def test_method_initiate_overload_5(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_initiate_with_all_params_overload_5(self, client: GradientAI) -> None: + def test_method_initiate_with_all_params_overload_5(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -398,7 +398,7 @@ def test_method_initiate_with_all_params_overload_5(self, client: GradientAI) -> @pytest.mark.skip() @parametrize - def test_raw_response_initiate_overload_5(self, client: GradientAI) -> None: + def test_raw_response_initiate_overload_5(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="reboot", @@ -411,7 +411,7 @@ def test_raw_response_initiate_overload_5(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_initiate_overload_5(self, client: GradientAI) -> None: + def test_streaming_response_initiate_overload_5(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="reboot", @@ -426,7 +426,7 @@ def test_streaming_response_initiate_overload_5(self, client: GradientAI) -> Non @pytest.mark.skip() @parametrize - def test_method_initiate_overload_6(self, client: GradientAI) -> None: + def test_method_initiate_overload_6(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -435,7 +435,7 @@ def test_method_initiate_overload_6(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_initiate_with_all_params_overload_6(self, client: GradientAI) -> None: + def test_method_initiate_with_all_params_overload_6(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -445,7 +445,7 @@ def test_method_initiate_with_all_params_overload_6(self, client: GradientAI) -> @pytest.mark.skip() @parametrize - def test_raw_response_initiate_overload_6(self, client: GradientAI) -> None: + def test_raw_response_initiate_overload_6(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="reboot", @@ -458,7 +458,7 @@ def test_raw_response_initiate_overload_6(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_initiate_overload_6(self, client: GradientAI) -> None: + def test_streaming_response_initiate_overload_6(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="reboot", @@ -473,7 +473,7 @@ def test_streaming_response_initiate_overload_6(self, client: GradientAI) -> Non @pytest.mark.skip() @parametrize - def test_method_initiate_overload_7(self, client: GradientAI) -> None: + def 
test_method_initiate_overload_7(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -482,7 +482,7 @@ def test_method_initiate_overload_7(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_initiate_with_all_params_overload_7(self, client: GradientAI) -> None: + def test_method_initiate_with_all_params_overload_7(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -492,7 +492,7 @@ def test_method_initiate_with_all_params_overload_7(self, client: GradientAI) -> @pytest.mark.skip() @parametrize - def test_raw_response_initiate_overload_7(self, client: GradientAI) -> None: + def test_raw_response_initiate_overload_7(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="reboot", @@ -505,7 +505,7 @@ def test_raw_response_initiate_overload_7(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_initiate_overload_7(self, client: GradientAI) -> None: + def test_streaming_response_initiate_overload_7(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="reboot", @@ -520,7 +520,7 @@ def test_streaming_response_initiate_overload_7(self, client: GradientAI) -> Non @pytest.mark.skip() @parametrize - def test_method_initiate_overload_8(self, client: GradientAI) -> None: + def test_method_initiate_overload_8(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -529,7 +529,7 @@ def test_method_initiate_overload_8(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_initiate_with_all_params_overload_8(self, client: GradientAI) -> None: + def test_method_initiate_with_all_params_overload_8(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -539,7 +539,7 @@ def test_method_initiate_with_all_params_overload_8(self, client: GradientAI) -> @pytest.mark.skip() @parametrize - def test_raw_response_initiate_overload_8(self, client: GradientAI) -> None: + def test_raw_response_initiate_overload_8(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="reboot", @@ -552,7 +552,7 @@ def test_raw_response_initiate_overload_8(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_initiate_overload_8(self, client: GradientAI) -> None: + def test_streaming_response_initiate_overload_8(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="reboot", @@ -567,7 +567,7 @@ def test_streaming_response_initiate_overload_8(self, client: GradientAI) -> Non @pytest.mark.skip() @parametrize - def test_method_initiate_overload_9(self, client: GradientAI) -> None: + def test_method_initiate_overload_9(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -576,7 +576,7 @@ def test_method_initiate_overload_9(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_initiate_with_all_params_overload_9(self, client: GradientAI) -> None: + def test_method_initiate_with_all_params_overload_9(self, client: Gradient) -> None: action = 
client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -586,7 +586,7 @@ def test_method_initiate_with_all_params_overload_9(self, client: GradientAI) -> @pytest.mark.skip() @parametrize - def test_raw_response_initiate_overload_9(self, client: GradientAI) -> None: + def test_raw_response_initiate_overload_9(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="reboot", @@ -599,7 +599,7 @@ def test_raw_response_initiate_overload_9(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_initiate_overload_9(self, client: GradientAI) -> None: + def test_streaming_response_initiate_overload_9(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="reboot", @@ -620,7 +620,7 @@ class TestAsyncActions: @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.retrieve( action_id=36804636, droplet_id=3164444, @@ -629,7 +629,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.retrieve( action_id=36804636, droplet_id=3164444, @@ -642,7 +642,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.retrieve( action_id=36804636, droplet_id=3164444, @@ -657,7 +657,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.list( droplet_id=3164444, ) @@ -665,7 +665,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.list( droplet_id=3164444, page=1, @@ -675,7 +675,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.list( droplet_id=3164444, ) @@ -687,7 +687,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with 
async_client.gpu_droplets.actions.with_streaming_response.list( droplet_id=3164444, ) as response: @@ -701,7 +701,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_bulk_initiate_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_bulk_initiate_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.bulk_initiate( type="reboot", ) @@ -709,7 +709,7 @@ async def test_method_bulk_initiate_overload_1(self, async_client: AsyncGradient @pytest.mark.skip() @parametrize - async def test_method_bulk_initiate_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_bulk_initiate_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.bulk_initiate( type="reboot", tag_name="tag_name", @@ -718,7 +718,7 @@ async def test_method_bulk_initiate_with_all_params_overload_1(self, async_clien @pytest.mark.skip() @parametrize - async def test_raw_response_bulk_initiate_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_bulk_initiate_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.bulk_initiate( type="reboot", ) @@ -730,7 +730,7 @@ async def test_raw_response_bulk_initiate_overload_1(self, async_client: AsyncGr @pytest.mark.skip() @parametrize - async def test_streaming_response_bulk_initiate_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_bulk_initiate_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.bulk_initiate( type="reboot", ) as response: @@ -744,7 +744,7 @@ async def test_streaming_response_bulk_initiate_overload_1(self, async_client: A @pytest.mark.skip() @parametrize - async def test_method_bulk_initiate_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_bulk_initiate_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.bulk_initiate( type="reboot", ) @@ -752,7 +752,7 @@ async def test_method_bulk_initiate_overload_2(self, async_client: AsyncGradient @pytest.mark.skip() @parametrize - async def test_method_bulk_initiate_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_bulk_initiate_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.bulk_initiate( type="reboot", tag_name="tag_name", @@ -762,7 +762,7 @@ async def test_method_bulk_initiate_with_all_params_overload_2(self, async_clien @pytest.mark.skip() @parametrize - async def test_raw_response_bulk_initiate_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_bulk_initiate_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.bulk_initiate( type="reboot", ) @@ -774,7 +774,7 @@ async def test_raw_response_bulk_initiate_overload_2(self, async_client: AsyncGr @pytest.mark.skip() @parametrize - async def test_streaming_response_bulk_initiate_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_bulk_initiate_overload_2(self, async_client: AsyncGradient) -> None: async with 
async_client.gpu_droplets.actions.with_streaming_response.bulk_initiate( type="reboot", ) as response: @@ -788,7 +788,7 @@ async def test_streaming_response_bulk_initiate_overload_2(self, async_client: A @pytest.mark.skip() @parametrize - async def test_method_initiate_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -797,7 +797,7 @@ async def test_method_initiate_overload_1(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def test_raw_response_initiate_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_initiate_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="reboot", @@ -810,7 +810,7 @@ async def test_raw_response_initiate_overload_1(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_streaming_response_initiate_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_initiate_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="reboot", @@ -825,7 +825,7 @@ async def test_streaming_response_initiate_overload_1(self, async_client: AsyncG @pytest.mark.skip() @parametrize - async def test_method_initiate_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="enable_backups", @@ -834,7 +834,7 @@ async def test_method_initiate_overload_2(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def test_method_initiate_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="enable_backups", @@ -848,7 +848,7 @@ async def test_method_initiate_with_all_params_overload_2(self, async_client: As @pytest.mark.skip() @parametrize - async def test_raw_response_initiate_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_initiate_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="enable_backups", @@ -861,7 +861,7 @@ async def test_raw_response_initiate_overload_2(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_streaming_response_initiate_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_initiate_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="enable_backups", @@ -876,7 +876,7 @@ async def test_streaming_response_initiate_overload_2(self, async_client: AsyncG @pytest.mark.skip() @parametrize - async def test_method_initiate_overload_3(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_overload_3(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, 
type="enable_backups", @@ -885,7 +885,7 @@ async def test_method_initiate_overload_3(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def test_method_initiate_with_all_params_overload_3(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_with_all_params_overload_3(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="enable_backups", @@ -899,7 +899,7 @@ async def test_method_initiate_with_all_params_overload_3(self, async_client: As @pytest.mark.skip() @parametrize - async def test_raw_response_initiate_overload_3(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_initiate_overload_3(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="enable_backups", @@ -912,7 +912,7 @@ async def test_raw_response_initiate_overload_3(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_streaming_response_initiate_overload_3(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_initiate_overload_3(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="enable_backups", @@ -927,7 +927,7 @@ async def test_streaming_response_initiate_overload_3(self, async_client: AsyncG @pytest.mark.skip() @parametrize - async def test_method_initiate_overload_4(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_overload_4(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -936,7 +936,7 @@ async def test_method_initiate_overload_4(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def test_method_initiate_with_all_params_overload_4(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_with_all_params_overload_4(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -946,7 +946,7 @@ async def test_method_initiate_with_all_params_overload_4(self, async_client: As @pytest.mark.skip() @parametrize - async def test_raw_response_initiate_overload_4(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_initiate_overload_4(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="reboot", @@ -959,7 +959,7 @@ async def test_raw_response_initiate_overload_4(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_streaming_response_initiate_overload_4(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_initiate_overload_4(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="reboot", @@ -974,7 +974,7 @@ async def test_streaming_response_initiate_overload_4(self, async_client: AsyncG @pytest.mark.skip() @parametrize - async def test_method_initiate_overload_5(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_overload_5(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -983,7 +983,7 @@ async def 
test_method_initiate_overload_5(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def test_method_initiate_with_all_params_overload_5(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_with_all_params_overload_5(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -994,7 +994,7 @@ async def test_method_initiate_with_all_params_overload_5(self, async_client: As @pytest.mark.skip() @parametrize - async def test_raw_response_initiate_overload_5(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_initiate_overload_5(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="reboot", @@ -1007,7 +1007,7 @@ async def test_raw_response_initiate_overload_5(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_streaming_response_initiate_overload_5(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_initiate_overload_5(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="reboot", @@ -1022,7 +1022,7 @@ async def test_streaming_response_initiate_overload_5(self, async_client: AsyncG @pytest.mark.skip() @parametrize - async def test_method_initiate_overload_6(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_overload_6(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -1031,7 +1031,7 @@ async def test_method_initiate_overload_6(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def test_method_initiate_with_all_params_overload_6(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_with_all_params_overload_6(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -1041,7 +1041,7 @@ async def test_method_initiate_with_all_params_overload_6(self, async_client: As @pytest.mark.skip() @parametrize - async def test_raw_response_initiate_overload_6(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_initiate_overload_6(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="reboot", @@ -1054,7 +1054,7 @@ async def test_raw_response_initiate_overload_6(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_streaming_response_initiate_overload_6(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_initiate_overload_6(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="reboot", @@ -1069,7 +1069,7 @@ async def test_streaming_response_initiate_overload_6(self, async_client: AsyncG @pytest.mark.skip() @parametrize - async def test_method_initiate_overload_7(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_overload_7(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -1078,7 +1078,7 @@ async def test_method_initiate_overload_7(self, async_client: AsyncGradientAI) - 
@pytest.mark.skip() @parametrize - async def test_method_initiate_with_all_params_overload_7(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_with_all_params_overload_7(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -1088,7 +1088,7 @@ async def test_method_initiate_with_all_params_overload_7(self, async_client: As @pytest.mark.skip() @parametrize - async def test_raw_response_initiate_overload_7(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_initiate_overload_7(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="reboot", @@ -1101,7 +1101,7 @@ async def test_raw_response_initiate_overload_7(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_streaming_response_initiate_overload_7(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_initiate_overload_7(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="reboot", @@ -1116,7 +1116,7 @@ async def test_streaming_response_initiate_overload_7(self, async_client: AsyncG @pytest.mark.skip() @parametrize - async def test_method_initiate_overload_8(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_overload_8(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -1125,7 +1125,7 @@ async def test_method_initiate_overload_8(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def test_method_initiate_with_all_params_overload_8(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_with_all_params_overload_8(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -1135,7 +1135,7 @@ async def test_method_initiate_with_all_params_overload_8(self, async_client: As @pytest.mark.skip() @parametrize - async def test_raw_response_initiate_overload_8(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_initiate_overload_8(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( droplet_id=3164444, type="reboot", @@ -1148,7 +1148,7 @@ async def test_raw_response_initiate_overload_8(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_streaming_response_initiate_overload_8(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_initiate_overload_8(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( droplet_id=3164444, type="reboot", @@ -1163,7 +1163,7 @@ async def test_streaming_response_initiate_overload_8(self, async_client: AsyncG @pytest.mark.skip() @parametrize - async def test_method_initiate_overload_9(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_overload_9(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( droplet_id=3164444, type="reboot", @@ -1172,7 +1172,7 @@ async def test_method_initiate_overload_9(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def 
test_method_initiate_with_all_params_overload_9(self, async_client: AsyncGradientAI) -> None:
+    async def test_method_initiate_with_all_params_overload_9(self, async_client: AsyncGradient) -> None:
         action = await async_client.gpu_droplets.actions.initiate(
             droplet_id=3164444,
             type="reboot",
@@ -1182,7 +1182,7 @@ def test_method_initiate_with_all_params_overload_9(self, async_client: As

     @pytest.mark.skip()
     @parametrize
-    async def test_raw_response_initiate_overload_9(self, async_client: AsyncGradientAI) -> None:
+    async def test_raw_response_initiate_overload_9(self, async_client: AsyncGradient) -> None:
         response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
             droplet_id=3164444,
             type="reboot",
@@ -1195,7 +1195,7 @@ async def test_raw_response_initiate_overload_9(self, async_client: AsyncGradien

     @pytest.mark.skip()
     @parametrize
-    async def test_streaming_response_initiate_overload_9(self, async_client: AsyncGradientAI) -> None:
+    async def test_streaming_response_initiate_overload_9(self, async_client: AsyncGradient) -> None:
         async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
             droplet_id=3164444,
             type="reboot",
diff --git a/tests/api_resources/gpu_droplets/test_autoscale.py b/tests/api_resources/gpu_droplets/test_autoscale.py
index cec0371d..16be3e00 100644
--- a/tests/api_resources/gpu_droplets/test_autoscale.py
+++ b/tests/api_resources/gpu_droplets/test_autoscale.py
@@ -7,9 +7,9 @@

 import pytest

+from gradient import Gradient, AsyncGradient
 from tests.utils import assert_matches_type
-from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types.gpu_droplets import (
+from gradient.types.gpu_droplets import (
     AutoscaleListResponse,
     AutoscaleCreateResponse,
     AutoscaleUpdateResponse,
@@ -26,7 +26,7 @@ class TestAutoscale:

     @pytest.mark.skip()
     @parametrize
-    def test_method_create(self, client: GradientAI) -> None:
+    def test_method_create(self, client: Gradient) -> None:
         autoscale = client.gpu_droplets.autoscale.create(
             config={
                 "max_instances": 5,
@@ -44,7 +44,7 @@ def test_method_create(self, client: GradientAI) -> None:

     @pytest.mark.skip()
     @parametrize
-    def test_method_create_with_all_params(self, client: GradientAI) -> None:
+    def test_method_create_with_all_params(self, client: Gradient) -> None:
         autoscale = client.gpu_droplets.autoscale.create(
             config={
                 "max_instances": 5,
@@ -72,7 +72,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None:

     @pytest.mark.skip()
     @parametrize
-    def test_raw_response_create(self, client: GradientAI) -> None:
+    def test_raw_response_create(self, client: Gradient) -> None:
         response = client.gpu_droplets.autoscale.with_raw_response.create(
             config={
                 "max_instances": 5,
@@ -94,7 +94,7 @@ def test_raw_response_create(self, client: GradientAI) -> None:

     @pytest.mark.skip()
     @parametrize
-    def test_streaming_response_create(self, client: GradientAI) -> None:
+    def test_streaming_response_create(self, client: Gradient) -> None:
         with client.gpu_droplets.autoscale.with_streaming_response.create(
             config={
                 "max_instances": 5,
@@ -118,7 +118,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None:

     @pytest.mark.skip()
     @parametrize
-    def test_method_retrieve(self, client: GradientAI) -> None:
+    def test_method_retrieve(self, client: Gradient) -> None:
         autoscale = client.gpu_droplets.autoscale.retrieve(
             "autoscale_pool_id",
         )
@@ -126,7 +126,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:

     @pytest.mark.skip()
     @parametrize
-    def
test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.retrieve( "autoscale_pool_id", ) @@ -138,7 +138,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.retrieve( "autoscale_pool_id", ) as response: @@ -152,7 +152,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): client.gpu_droplets.autoscale.with_raw_response.retrieve( "", @@ -160,7 +160,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.update( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", config={"target_number_instances": 2}, @@ -176,7 +176,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.update( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", config={"target_number_instances": 2}, @@ -199,7 +199,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.update( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", config={"target_number_instances": 2}, @@ -219,7 +219,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.update( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", config={"target_number_instances": 2}, @@ -241,7 +241,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): client.gpu_droplets.autoscale.with_raw_response.update( autoscale_pool_id="", @@ -257,13 +257,13 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.list() assert_matches_type(AutoscaleListResponse, autoscale, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + 
def test_method_list_with_all_params(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.list( name="name", page=1, @@ -273,7 +273,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.list() assert response.is_closed is True @@ -283,7 +283,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -295,7 +295,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.delete( "autoscale_pool_id", ) @@ -303,7 +303,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.delete( "autoscale_pool_id", ) @@ -315,7 +315,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.delete( "autoscale_pool_id", ) as response: @@ -329,7 +329,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): client.gpu_droplets.autoscale.with_raw_response.delete( "", @@ -337,7 +337,7 @@ def test_path_params_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete_dangerous(self, client: GradientAI) -> None: + def test_method_delete_dangerous(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.delete_dangerous( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", x_dangerous=True, @@ -346,7 +346,7 @@ def test_method_delete_dangerous(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete_dangerous(self, client: GradientAI) -> None: + def test_raw_response_delete_dangerous(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.delete_dangerous( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", x_dangerous=True, @@ -359,7 +359,7 @@ def test_raw_response_delete_dangerous(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete_dangerous(self, client: GradientAI) -> None: + def test_streaming_response_delete_dangerous(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.delete_dangerous( 
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", x_dangerous=True, @@ -374,7 +374,7 @@ def test_streaming_response_delete_dangerous(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete_dangerous(self, client: GradientAI) -> None: + def test_path_params_delete_dangerous(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): client.gpu_droplets.autoscale.with_raw_response.delete_dangerous( autoscale_pool_id="", @@ -383,7 +383,7 @@ def test_path_params_delete_dangerous(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_history(self, client: GradientAI) -> None: + def test_method_list_history(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.list_history( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", ) @@ -391,7 +391,7 @@ def test_method_list_history(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_history_with_all_params(self, client: GradientAI) -> None: + def test_method_list_history_with_all_params(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.list_history( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", page=1, @@ -401,7 +401,7 @@ def test_method_list_history_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list_history(self, client: GradientAI) -> None: + def test_raw_response_list_history(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.list_history( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", ) @@ -413,7 +413,7 @@ def test_raw_response_list_history(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_history(self, client: GradientAI) -> None: + def test_streaming_response_list_history(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.list_history( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", ) as response: @@ -427,7 +427,7 @@ def test_streaming_response_list_history(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list_history(self, client: GradientAI) -> None: + def test_path_params_list_history(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): client.gpu_droplets.autoscale.with_raw_response.list_history( autoscale_pool_id="", @@ -435,7 +435,7 @@ def test_path_params_list_history(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_members(self, client: GradientAI) -> None: + def test_method_list_members(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.list_members( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", ) @@ -443,7 +443,7 @@ def test_method_list_members(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_members_with_all_params(self, client: GradientAI) -> None: + def test_method_list_members_with_all_params(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.list_members( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", page=1, @@ -453,7 +453,7 @@ def test_method_list_members_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def 
test_raw_response_list_members(self, client: GradientAI) -> None: + def test_raw_response_list_members(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.list_members( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", ) @@ -465,7 +465,7 @@ def test_raw_response_list_members(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_members(self, client: GradientAI) -> None: + def test_streaming_response_list_members(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.list_members( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", ) as response: @@ -479,7 +479,7 @@ def test_streaming_response_list_members(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list_members(self, client: GradientAI) -> None: + def test_path_params_list_members(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): client.gpu_droplets.autoscale.with_raw_response.list_members( autoscale_pool_id="", @@ -493,7 +493,7 @@ class TestAsyncAutoscale: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.create( config={ "max_instances": 5, @@ -511,7 +511,7 @@ async def test_method_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.create( config={ "max_instances": 5, @@ -539,7 +539,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.create( config={ "max_instances": 5, @@ -561,7 +561,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.create( config={ "max_instances": 5, @@ -585,7 +585,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.retrieve( "autoscale_pool_id", ) @@ -593,7 +593,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.retrieve( "autoscale_pool_id", ) @@ -605,7 +605,7 @@ async def test_raw_response_retrieve(self, 
async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.retrieve( "autoscale_pool_id", ) as response: @@ -619,7 +619,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): await async_client.gpu_droplets.autoscale.with_raw_response.retrieve( "", @@ -627,7 +627,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.update( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", config={"target_number_instances": 2}, @@ -643,7 +643,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.update( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", config={"target_number_instances": 2}, @@ -666,7 +666,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.update( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", config={"target_number_instances": 2}, @@ -686,7 +686,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.update( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", config={"target_number_instances": 2}, @@ -708,7 +708,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): await async_client.gpu_droplets.autoscale.with_raw_response.update( autoscale_pool_id="", @@ -724,13 +724,13 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: autoscale = await 
async_client.gpu_droplets.autoscale.list() assert_matches_type(AutoscaleListResponse, autoscale, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.list( name="name", page=1, @@ -740,7 +740,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.list() assert response.is_closed is True @@ -750,7 +750,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -762,7 +762,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.delete( "autoscale_pool_id", ) @@ -770,7 +770,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.delete( "autoscale_pool_id", ) @@ -782,7 +782,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.delete( "autoscale_pool_id", ) as response: @@ -796,7 +796,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): await async_client.gpu_droplets.autoscale.with_raw_response.delete( "", @@ -804,7 +804,7 @@ async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_delete_dangerous(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete_dangerous(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.delete_dangerous( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", x_dangerous=True, @@ -813,7 +813,7 @@ async def test_method_delete_dangerous(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() 
@parametrize - async def test_raw_response_delete_dangerous(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete_dangerous(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.delete_dangerous( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", x_dangerous=True, @@ -826,7 +826,7 @@ async def test_raw_response_delete_dangerous(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_streaming_response_delete_dangerous(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete_dangerous(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.delete_dangerous( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", x_dangerous=True, @@ -841,7 +841,7 @@ async def test_streaming_response_delete_dangerous(self, async_client: AsyncGrad @pytest.mark.skip() @parametrize - async def test_path_params_delete_dangerous(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete_dangerous(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): await async_client.gpu_droplets.autoscale.with_raw_response.delete_dangerous( autoscale_pool_id="", @@ -850,7 +850,7 @@ async def test_path_params_delete_dangerous(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_method_list_history(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_history(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.list_history( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", ) @@ -858,7 +858,7 @@ async def test_method_list_history(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_history_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_history_with_all_params(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.list_history( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", page=1, @@ -868,7 +868,7 @@ async def test_method_list_history_with_all_params(self, async_client: AsyncGrad @pytest.mark.skip() @parametrize - async def test_raw_response_list_history(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_history(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.list_history( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", ) @@ -880,7 +880,7 @@ async def test_raw_response_list_history(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_streaming_response_list_history(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_history(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.list_history( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", ) as response: @@ -894,7 +894,7 @@ async def test_streaming_response_list_history(self, async_client: AsyncGradient @pytest.mark.skip() @parametrize - async def test_path_params_list_history(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list_history(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, 
match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): await async_client.gpu_droplets.autoscale.with_raw_response.list_history( autoscale_pool_id="", @@ -902,7 +902,7 @@ async def test_path_params_list_history(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_list_members(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_members(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.list_members( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", ) @@ -910,7 +910,7 @@ async def test_method_list_members(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_members_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_members_with_all_params(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.list_members( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", page=1, @@ -920,7 +920,7 @@ async def test_method_list_members_with_all_params(self, async_client: AsyncGrad @pytest.mark.skip() @parametrize - async def test_raw_response_list_members(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_members(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.list_members( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", ) @@ -932,7 +932,7 @@ async def test_raw_response_list_members(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_streaming_response_list_members(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_members(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.list_members( autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac", ) as response: @@ -946,7 +946,7 @@ async def test_streaming_response_list_members(self, async_client: AsyncGradient @pytest.mark.skip() @parametrize - async def test_path_params_list_members(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list_members(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): await async_client.gpu_droplets.autoscale.with_raw_response.list_members( autoscale_pool_id="", diff --git a/tests/api_resources/gpu_droplets/test_backups.py b/tests/api_resources/gpu_droplets/test_backups.py index 334c701f..ecff25de 100644 --- a/tests/api_resources/gpu_droplets/test_backups.py +++ b/tests/api_resources/gpu_droplets/test_backups.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( BackupListResponse, BackupListPoliciesResponse, BackupRetrievePolicyResponse, @@ -24,7 +24,7 @@ class TestBackups: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: backup = client.gpu_droplets.backups.list( droplet_id=3164444, ) @@ -32,7 +32,7 @@ def test_method_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def 
test_method_list_with_all_params(self, client: Gradient) -> None: backup = client.gpu_droplets.backups.list( droplet_id=3164444, page=1, @@ -42,7 +42,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.backups.with_raw_response.list( droplet_id=3164444, ) @@ -54,7 +54,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.backups.with_streaming_response.list( droplet_id=3164444, ) as response: @@ -68,13 +68,13 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_policies(self, client: GradientAI) -> None: + def test_method_list_policies(self, client: Gradient) -> None: backup = client.gpu_droplets.backups.list_policies() assert_matches_type(BackupListPoliciesResponse, backup, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_policies_with_all_params(self, client: GradientAI) -> None: + def test_method_list_policies_with_all_params(self, client: Gradient) -> None: backup = client.gpu_droplets.backups.list_policies( page=1, per_page=1, @@ -83,7 +83,7 @@ def test_method_list_policies_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list_policies(self, client: GradientAI) -> None: + def test_raw_response_list_policies(self, client: Gradient) -> None: response = client.gpu_droplets.backups.with_raw_response.list_policies() assert response.is_closed is True @@ -93,7 +93,7 @@ def test_raw_response_list_policies(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_policies(self, client: GradientAI) -> None: + def test_streaming_response_list_policies(self, client: Gradient) -> None: with client.gpu_droplets.backups.with_streaming_response.list_policies() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -105,13 +105,13 @@ def test_streaming_response_list_policies(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_supported_policies(self, client: GradientAI) -> None: + def test_method_list_supported_policies(self, client: Gradient) -> None: backup = client.gpu_droplets.backups.list_supported_policies() assert_matches_type(BackupListSupportedPoliciesResponse, backup, path=["response"]) @pytest.mark.skip() @parametrize - def test_raw_response_list_supported_policies(self, client: GradientAI) -> None: + def test_raw_response_list_supported_policies(self, client: Gradient) -> None: response = client.gpu_droplets.backups.with_raw_response.list_supported_policies() assert response.is_closed is True @@ -121,7 +121,7 @@ def test_raw_response_list_supported_policies(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_supported_policies(self, client: GradientAI) -> None: + def test_streaming_response_list_supported_policies(self, client: Gradient) -> None: with client.gpu_droplets.backups.with_streaming_response.list_supported_policies() as response: assert not response.is_closed assert 
response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -133,7 +133,7 @@ def test_streaming_response_list_supported_policies(self, client: GradientAI) -> @pytest.mark.skip() @parametrize - def test_method_retrieve_policy(self, client: GradientAI) -> None: + def test_method_retrieve_policy(self, client: Gradient) -> None: backup = client.gpu_droplets.backups.retrieve_policy( 1, ) @@ -141,7 +141,7 @@ def test_method_retrieve_policy(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve_policy(self, client: GradientAI) -> None: + def test_raw_response_retrieve_policy(self, client: Gradient) -> None: response = client.gpu_droplets.backups.with_raw_response.retrieve_policy( 1, ) @@ -153,7 +153,7 @@ def test_raw_response_retrieve_policy(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve_policy(self, client: GradientAI) -> None: + def test_streaming_response_retrieve_policy(self, client: Gradient) -> None: with client.gpu_droplets.backups.with_streaming_response.retrieve_policy( 1, ) as response: @@ -173,7 +173,7 @@ class TestAsyncBackups: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: backup = await async_client.gpu_droplets.backups.list( droplet_id=3164444, ) @@ -181,7 +181,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: backup = await async_client.gpu_droplets.backups.list( droplet_id=3164444, page=1, @@ -191,7 +191,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.backups.with_raw_response.list( droplet_id=3164444, ) @@ -203,7 +203,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.backups.with_streaming_response.list( droplet_id=3164444, ) as response: @@ -217,13 +217,13 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_list_policies(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_policies(self, async_client: AsyncGradient) -> None: backup = await async_client.gpu_droplets.backups.list_policies() assert_matches_type(BackupListPoliciesResponse, backup, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_policies_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_policies_with_all_params(self, async_client: AsyncGradient) -> None: backup = await async_client.gpu_droplets.backups.list_policies( page=1, per_page=1, @@ -232,7 +232,7 @@ async def test_method_list_policies_with_all_params(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def 
test_raw_response_list_policies(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_policies(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.backups.with_raw_response.list_policies() assert response.is_closed is True @@ -242,7 +242,7 @@ async def test_raw_response_list_policies(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def test_streaming_response_list_policies(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_policies(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.backups.with_streaming_response.list_policies() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -254,13 +254,13 @@ async def test_streaming_response_list_policies(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_method_list_supported_policies(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_supported_policies(self, async_client: AsyncGradient) -> None: backup = await async_client.gpu_droplets.backups.list_supported_policies() assert_matches_type(BackupListSupportedPoliciesResponse, backup, path=["response"]) @pytest.mark.skip() @parametrize - async def test_raw_response_list_supported_policies(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_supported_policies(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.backups.with_raw_response.list_supported_policies() assert response.is_closed is True @@ -270,7 +270,7 @@ async def test_raw_response_list_supported_policies(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_streaming_response_list_supported_policies(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_supported_policies(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.backups.with_streaming_response.list_supported_policies() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -282,7 +282,7 @@ async def test_streaming_response_list_supported_policies(self, async_client: As @pytest.mark.skip() @parametrize - async def test_method_retrieve_policy(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve_policy(self, async_client: AsyncGradient) -> None: backup = await async_client.gpu_droplets.backups.retrieve_policy( 1, ) @@ -290,7 +290,7 @@ async def test_method_retrieve_policy(self, async_client: AsyncGradientAI) -> No @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve_policy(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve_policy(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.backups.with_raw_response.retrieve_policy( 1, ) @@ -302,7 +302,7 @@ async def test_raw_response_retrieve_policy(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve_policy(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve_policy(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.backups.with_streaming_response.retrieve_policy( 1, ) as response: diff --git a/tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py 
b/tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py index 2aef1fce..3715ced7 100644 --- a/tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py +++ b/tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( DestroyWithAssociatedResourceListResponse, DestroyWithAssociatedResourceCheckStatusResponse, ) @@ -22,7 +22,7 @@ class TestDestroyWithAssociatedResources: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.list( 1, ) @@ -32,7 +32,7 @@ def test_method_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.list( 1, ) @@ -46,7 +46,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.list( 1, ) as response: @@ -62,7 +62,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_check_status(self, client: GradientAI) -> None: + def test_method_check_status(self, client: Gradient) -> None: destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.check_status( 1, ) @@ -72,7 +72,7 @@ def test_method_check_status(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_check_status(self, client: GradientAI) -> None: + def test_raw_response_check_status(self, client: Gradient) -> None: response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.check_status( 1, ) @@ -86,7 +86,7 @@ def test_raw_response_check_status(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_check_status(self, client: GradientAI) -> None: + def test_streaming_response_check_status(self, client: Gradient) -> None: with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.check_status( 1, ) as response: @@ -102,7 +102,7 @@ def test_streaming_response_check_status(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete_dangerous(self, client: GradientAI) -> None: + def test_method_delete_dangerous(self, client: Gradient) -> None: destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.delete_dangerous( droplet_id=3164444, x_dangerous=True, @@ -111,7 +111,7 @@ def test_method_delete_dangerous(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete_dangerous(self, client: GradientAI) -> None: + def test_raw_response_delete_dangerous(self, client: Gradient) -> None: response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_dangerous( droplet_id=3164444, x_dangerous=True, @@ 
-124,7 +124,7 @@ def test_raw_response_delete_dangerous(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete_dangerous(self, client: GradientAI) -> None: + def test_streaming_response_delete_dangerous(self, client: Gradient) -> None: with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_dangerous( droplet_id=3164444, x_dangerous=True, @@ -139,7 +139,7 @@ def test_streaming_response_delete_dangerous(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete_selective(self, client: GradientAI) -> None: + def test_method_delete_selective(self, client: Gradient) -> None: destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.delete_selective( droplet_id=3164444, ) @@ -147,7 +147,7 @@ def test_method_delete_selective(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete_selective_with_all_params(self, client: GradientAI) -> None: + def test_method_delete_selective_with_all_params(self, client: Gradient) -> None: destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.delete_selective( droplet_id=3164444, floating_ips=["6186916"], @@ -160,7 +160,7 @@ def test_method_delete_selective_with_all_params(self, client: GradientAI) -> No @pytest.mark.skip() @parametrize - def test_raw_response_delete_selective(self, client: GradientAI) -> None: + def test_raw_response_delete_selective(self, client: Gradient) -> None: response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_selective( droplet_id=3164444, ) @@ -172,7 +172,7 @@ def test_raw_response_delete_selective(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete_selective(self, client: GradientAI) -> None: + def test_streaming_response_delete_selective(self, client: Gradient) -> None: with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_selective( droplet_id=3164444, ) as response: @@ -186,7 +186,7 @@ def test_streaming_response_delete_selective(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retry(self, client: GradientAI) -> None: + def test_method_retry(self, client: Gradient) -> None: destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.retry( 1, ) @@ -194,7 +194,7 @@ def test_method_retry(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retry(self, client: GradientAI) -> None: + def test_raw_response_retry(self, client: Gradient) -> None: response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.retry( 1, ) @@ -206,7 +206,7 @@ def test_raw_response_retry(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retry(self, client: GradientAI) -> None: + def test_streaming_response_retry(self, client: Gradient) -> None: with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.retry( 1, ) as response: @@ -226,7 +226,7 @@ class TestAsyncDestroyWithAssociatedResources: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: destroy_with_associated_resource = await async_client.gpu_droplets.destroy_with_associated_resources.list( 1, ) @@ -236,7 +236,7 @@ async def 
test_method_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.list( 1, ) @@ -250,7 +250,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.list( 1, ) as response: @@ -266,7 +266,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_check_status(self, async_client: AsyncGradientAI) -> None: + async def test_method_check_status(self, async_client: AsyncGradient) -> None: destroy_with_associated_resource = ( await async_client.gpu_droplets.destroy_with_associated_resources.check_status( 1, @@ -278,7 +278,7 @@ async def test_method_check_status(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_check_status(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_check_status(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.check_status( 1, ) @@ -292,7 +292,7 @@ async def test_raw_response_check_status(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_streaming_response_check_status(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_check_status(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.check_status( 1, ) as response: @@ -308,7 +308,7 @@ async def test_streaming_response_check_status(self, async_client: AsyncGradient @pytest.mark.skip() @parametrize - async def test_method_delete_dangerous(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete_dangerous(self, async_client: AsyncGradient) -> None: destroy_with_associated_resource = ( await async_client.gpu_droplets.destroy_with_associated_resources.delete_dangerous( droplet_id=3164444, @@ -319,7 +319,7 @@ async def test_method_delete_dangerous(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_raw_response_delete_dangerous(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete_dangerous(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_dangerous( droplet_id=3164444, x_dangerous=True, @@ -332,7 +332,7 @@ async def test_raw_response_delete_dangerous(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_streaming_response_delete_dangerous(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete_dangerous(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_dangerous( droplet_id=3164444, x_dangerous=True, @@ -347,7 +347,7 @@ async def test_streaming_response_delete_dangerous(self, 
async_client: AsyncGrad @pytest.mark.skip() @parametrize - async def test_method_delete_selective(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete_selective(self, async_client: AsyncGradient) -> None: destroy_with_associated_resource = ( await async_client.gpu_droplets.destroy_with_associated_resources.delete_selective( droplet_id=3164444, @@ -357,7 +357,7 @@ async def test_method_delete_selective(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete_selective_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete_selective_with_all_params(self, async_client: AsyncGradient) -> None: destroy_with_associated_resource = ( await async_client.gpu_droplets.destroy_with_associated_resources.delete_selective( droplet_id=3164444, @@ -372,7 +372,7 @@ async def test_method_delete_selective_with_all_params(self, async_client: Async @pytest.mark.skip() @parametrize - async def test_raw_response_delete_selective(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete_selective(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_selective( droplet_id=3164444, ) @@ -384,7 +384,7 @@ async def test_raw_response_delete_selective(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_streaming_response_delete_selective(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete_selective(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_selective( droplet_id=3164444, ) as response: @@ -398,7 +398,7 @@ async def test_streaming_response_delete_selective(self, async_client: AsyncGrad @pytest.mark.skip() @parametrize - async def test_method_retry(self, async_client: AsyncGradientAI) -> None: + async def test_method_retry(self, async_client: AsyncGradient) -> None: destroy_with_associated_resource = await async_client.gpu_droplets.destroy_with_associated_resources.retry( 1, ) @@ -406,7 +406,7 @@ async def test_method_retry(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retry(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retry(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.retry( 1, ) @@ -418,7 +418,7 @@ async def test_raw_response_retry(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_retry(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retry(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.retry( 1, ) as response: diff --git a/tests/api_resources/gpu_droplets/test_firewalls.py b/tests/api_resources/gpu_droplets/test_firewalls.py index 6d98ebe8..8585a114 100644 --- a/tests/api_resources/gpu_droplets/test_firewalls.py +++ b/tests/api_resources/gpu_droplets/test_firewalls.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( FirewallListResponse, 
FirewallCreateResponse, FirewallUpdateResponse, @@ -24,13 +24,13 @@ class TestFirewalls: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.create() assert_matches_type(FirewallCreateResponse, firewall, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def test_method_create_with_all_params(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.create( body={ "droplet_ids": [8043964], @@ -79,7 +79,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.with_raw_response.create() assert response.is_closed is True @@ -89,7 +89,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -101,7 +101,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -109,7 +109,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.with_raw_response.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -121,7 +121,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.with_streaming_response.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) as response: @@ -135,7 +135,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): client.gpu_droplets.firewalls.with_raw_response.retrieve( "", @@ -143,7 +143,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.update( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", firewall={"name": "frontend-firewall"}, @@ -152,7 +152,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: 
Gradient) -> None: firewall = client.gpu_droplets.firewalls.update( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", firewall={ @@ -202,7 +202,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.with_raw_response.update( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", firewall={"name": "frontend-firewall"}, @@ -215,7 +215,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.with_streaming_response.update( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", firewall={"name": "frontend-firewall"}, @@ -230,7 +230,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): client.gpu_droplets.firewalls.with_raw_response.update( firewall_id="", @@ -239,13 +239,13 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.list() assert_matches_type(FirewallListResponse, firewall, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.list( page=1, per_page=1, @@ -254,7 +254,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.with_raw_response.list() assert response.is_closed is True @@ -264,7 +264,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -276,7 +276,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.delete( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -284,7 +284,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.with_raw_response.delete( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -296,7 +296,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: 
@pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.with_streaming_response.delete( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) as response: @@ -310,7 +310,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): client.gpu_droplets.firewalls.with_raw_response.delete( "", @@ -324,13 +324,13 @@ class TestAsyncFirewalls: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.create() assert_matches_type(FirewallCreateResponse, firewall, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.create( body={ "droplet_ids": [8043964], @@ -379,7 +379,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.with_raw_response.create() assert response.is_closed is True @@ -389,7 +389,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -401,7 +401,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -409,7 +409,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.with_raw_response.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -421,7 +421,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.with_streaming_response.retrieve( 
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) as response: @@ -435,7 +435,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): await async_client.gpu_droplets.firewalls.with_raw_response.retrieve( "", @@ -443,7 +443,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.update( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", firewall={"name": "frontend-firewall"}, @@ -452,7 +452,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.update( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", firewall={ @@ -502,7 +502,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.with_raw_response.update( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", firewall={"name": "frontend-firewall"}, @@ -515,7 +515,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.with_streaming_response.update( firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c", firewall={"name": "frontend-firewall"}, @@ -530,7 +530,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): await async_client.gpu_droplets.firewalls.with_raw_response.update( firewall_id="", @@ -539,13 +539,13 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.list() assert_matches_type(FirewallListResponse, firewall, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.list( page=1, per_page=1, @@ -554,7 
+554,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.with_raw_response.list() assert response.is_closed is True @@ -564,7 +564,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -576,7 +576,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.delete( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -584,7 +584,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.with_raw_response.delete( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -596,7 +596,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.with_streaming_response.delete( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) as response: @@ -610,7 +610,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): await async_client.gpu_droplets.firewalls.with_raw_response.delete( "", diff --git a/tests/api_resources/gpu_droplets/test_floating_ips.py b/tests/api_resources/gpu_droplets/test_floating_ips.py index 9b8b3183..9ac488d6 100644 --- a/tests/api_resources/gpu_droplets/test_floating_ips.py +++ b/tests/api_resources/gpu_droplets/test_floating_ips.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( FloatingIPListResponse, FloatingIPCreateResponse, FloatingIPRetrieveResponse, @@ -23,7 +23,7 @@ class TestFloatingIPs: @pytest.mark.skip() @parametrize - def test_method_create_overload_1(self, client: GradientAI) -> None: + def test_method_create_overload_1(self, client: Gradient) -> None: floating_ip = client.gpu_droplets.floating_ips.create( droplet_id=2457247, ) @@ -31,7 +31,7 @@ def 
test_method_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create_overload_1(self, client: GradientAI) -> None: + def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.with_raw_response.create( droplet_id=2457247, ) @@ -43,7 +43,7 @@ def test_raw_response_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create_overload_1(self, client: GradientAI) -> None: + def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.with_streaming_response.create( droplet_id=2457247, ) as response: @@ -57,7 +57,7 @@ def test_streaming_response_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_overload_2(self, client: GradientAI) -> None: + def test_method_create_overload_2(self, client: Gradient) -> None: floating_ip = client.gpu_droplets.floating_ips.create( region="nyc3", ) @@ -65,7 +65,7 @@ def test_method_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> None: + def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None: floating_ip = client.gpu_droplets.floating_ips.create( region="nyc3", project_id="746c6152-2fa2-11ed-92d3-27aaa54e4988", @@ -74,7 +74,7 @@ def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_raw_response_create_overload_2(self, client: GradientAI) -> None: + def test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.with_raw_response.create( region="nyc3", ) @@ -86,7 +86,7 @@ def test_raw_response_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create_overload_2(self, client: GradientAI) -> None: + def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.with_streaming_response.create( region="nyc3", ) as response: @@ -100,7 +100,7 @@ def test_streaming_response_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: floating_ip = client.gpu_droplets.floating_ips.retrieve( "192.168.1.1", ) @@ -108,7 +108,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.with_raw_response.retrieve( "192.168.1.1", ) @@ -120,7 +120,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.with_streaming_response.retrieve( "192.168.1.1", ) as response: @@ -134,7 +134,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with 
pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): client.gpu_droplets.floating_ips.with_raw_response.retrieve( "", @@ -142,13 +142,13 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: floating_ip = client.gpu_droplets.floating_ips.list() assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: floating_ip = client.gpu_droplets.floating_ips.list( page=1, per_page=1, @@ -157,7 +157,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.with_raw_response.list() assert response.is_closed is True @@ -167,7 +167,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -179,7 +179,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: floating_ip = client.gpu_droplets.floating_ips.delete( "192.168.1.1", ) @@ -187,7 +187,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.with_raw_response.delete( "192.168.1.1", ) @@ -199,7 +199,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.with_streaming_response.delete( "192.168.1.1", ) as response: @@ -213,7 +213,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): client.gpu_droplets.floating_ips.with_raw_response.delete( "", @@ -227,7 +227,7 @@ class TestAsyncFloatingIPs: @pytest.mark.skip() @parametrize - async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: floating_ip = await async_client.gpu_droplets.floating_ips.create( droplet_id=2457247, ) @@ -235,7 +235,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> 
None: + async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.with_raw_response.create( droplet_id=2457247, ) @@ -247,7 +247,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.with_streaming_response.create( droplet_id=2457247, ) as response: @@ -261,7 +261,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: floating_ip = await async_client.gpu_droplets.floating_ips.create( region="nyc3", ) @@ -269,7 +269,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: floating_ip = await async_client.gpu_droplets.floating_ips.create( region="nyc3", project_id="746c6152-2fa2-11ed-92d3-27aaa54e4988", @@ -278,7 +278,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.with_raw_response.create( region="nyc3", ) @@ -290,7 +290,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.with_streaming_response.create( region="nyc3", ) as response: @@ -304,7 +304,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: floating_ip = await async_client.gpu_droplets.floating_ips.retrieve( "192.168.1.1", ) @@ -312,7 +312,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.with_raw_response.retrieve( "192.168.1.1", ) @@ -324,7 +324,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.with_streaming_response.retrieve( 
"192.168.1.1", ) as response: @@ -338,7 +338,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): await async_client.gpu_droplets.floating_ips.with_raw_response.retrieve( "", @@ -346,13 +346,13 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: floating_ip = await async_client.gpu_droplets.floating_ips.list() assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: floating_ip = await async_client.gpu_droplets.floating_ips.list( page=1, per_page=1, @@ -361,7 +361,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.with_raw_response.list() assert response.is_closed is True @@ -371,7 +371,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -383,7 +383,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: floating_ip = await async_client.gpu_droplets.floating_ips.delete( "192.168.1.1", ) @@ -391,7 +391,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.with_raw_response.delete( "192.168.1.1", ) @@ -403,7 +403,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.with_streaming_response.delete( "192.168.1.1", ) as response: @@ -417,7 +417,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def 
test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): await async_client.gpu_droplets.floating_ips.with_raw_response.delete( "", diff --git a/tests/api_resources/gpu_droplets/test_images.py b/tests/api_resources/gpu_droplets/test_images.py index 5a2a7c0c..bf6bfa4f 100644 --- a/tests/api_resources/gpu_droplets/test_images.py +++ b/tests/api_resources/gpu_droplets/test_images.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( ImageListResponse, ImageCreateResponse, ImageUpdateResponse, @@ -24,13 +24,13 @@ class TestImages: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: image = client.gpu_droplets.images.create() assert_matches_type(ImageCreateResponse, image, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def test_method_create_with_all_params(self, client: Gradient) -> None: image = client.gpu_droplets.images.create( description=" ", distribution="Ubuntu", @@ -43,7 +43,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.gpu_droplets.images.with_raw_response.create() assert response.is_closed is True @@ -53,7 +53,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.gpu_droplets.images.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -65,7 +65,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: image = client.gpu_droplets.images.retrieve( 0, ) @@ -73,7 +73,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.images.with_raw_response.retrieve( 0, ) @@ -85,7 +85,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.images.with_streaming_response.retrieve( 0, ) as response: @@ -99,7 +99,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: image = client.gpu_droplets.images.update( image_id=62137902, ) @@ -107,7 +107,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize 
- def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: image = client.gpu_droplets.images.update( image_id=62137902, description=" ", @@ -118,7 +118,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.gpu_droplets.images.with_raw_response.update( image_id=62137902, ) @@ -130,7 +130,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.gpu_droplets.images.with_streaming_response.update( image_id=62137902, ) as response: @@ -144,13 +144,13 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: image = client.gpu_droplets.images.list() assert_matches_type(ImageListResponse, image, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: image = client.gpu_droplets.images.list( page=1, per_page=1, @@ -162,7 +162,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.images.with_raw_response.list() assert response.is_closed is True @@ -172,7 +172,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.images.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -184,7 +184,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: image = client.gpu_droplets.images.delete( 0, ) @@ -192,7 +192,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.images.with_raw_response.delete( 0, ) @@ -204,7 +204,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.images.with_streaming_response.delete( 0, ) as response: @@ -224,13 +224,13 @@ class TestAsyncImages: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.create() 
assert_matches_type(ImageCreateResponse, image, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.create( description=" ", distribution="Ubuntu", @@ -243,7 +243,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.with_raw_response.create() assert response.is_closed is True @@ -253,7 +253,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -265,7 +265,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.retrieve( 0, ) @@ -273,7 +273,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.with_raw_response.retrieve( 0, ) @@ -285,7 +285,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.with_streaming_response.retrieve( 0, ) as response: @@ -299,7 +299,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.update( image_id=62137902, ) @@ -307,7 +307,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.update( image_id=62137902, description=" ", @@ -318,7 +318,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await 
async_client.gpu_droplets.images.with_raw_response.update( image_id=62137902, ) @@ -330,7 +330,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.with_streaming_response.update( image_id=62137902, ) as response: @@ -344,13 +344,13 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.list() assert_matches_type(ImageListResponse, image, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.list( page=1, per_page=1, @@ -362,7 +362,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.with_raw_response.list() assert response.is_closed is True @@ -372,7 +372,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -384,7 +384,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.delete( 0, ) @@ -392,7 +392,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.with_raw_response.delete( 0, ) @@ -404,7 +404,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.with_streaming_response.delete( 0, ) as response: diff --git a/tests/api_resources/gpu_droplets/test_load_balancers.py b/tests/api_resources/gpu_droplets/test_load_balancers.py index b96c6d52..f660f8f3 100644 --- a/tests/api_resources/gpu_droplets/test_load_balancers.py +++ b/tests/api_resources/gpu_droplets/test_load_balancers.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, 
AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( LoadBalancerListResponse, LoadBalancerCreateResponse, LoadBalancerUpdateResponse, @@ -24,7 +24,7 @@ class TestLoadBalancers: @pytest.mark.skip() @parametrize - def test_method_create_overload_1(self, client: GradientAI) -> None: + def test_method_create_overload_1(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.create( forwarding_rules=[ { @@ -39,7 +39,7 @@ def test_method_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> None: + def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.create( forwarding_rules=[ { @@ -110,7 +110,7 @@ def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_raw_response_create_overload_1(self, client: GradientAI) -> None: + def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.create( forwarding_rules=[ { @@ -129,7 +129,7 @@ def test_raw_response_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create_overload_1(self, client: GradientAI) -> None: + def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.create( forwarding_rules=[ { @@ -150,7 +150,7 @@ def test_streaming_response_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_overload_2(self, client: GradientAI) -> None: + def test_method_create_overload_2(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.create( forwarding_rules=[ { @@ -165,7 +165,7 @@ def test_method_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> None: + def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.create( forwarding_rules=[ { @@ -236,7 +236,7 @@ def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_raw_response_create_overload_2(self, client: GradientAI) -> None: + def test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.create( forwarding_rules=[ { @@ -255,7 +255,7 @@ def test_raw_response_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create_overload_2(self, client: GradientAI) -> None: + def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.create( forwarding_rules=[ { @@ -276,7 +276,7 @@ def test_streaming_response_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.retrieve( "lb_id", ) @@ -284,7 +284,7 
@@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.retrieve( "lb_id", ) @@ -296,7 +296,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.retrieve( "lb_id", ) as response: @@ -310,7 +310,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): client.gpu_droplets.load_balancers.with_raw_response.retrieve( "", @@ -318,7 +318,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_overload_1(self, client: GradientAI) -> None: + def test_method_update_overload_1(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -334,7 +334,7 @@ def test_method_update_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params_overload_1(self, client: GradientAI) -> None: + def test_method_update_with_all_params_overload_1(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -406,7 +406,7 @@ def test_method_update_with_all_params_overload_1(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_raw_response_update_overload_1(self, client: GradientAI) -> None: + def test_raw_response_update_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -426,7 +426,7 @@ def test_raw_response_update_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update_overload_1(self, client: GradientAI) -> None: + def test_streaming_response_update_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -448,7 +448,7 @@ def test_streaming_response_update_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update_overload_1(self, client: GradientAI) -> None: + def test_path_params_update_overload_1(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): client.gpu_droplets.load_balancers.with_raw_response.update( lb_id="", @@ -464,7 +464,7 @@ def test_path_params_update_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_overload_2(self, client: GradientAI) -> None: + def test_method_update_overload_2(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.update( 
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -480,7 +480,7 @@ def test_method_update_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params_overload_2(self, client: GradientAI) -> None: + def test_method_update_with_all_params_overload_2(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -552,7 +552,7 @@ def test_method_update_with_all_params_overload_2(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_raw_response_update_overload_2(self, client: GradientAI) -> None: + def test_raw_response_update_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -572,7 +572,7 @@ def test_raw_response_update_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update_overload_2(self, client: GradientAI) -> None: + def test_streaming_response_update_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -594,7 +594,7 @@ def test_streaming_response_update_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update_overload_2(self, client: GradientAI) -> None: + def test_path_params_update_overload_2(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): client.gpu_droplets.load_balancers.with_raw_response.update( lb_id="", @@ -610,13 +610,13 @@ def test_path_params_update_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.list() assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.list( page=1, per_page=1, @@ -625,7 +625,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.list() assert response.is_closed is True @@ -635,7 +635,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -647,7 +647,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.delete( "lb_id", ) @@ -655,7 +655,7 @@ def 
test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.delete( "lb_id", ) @@ -667,7 +667,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.delete( "lb_id", ) as response: @@ -681,7 +681,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): client.gpu_droplets.load_balancers.with_raw_response.delete( "", @@ -689,7 +689,7 @@ def test_path_params_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete_cache(self, client: GradientAI) -> None: + def test_method_delete_cache(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.delete_cache( "lb_id", ) @@ -697,7 +697,7 @@ def test_method_delete_cache(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete_cache(self, client: GradientAI) -> None: + def test_raw_response_delete_cache(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.delete_cache( "lb_id", ) @@ -709,7 +709,7 @@ def test_raw_response_delete_cache(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete_cache(self, client: GradientAI) -> None: + def test_streaming_response_delete_cache(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.delete_cache( "lb_id", ) as response: @@ -723,7 +723,7 @@ def test_streaming_response_delete_cache(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete_cache(self, client: GradientAI) -> None: + def test_path_params_delete_cache(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): client.gpu_droplets.load_balancers.with_raw_response.delete_cache( "", @@ -737,7 +737,7 @@ class TestAsyncLoadBalancers: @pytest.mark.skip() @parametrize - async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.create( forwarding_rules=[ { @@ -752,7 +752,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.create( forwarding_rules=[ { @@ -823,7 +823,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) 
-> None: + async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.create( forwarding_rules=[ { @@ -842,7 +842,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.create( forwarding_rules=[ { @@ -863,7 +863,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.create( forwarding_rules=[ { @@ -878,7 +878,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.create( forwarding_rules=[ { @@ -949,7 +949,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.create( forwarding_rules=[ { @@ -968,7 +968,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.create( forwarding_rules=[ { @@ -989,7 +989,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.retrieve( "lb_id", ) @@ -997,7 +997,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.retrieve( "lb_id", ) @@ -1009,7 +1009,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.retrieve( "lb_id", ) as response: @@ -1023,7 +1023,7 @@ 
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): await async_client.gpu_droplets.load_balancers.with_raw_response.retrieve( "", @@ -1031,7 +1031,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_update_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_overload_1(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -1047,7 +1047,7 @@ async def test_method_update_overload_1(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -1119,7 +1119,7 @@ async def test_method_update_with_all_params_overload_1(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_raw_response_update_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -1139,7 +1139,7 @@ async def test_raw_response_update_overload_1(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_update_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -1161,7 +1161,7 @@ async def test_streaming_response_update_overload_1(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_path_params_update_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update_overload_1(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): await async_client.gpu_droplets.load_balancers.with_raw_response.update( lb_id="", @@ -1177,7 +1177,7 @@ async def test_path_params_update_overload_1(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_method_update_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_overload_2(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -1193,7 +1193,7 @@ async def test_method_update_overload_2(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None: + async def 
test_method_update_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -1265,7 +1265,7 @@ async def test_method_update_with_all_params_overload_2(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_raw_response_update_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -1285,7 +1285,7 @@ async def test_raw_response_update_overload_2(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_update_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.update( lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6", forwarding_rules=[ @@ -1307,7 +1307,7 @@ async def test_streaming_response_update_overload_2(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_path_params_update_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update_overload_2(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): await async_client.gpu_droplets.load_balancers.with_raw_response.update( lb_id="", @@ -1323,13 +1323,13 @@ async def test_path_params_update_overload_2(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.list() assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.list( page=1, per_page=1, @@ -1338,7 +1338,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.list() assert response.is_closed is True @@ -1348,7 +1348,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -1360,7 +1360,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + 
async def test_method_delete(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.delete( "lb_id", ) @@ -1368,7 +1368,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.delete( "lb_id", ) @@ -1380,7 +1380,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.delete( "lb_id", ) as response: @@ -1394,7 +1394,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): await async_client.gpu_droplets.load_balancers.with_raw_response.delete( "", @@ -1402,7 +1402,7 @@ async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_delete_cache(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete_cache(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.delete_cache( "lb_id", ) @@ -1410,7 +1410,7 @@ async def test_method_delete_cache(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete_cache(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete_cache(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.delete_cache( "lb_id", ) @@ -1422,7 +1422,7 @@ async def test_raw_response_delete_cache(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_streaming_response_delete_cache(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete_cache(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.delete_cache( "lb_id", ) as response: @@ -1436,7 +1436,7 @@ async def test_streaming_response_delete_cache(self, async_client: AsyncGradient @pytest.mark.skip() @parametrize - async def test_path_params_delete_cache(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete_cache(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): await async_client.gpu_droplets.load_balancers.with_raw_response.delete_cache( "", diff --git a/tests/api_resources/gpu_droplets/test_sizes.py b/tests/api_resources/gpu_droplets/test_sizes.py index 1ff11cd7..ec934e9f 100644 --- a/tests/api_resources/gpu_droplets/test_sizes.py +++ b/tests/api_resources/gpu_droplets/test_sizes.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai 
import GradientAI, AsyncGradientAI -from do_gradientai.types.gpu_droplets import SizeListResponse +from gradient.types.gpu_droplets import SizeListResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -19,13 +19,13 @@ class TestSizes: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: size = client.gpu_droplets.sizes.list() assert_matches_type(SizeListResponse, size, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: size = client.gpu_droplets.sizes.list( page=1, per_page=1, @@ -34,7 +34,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.sizes.with_raw_response.list() assert response.is_closed is True @@ -44,7 +44,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.sizes.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -62,13 +62,13 @@ class TestAsyncSizes: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: size = await async_client.gpu_droplets.sizes.list() assert_matches_type(SizeListResponse, size, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: size = await async_client.gpu_droplets.sizes.list( page=1, per_page=1, @@ -77,7 +77,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.sizes.with_raw_response.list() assert response.is_closed is True @@ -87,7 +87,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.sizes.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/api_resources/gpu_droplets/test_snapshots.py b/tests/api_resources/gpu_droplets/test_snapshots.py index 413dd993..d4574ece 100644 --- a/tests/api_resources/gpu_droplets/test_snapshots.py +++ b/tests/api_resources/gpu_droplets/test_snapshots.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.gpu_droplets import SnapshotListResponse, 
SnapshotRetrieveResponse +from gradient.types.gpu_droplets import SnapshotListResponse, SnapshotRetrieveResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -19,7 +19,7 @@ class TestSnapshots: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: snapshot = client.gpu_droplets.snapshots.retrieve( 6372321, ) @@ -27,7 +27,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.snapshots.with_raw_response.retrieve( 6372321, ) @@ -39,7 +39,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.snapshots.with_streaming_response.retrieve( 6372321, ) as response: @@ -53,13 +53,13 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: snapshot = client.gpu_droplets.snapshots.list() assert_matches_type(SnapshotListResponse, snapshot, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: snapshot = client.gpu_droplets.snapshots.list( page=1, per_page=1, @@ -69,7 +69,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.snapshots.with_raw_response.list() assert response.is_closed is True @@ -79,7 +79,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.snapshots.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -91,7 +91,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: snapshot = client.gpu_droplets.snapshots.delete( 6372321, ) @@ -99,7 +99,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.snapshots.with_raw_response.delete( 6372321, ) @@ -111,7 +111,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.snapshots.with_streaming_response.delete( 6372321, ) as response: @@ -131,7 +131,7 @@ class TestAsyncSnapshots: @pytest.mark.skip() @parametrize - 
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.snapshots.retrieve( 6372321, ) @@ -139,7 +139,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.snapshots.with_raw_response.retrieve( 6372321, ) @@ -151,7 +151,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.snapshots.with_streaming_response.retrieve( 6372321, ) as response: @@ -165,13 +165,13 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.snapshots.list() assert_matches_type(SnapshotListResponse, snapshot, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.snapshots.list( page=1, per_page=1, @@ -181,7 +181,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.snapshots.with_raw_response.list() assert response.is_closed is True @@ -191,7 +191,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.snapshots.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -203,7 +203,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.snapshots.delete( 6372321, ) @@ -211,7 +211,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.snapshots.with_raw_response.delete( 6372321, ) @@ -223,7 +223,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def 
test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.snapshots.with_streaming_response.delete( 6372321, ) as response: diff --git a/tests/api_resources/gpu_droplets/test_volumes.py b/tests/api_resources/gpu_droplets/test_volumes.py index baf6b430..49436220 100644 --- a/tests/api_resources/gpu_droplets/test_volumes.py +++ b/tests/api_resources/gpu_droplets/test_volumes.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.gpu_droplets import ( +from gradient.types.gpu_droplets import ( VolumeListResponse, VolumeCreateResponse, VolumeRetrieveResponse, @@ -23,7 +23,7 @@ class TestVolumes: @pytest.mark.skip() @parametrize - def test_method_create_overload_1(self, client: GradientAI) -> None: + def test_method_create_overload_1(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.create( name="example", region="nyc3", @@ -33,7 +33,7 @@ def test_method_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> None: + def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.create( name="example", region="nyc3", @@ -48,7 +48,7 @@ def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_raw_response_create_overload_1(self, client: GradientAI) -> None: + def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.with_raw_response.create( name="example", region="nyc3", @@ -62,7 +62,7 @@ def test_raw_response_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create_overload_1(self, client: GradientAI) -> None: + def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.volumes.with_streaming_response.create( name="example", region="nyc3", @@ -78,7 +78,7 @@ def test_streaming_response_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_overload_2(self, client: GradientAI) -> None: + def test_method_create_overload_2(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.create( name="example", region="nyc3", @@ -88,7 +88,7 @@ def test_method_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> None: + def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.create( name="example", region="nyc3", @@ -103,7 +103,7 @@ def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_raw_response_create_overload_2(self, client: GradientAI) -> None: + def test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.with_raw_response.create( name="example", region="nyc3", @@ -117,7 +117,7 @@ def test_raw_response_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create_overload_2(self, 
client: GradientAI) -> None: + def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.volumes.with_streaming_response.create( name="example", region="nyc3", @@ -133,7 +133,7 @@ def test_streaming_response_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -141,7 +141,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.with_raw_response.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -153,7 +153,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.volumes.with_streaming_response.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) as response: @@ -167,7 +167,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): client.gpu_droplets.volumes.with_raw_response.retrieve( "", @@ -175,13 +175,13 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.list() assert_matches_type(VolumeListResponse, volume, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.list( name="name", page=1, @@ -192,7 +192,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.with_raw_response.list() assert response.is_closed is True @@ -202,7 +202,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.volumes.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -214,7 +214,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.delete( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -222,7 +222,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def 
test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.with_raw_response.delete( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -234,7 +234,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.volumes.with_streaming_response.delete( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) as response: @@ -248,7 +248,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): client.gpu_droplets.volumes.with_raw_response.delete( "", @@ -256,13 +256,13 @@ def test_path_params_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete_by_name(self, client: GradientAI) -> None: + def test_method_delete_by_name(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.delete_by_name() assert volume is None @pytest.mark.skip() @parametrize - def test_method_delete_by_name_with_all_params(self, client: GradientAI) -> None: + def test_method_delete_by_name_with_all_params(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.delete_by_name( name="name", region="nyc3", @@ -271,7 +271,7 @@ def test_method_delete_by_name_with_all_params(self, client: GradientAI) -> None @pytest.mark.skip() @parametrize - def test_raw_response_delete_by_name(self, client: GradientAI) -> None: + def test_raw_response_delete_by_name(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.with_raw_response.delete_by_name() assert response.is_closed is True @@ -281,7 +281,7 @@ def test_raw_response_delete_by_name(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete_by_name(self, client: GradientAI) -> None: + def test_streaming_response_delete_by_name(self, client: Gradient) -> None: with client.gpu_droplets.volumes.with_streaming_response.delete_by_name() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -299,7 +299,7 @@ class TestAsyncVolumes: @pytest.mark.skip() @parametrize - async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.create( name="example", region="nyc3", @@ -309,7 +309,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.create( name="example", region="nyc3", @@ -324,7 +324,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create_overload_1(self, async_client: 
AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.with_raw_response.create( name="example", region="nyc3", @@ -338,7 +338,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.with_streaming_response.create( name="example", region="nyc3", @@ -354,7 +354,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.create( name="example", region="nyc3", @@ -364,7 +364,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.create( name="example", region="nyc3", @@ -379,7 +379,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.with_raw_response.create( name="example", region="nyc3", @@ -393,7 +393,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.with_streaming_response.create( name="example", region="nyc3", @@ -409,7 +409,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -417,7 +417,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.with_raw_response.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -429,7 +429,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.with_streaming_response.retrieve( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) as response: @@ -443,7 +443,7 @@ async def 
test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): await async_client.gpu_droplets.volumes.with_raw_response.retrieve( "", @@ -451,13 +451,13 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.list() assert_matches_type(VolumeListResponse, volume, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.list( name="name", page=1, @@ -468,7 +468,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.with_raw_response.list() assert response.is_closed is True @@ -478,7 +478,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -490,7 +490,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.delete( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -498,7 +498,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.with_raw_response.delete( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) @@ -510,7 +510,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.with_streaming_response.delete( "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e", ) as response: @@ -524,7 +524,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: 
AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): await async_client.gpu_droplets.volumes.with_raw_response.delete( "", @@ -532,13 +532,13 @@ async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_delete_by_name(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete_by_name(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.delete_by_name() assert volume is None @pytest.mark.skip() @parametrize - async def test_method_delete_by_name_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete_by_name_with_all_params(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.delete_by_name( name="name", region="nyc3", @@ -547,7 +547,7 @@ async def test_method_delete_by_name_with_all_params(self, async_client: AsyncGr @pytest.mark.skip() @parametrize - async def test_raw_response_delete_by_name(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete_by_name(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.with_raw_response.delete_by_name() assert response.is_closed is True @@ -557,7 +557,7 @@ async def test_raw_response_delete_by_name(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_streaming_response_delete_by_name(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete_by_name(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.with_streaming_response.delete_by_name() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/api_resources/gpu_droplets/volumes/test_actions.py b/tests/api_resources/gpu_droplets/volumes/test_actions.py index 40d9b4eb..19088e9e 100644 --- a/tests/api_resources/gpu_droplets/volumes/test_actions.py +++ b/tests/api_resources/gpu_droplets/volumes/test_actions.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.gpu_droplets.volumes import ( +from gradient.types.gpu_droplets.volumes import ( ActionListResponse, ActionRetrieveResponse, ActionInitiateByIDResponse, @@ -24,7 +24,7 @@ class TestActions: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.retrieve( action_id=36804636, volume_id="7724db7c-e098-11e5-b522-000f53304e51", @@ -33,7 +33,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve_with_all_params(self, client: GradientAI) -> None: + def test_method_retrieve_with_all_params(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.retrieve( action_id=36804636, volume_id="7724db7c-e098-11e5-b522-000f53304e51", @@ -44,7 +44,7 @@ def test_method_retrieve_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = 
client.gpu_droplets.volumes.actions.with_raw_response.retrieve( action_id=36804636, volume_id="7724db7c-e098-11e5-b522-000f53304e51", @@ -57,7 +57,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.volumes.actions.with_streaming_response.retrieve( action_id=36804636, volume_id="7724db7c-e098-11e5-b522-000f53304e51", @@ -72,7 +72,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): client.gpu_droplets.volumes.actions.with_raw_response.retrieve( action_id=36804636, @@ -81,7 +81,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", ) @@ -89,7 +89,7 @@ def test_method_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", page=1, @@ -99,7 +99,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.actions.with_raw_response.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", ) @@ -111,7 +111,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.volumes.actions.with_streaming_response.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", ) as response: @@ -125,7 +125,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list(self, client: GradientAI) -> None: + def test_path_params_list(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): client.gpu_droplets.volumes.actions.with_raw_response.list( volume_id="", @@ -133,7 +133,7 @@ def test_path_params_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_initiate_by_id_overload_1(self, client: GradientAI) -> None: + def test_method_initiate_by_id_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -143,7 +143,7 @@ def test_method_initiate_by_id_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_initiate_by_id_with_all_params_overload_1(self, client: GradientAI) -> None: + def test_method_initiate_by_id_with_all_params_overload_1(self, 
client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -157,7 +157,7 @@ def test_method_initiate_by_id_with_all_params_overload_1(self, client: Gradient @pytest.mark.skip() @parametrize - def test_raw_response_initiate_by_id_overload_1(self, client: GradientAI) -> None: + def test_raw_response_initiate_by_id_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -171,7 +171,7 @@ def test_raw_response_initiate_by_id_overload_1(self, client: GradientAI) -> Non @pytest.mark.skip() @parametrize - def test_streaming_response_initiate_by_id_overload_1(self, client: GradientAI) -> None: + def test_streaming_response_initiate_by_id_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -187,7 +187,7 @@ def test_streaming_response_initiate_by_id_overload_1(self, client: GradientAI) @pytest.mark.skip() @parametrize - def test_path_params_initiate_by_id_overload_1(self, client: GradientAI) -> None: + def test_path_params_initiate_by_id_overload_1(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( volume_id="", @@ -197,7 +197,7 @@ def test_path_params_initiate_by_id_overload_1(self, client: GradientAI) -> None @pytest.mark.skip() @parametrize - def test_method_initiate_by_id_overload_2(self, client: GradientAI) -> None: + def test_method_initiate_by_id_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -207,7 +207,7 @@ def test_method_initiate_by_id_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_initiate_by_id_with_all_params_overload_2(self, client: GradientAI) -> None: + def test_method_initiate_by_id_with_all_params_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -220,7 +220,7 @@ def test_method_initiate_by_id_with_all_params_overload_2(self, client: Gradient @pytest.mark.skip() @parametrize - def test_raw_response_initiate_by_id_overload_2(self, client: GradientAI) -> None: + def test_raw_response_initiate_by_id_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -234,7 +234,7 @@ def test_raw_response_initiate_by_id_overload_2(self, client: GradientAI) -> Non @pytest.mark.skip() @parametrize - def test_streaming_response_initiate_by_id_overload_2(self, client: GradientAI) -> None: + def test_streaming_response_initiate_by_id_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -250,7 +250,7 @@ def test_streaming_response_initiate_by_id_overload_2(self, client: GradientAI) @pytest.mark.skip() @parametrize - def test_path_params_initiate_by_id_overload_2(self, client: 
GradientAI) -> None: + def test_path_params_initiate_by_id_overload_2(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( volume_id="", @@ -260,7 +260,7 @@ def test_path_params_initiate_by_id_overload_2(self, client: GradientAI) -> None @pytest.mark.skip() @parametrize - def test_method_initiate_by_id_overload_3(self, client: GradientAI) -> None: + def test_method_initiate_by_id_overload_3(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", size_gigabytes=16384, @@ -270,7 +270,7 @@ def test_method_initiate_by_id_overload_3(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_initiate_by_id_with_all_params_overload_3(self, client: GradientAI) -> None: + def test_method_initiate_by_id_with_all_params_overload_3(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", size_gigabytes=16384, @@ -283,7 +283,7 @@ def test_method_initiate_by_id_with_all_params_overload_3(self, client: Gradient @pytest.mark.skip() @parametrize - def test_raw_response_initiate_by_id_overload_3(self, client: GradientAI) -> None: + def test_raw_response_initiate_by_id_overload_3(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", size_gigabytes=16384, @@ -297,7 +297,7 @@ def test_raw_response_initiate_by_id_overload_3(self, client: GradientAI) -> Non @pytest.mark.skip() @parametrize - def test_streaming_response_initiate_by_id_overload_3(self, client: GradientAI) -> None: + def test_streaming_response_initiate_by_id_overload_3(self, client: Gradient) -> None: with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", size_gigabytes=16384, @@ -313,7 +313,7 @@ def test_streaming_response_initiate_by_id_overload_3(self, client: GradientAI) @pytest.mark.skip() @parametrize - def test_path_params_initiate_by_id_overload_3(self, client: GradientAI) -> None: + def test_path_params_initiate_by_id_overload_3(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( volume_id="", @@ -323,7 +323,7 @@ def test_path_params_initiate_by_id_overload_3(self, client: GradientAI) -> None @pytest.mark.skip() @parametrize - def test_method_initiate_by_name_overload_1(self, client: GradientAI) -> None: + def test_method_initiate_by_name_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_name( droplet_id=11612190, type="attach", @@ -332,7 +332,7 @@ def test_method_initiate_by_name_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_initiate_by_name_with_all_params_overload_1(self, client: GradientAI) -> None: + def test_method_initiate_by_name_with_all_params_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_name( droplet_id=11612190, type="attach", @@ -345,7 +345,7 @@ def test_method_initiate_by_name_with_all_params_overload_1(self, client: Gradie @pytest.mark.skip() @parametrize - def 
test_raw_response_initiate_by_name_overload_1(self, client: GradientAI) -> None: + def test_raw_response_initiate_by_name_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name( droplet_id=11612190, type="attach", @@ -358,7 +358,7 @@ def test_raw_response_initiate_by_name_overload_1(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_streaming_response_initiate_by_name_overload_1(self, client: GradientAI) -> None: + def test_streaming_response_initiate_by_name_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name( droplet_id=11612190, type="attach", @@ -373,7 +373,7 @@ def test_streaming_response_initiate_by_name_overload_1(self, client: GradientAI @pytest.mark.skip() @parametrize - def test_method_initiate_by_name_overload_2(self, client: GradientAI) -> None: + def test_method_initiate_by_name_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_name( droplet_id=11612190, type="attach", @@ -382,7 +382,7 @@ def test_method_initiate_by_name_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_initiate_by_name_with_all_params_overload_2(self, client: GradientAI) -> None: + def test_method_initiate_by_name_with_all_params_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_name( droplet_id=11612190, type="attach", @@ -394,7 +394,7 @@ def test_method_initiate_by_name_with_all_params_overload_2(self, client: Gradie @pytest.mark.skip() @parametrize - def test_raw_response_initiate_by_name_overload_2(self, client: GradientAI) -> None: + def test_raw_response_initiate_by_name_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name( droplet_id=11612190, type="attach", @@ -407,7 +407,7 @@ def test_raw_response_initiate_by_name_overload_2(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_streaming_response_initiate_by_name_overload_2(self, client: GradientAI) -> None: + def test_streaming_response_initiate_by_name_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name( droplet_id=11612190, type="attach", @@ -428,7 +428,7 @@ class TestAsyncActions: @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.retrieve( action_id=36804636, volume_id="7724db7c-e098-11e5-b522-000f53304e51", @@ -437,7 +437,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_retrieve_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.retrieve( action_id=36804636, volume_id="7724db7c-e098-11e5-b522-000f53304e51", @@ -448,7 +448,7 @@ async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await 
async_client.gpu_droplets.volumes.actions.with_raw_response.retrieve( action_id=36804636, volume_id="7724db7c-e098-11e5-b522-000f53304e51", @@ -461,7 +461,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.actions.with_streaming_response.retrieve( action_id=36804636, volume_id="7724db7c-e098-11e5-b522-000f53304e51", @@ -476,7 +476,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): await async_client.gpu_droplets.volumes.actions.with_raw_response.retrieve( action_id=36804636, @@ -485,7 +485,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", ) @@ -493,7 +493,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", page=1, @@ -503,7 +503,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.actions.with_raw_response.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", ) @@ -515,7 +515,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.actions.with_streaming_response.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", ) as response: @@ -529,7 +529,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): await async_client.gpu_droplets.volumes.actions.with_raw_response.list( volume_id="", @@ -537,7 +537,7 @@ async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_initiate_by_id_overload_1(self, async_client: AsyncGradientAI) -> None: + async def 
test_method_initiate_by_id_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -547,7 +547,7 @@ async def test_method_initiate_by_id_overload_1(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_method_initiate_by_id_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_by_id_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -561,7 +561,7 @@ async def test_method_initiate_by_id_with_all_params_overload_1(self, async_clie @pytest.mark.skip() @parametrize - async def test_raw_response_initiate_by_id_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_initiate_by_id_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -575,7 +575,7 @@ async def test_raw_response_initiate_by_id_overload_1(self, async_client: AsyncG @pytest.mark.skip() @parametrize - async def test_streaming_response_initiate_by_id_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_initiate_by_id_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -591,7 +591,7 @@ async def test_streaming_response_initiate_by_id_overload_1(self, async_client: @pytest.mark.skip() @parametrize - async def test_path_params_initiate_by_id_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_initiate_by_id_overload_1(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( volume_id="", @@ -601,7 +601,7 @@ async def test_path_params_initiate_by_id_overload_1(self, async_client: AsyncGr @pytest.mark.skip() @parametrize - async def test_method_initiate_by_id_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_by_id_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -611,7 +611,7 @@ async def test_method_initiate_by_id_overload_2(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_method_initiate_by_id_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_by_id_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -624,7 +624,7 @@ async def test_method_initiate_by_id_with_all_params_overload_2(self, async_clie @pytest.mark.skip() @parametrize - async def test_raw_response_initiate_by_id_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_initiate_by_id_overload_2(self, async_client: 
AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -638,7 +638,7 @@ async def test_raw_response_initiate_by_id_overload_2(self, async_client: AsyncG @pytest.mark.skip() @parametrize - async def test_streaming_response_initiate_by_id_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_initiate_by_id_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", droplet_id=11612190, @@ -654,7 +654,7 @@ async def test_streaming_response_initiate_by_id_overload_2(self, async_client: @pytest.mark.skip() @parametrize - async def test_path_params_initiate_by_id_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_initiate_by_id_overload_2(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( volume_id="", @@ -664,7 +664,7 @@ async def test_path_params_initiate_by_id_overload_2(self, async_client: AsyncGr @pytest.mark.skip() @parametrize - async def test_method_initiate_by_id_overload_3(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_by_id_overload_3(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", size_gigabytes=16384, @@ -674,7 +674,7 @@ async def test_method_initiate_by_id_overload_3(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_method_initiate_by_id_with_all_params_overload_3(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_by_id_with_all_params_overload_3(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", size_gigabytes=16384, @@ -687,7 +687,7 @@ async def test_method_initiate_by_id_with_all_params_overload_3(self, async_clie @pytest.mark.skip() @parametrize - async def test_raw_response_initiate_by_id_overload_3(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_initiate_by_id_overload_3(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", size_gigabytes=16384, @@ -701,7 +701,7 @@ async def test_raw_response_initiate_by_id_overload_3(self, async_client: AsyncG @pytest.mark.skip() @parametrize - async def test_streaming_response_initiate_by_id_overload_3(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_initiate_by_id_overload_3(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id( volume_id="7724db7c-e098-11e5-b522-000f53304e51", size_gigabytes=16384, @@ -717,7 +717,7 @@ async def test_streaming_response_initiate_by_id_overload_3(self, async_client: @pytest.mark.skip() @parametrize - async def test_path_params_initiate_by_id_overload_3(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_initiate_by_id_overload_3(self, async_client: AsyncGradient) -> None: with 
pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( volume_id="", @@ -727,7 +727,7 @@ async def test_path_params_initiate_by_id_overload_3(self, async_client: AsyncGr @pytest.mark.skip() @parametrize - async def test_method_initiate_by_name_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_by_name_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_name( droplet_id=11612190, type="attach", @@ -736,7 +736,7 @@ async def test_method_initiate_by_name_overload_1(self, async_client: AsyncGradi @pytest.mark.skip() @parametrize - async def test_method_initiate_by_name_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_by_name_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_name( droplet_id=11612190, type="attach", @@ -749,7 +749,7 @@ async def test_method_initiate_by_name_with_all_params_overload_1(self, async_cl @pytest.mark.skip() @parametrize - async def test_raw_response_initiate_by_name_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_initiate_by_name_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name( droplet_id=11612190, type="attach", @@ -762,7 +762,7 @@ async def test_raw_response_initiate_by_name_overload_1(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_streaming_response_initiate_by_name_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_initiate_by_name_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name( droplet_id=11612190, type="attach", @@ -777,7 +777,7 @@ async def test_streaming_response_initiate_by_name_overload_1(self, async_client @pytest.mark.skip() @parametrize - async def test_method_initiate_by_name_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_by_name_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_name( droplet_id=11612190, type="attach", @@ -786,7 +786,7 @@ async def test_method_initiate_by_name_overload_2(self, async_client: AsyncGradi @pytest.mark.skip() @parametrize - async def test_method_initiate_by_name_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_initiate_by_name_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_name( droplet_id=11612190, type="attach", @@ -798,7 +798,7 @@ async def test_method_initiate_by_name_with_all_params_overload_2(self, async_cl @pytest.mark.skip() @parametrize - async def test_raw_response_initiate_by_name_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_initiate_by_name_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name( droplet_id=11612190, type="attach", @@ -811,7 +811,7 @@ async def test_raw_response_initiate_by_name_overload_2(self, async_client: Asyn 
@pytest.mark.skip() @parametrize - async def test_streaming_response_initiate_by_name_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_initiate_by_name_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name( droplet_id=11612190, type="attach", diff --git a/tests/api_resources/gpu_droplets/volumes/test_snapshots.py b/tests/api_resources/gpu_droplets/volumes/test_snapshots.py index 4884d372..5037c7bb 100644 --- a/tests/api_resources/gpu_droplets/volumes/test_snapshots.py +++ b/tests/api_resources/gpu_droplets/volumes/test_snapshots.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.gpu_droplets.volumes import ( +from gradient.types.gpu_droplets.volumes import ( SnapshotListResponse, SnapshotCreateResponse, SnapshotRetrieveResponse, @@ -23,7 +23,7 @@ class TestSnapshots: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: snapshot = client.gpu_droplets.volumes.snapshots.create( volume_id="7724db7c-e098-11e5-b522-000f53304e51", name="big-data-snapshot1475261774", @@ -32,7 +32,7 @@ def test_method_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def test_method_create_with_all_params(self, client: Gradient) -> None: snapshot = client.gpu_droplets.volumes.snapshots.create( volume_id="7724db7c-e098-11e5-b522-000f53304e51", name="big-data-snapshot1475261774", @@ -42,7 +42,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.snapshots.with_raw_response.create( volume_id="7724db7c-e098-11e5-b522-000f53304e51", name="big-data-snapshot1475261774", @@ -55,7 +55,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.gpu_droplets.volumes.snapshots.with_streaming_response.create( volume_id="7724db7c-e098-11e5-b522-000f53304e51", name="big-data-snapshot1475261774", @@ -70,7 +70,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_create(self, client: GradientAI) -> None: + def test_path_params_create(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): client.gpu_droplets.volumes.snapshots.with_raw_response.create( volume_id="", @@ -79,7 +79,7 @@ def test_path_params_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: snapshot = client.gpu_droplets.volumes.snapshots.retrieve( "snapshot_id", ) @@ -87,7 +87,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def 
test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.snapshots.with_raw_response.retrieve( "snapshot_id", ) @@ -99,7 +99,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.volumes.snapshots.with_streaming_response.retrieve( "snapshot_id", ) as response: @@ -113,7 +113,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"): client.gpu_droplets.volumes.snapshots.with_raw_response.retrieve( "", @@ -121,7 +121,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: snapshot = client.gpu_droplets.volumes.snapshots.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", ) @@ -129,7 +129,7 @@ def test_method_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: snapshot = client.gpu_droplets.volumes.snapshots.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", page=1, @@ -139,7 +139,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.snapshots.with_raw_response.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", ) @@ -151,7 +151,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.volumes.snapshots.with_streaming_response.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", ) as response: @@ -165,7 +165,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list(self, client: GradientAI) -> None: + def test_path_params_list(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): client.gpu_droplets.volumes.snapshots.with_raw_response.list( volume_id="", @@ -173,7 +173,7 @@ def test_path_params_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: snapshot = client.gpu_droplets.volumes.snapshots.delete( "snapshot_id", ) @@ -181,7 +181,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.snapshots.with_raw_response.delete( "snapshot_id", ) @@ -193,7 +193,7 @@ def test_raw_response_delete(self, client: 
GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.volumes.snapshots.with_streaming_response.delete( "snapshot_id", ) as response: @@ -207,7 +207,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"): client.gpu_droplets.volumes.snapshots.with_raw_response.delete( "", @@ -221,7 +221,7 @@ class TestAsyncSnapshots: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.volumes.snapshots.create( volume_id="7724db7c-e098-11e5-b522-000f53304e51", name="big-data-snapshot1475261774", @@ -230,7 +230,7 @@ async def test_method_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.volumes.snapshots.create( volume_id="7724db7c-e098-11e5-b522-000f53304e51", name="big-data-snapshot1475261774", @@ -240,7 +240,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.create( volume_id="7724db7c-e098-11e5-b522-000f53304e51", name="big-data-snapshot1475261774", @@ -253,7 +253,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.create( volume_id="7724db7c-e098-11e5-b522-000f53304e51", name="big-data-snapshot1475261774", @@ -268,7 +268,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_create(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_create(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): await async_client.gpu_droplets.volumes.snapshots.with_raw_response.create( volume_id="", @@ -277,7 +277,7 @@ async def test_path_params_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.volumes.snapshots.retrieve( "snapshot_id", ) @@ -285,7 +285,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def 
test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.retrieve( "snapshot_id", ) @@ -297,7 +297,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.retrieve( "snapshot_id", ) as response: @@ -311,7 +311,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"): await async_client.gpu_droplets.volumes.snapshots.with_raw_response.retrieve( "", @@ -319,7 +319,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.volumes.snapshots.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", ) @@ -327,7 +327,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.volumes.snapshots.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", page=1, @@ -337,7 +337,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", ) @@ -349,7 +349,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.list( volume_id="7724db7c-e098-11e5-b522-000f53304e51", ) as response: @@ -363,7 +363,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): await async_client.gpu_droplets.volumes.snapshots.with_raw_response.list( volume_id="", @@ -371,7 +371,7 @@ async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_delete(self, 
async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.volumes.snapshots.delete( "snapshot_id", ) @@ -379,7 +379,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.delete( "snapshot_id", ) @@ -391,7 +391,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.delete( "snapshot_id", ) as response: @@ -405,7 +405,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"): await async_client.gpu_droplets.volumes.snapshots.with_raw_response.delete( "", diff --git a/tests/api_resources/inference/test_api_keys.py b/tests/api_resources/inference/test_api_keys.py index 85ad49da..f22947ed 100644 --- a/tests/api_resources/inference/test_api_keys.py +++ b/tests/api_resources/inference/test_api_keys.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.inference import ( +from gradient.types.inference import ( APIKeyListResponse, APIKeyCreateResponse, APIKeyDeleteResponse, @@ -25,13 +25,13 @@ class TestAPIKeys: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: api_key = client.inference.api_keys.create() assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def test_method_create_with_all_params(self, client: Gradient) -> None: api_key = client.inference.api_keys.create( name="Production Key", ) @@ -39,7 +39,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.inference.api_keys.with_raw_response.create() assert response.is_closed is True @@ -49,7 +49,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.inference.api_keys.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -61,7 +61,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def 
test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: api_key = client.inference.api_keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -69,7 +69,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: api_key = client.inference.api_keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', body_api_key_uuid='"12345678-1234-1234-1234-123456789012"', @@ -79,7 +79,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.inference.api_keys.with_raw_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -91,7 +91,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.inference.api_keys.with_streaming_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -105,7 +105,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): client.inference.api_keys.with_raw_response.update( path_api_key_uuid="", @@ -113,13 +113,13 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: api_key = client.inference.api_keys.list() assert_matches_type(APIKeyListResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: api_key = client.inference.api_keys.list( page=0, per_page=0, @@ -128,7 +128,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.inference.api_keys.with_raw_response.list() assert response.is_closed is True @@ -138,7 +138,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.inference.api_keys.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -150,7 +150,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: api_key = client.inference.api_keys.delete( "api_key_uuid", ) @@ -158,7 +158,7 @@ def 
test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.inference.api_keys.with_raw_response.delete( "api_key_uuid", ) @@ -170,7 +170,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.inference.api_keys.with_streaming_response.delete( "api_key_uuid", ) as response: @@ -184,7 +184,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): client.inference.api_keys.with_raw_response.delete( "", @@ -192,7 +192,7 @@ def test_path_params_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_regenerate(self, client: GradientAI) -> None: + def test_method_update_regenerate(self, client: Gradient) -> None: api_key = client.inference.api_keys.update_regenerate( "api_key_uuid", ) @@ -200,7 +200,7 @@ def test_method_update_regenerate(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update_regenerate(self, client: GradientAI) -> None: + def test_raw_response_update_regenerate(self, client: Gradient) -> None: response = client.inference.api_keys.with_raw_response.update_regenerate( "api_key_uuid", ) @@ -212,7 +212,7 @@ def test_raw_response_update_regenerate(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update_regenerate(self, client: GradientAI) -> None: + def test_streaming_response_update_regenerate(self, client: Gradient) -> None: with client.inference.api_keys.with_streaming_response.update_regenerate( "api_key_uuid", ) as response: @@ -226,7 +226,7 @@ def test_streaming_response_update_regenerate(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update_regenerate(self, client: GradientAI) -> None: + def test_path_params_update_regenerate(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): client.inference.api_keys.with_raw_response.update_regenerate( "", @@ -240,13 +240,13 @@ class TestAsyncAPIKeys: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.create() assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.create( name="Production Key", ) @@ -254,7 +254,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) 
-> None: response = await async_client.inference.api_keys.with_raw_response.create() assert response.is_closed is True @@ -264,7 +264,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.inference.api_keys.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -276,7 +276,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -284,7 +284,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', body_api_key_uuid='"12345678-1234-1234-1234-123456789012"', @@ -294,7 +294,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.inference.api_keys.with_raw_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -306,7 +306,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.inference.api_keys.with_streaming_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -320,7 +320,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): await async_client.inference.api_keys.with_raw_response.update( path_api_key_uuid="", @@ -328,13 +328,13 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.list() assert_matches_type(APIKeyListResponse, api_key, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: api_key = await 
async_client.inference.api_keys.list( page=0, per_page=0, @@ -343,7 +343,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.inference.api_keys.with_raw_response.list() assert response.is_closed is True @@ -353,7 +353,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.inference.api_keys.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -365,7 +365,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.delete( "api_key_uuid", ) @@ -373,7 +373,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.inference.api_keys.with_raw_response.delete( "api_key_uuid", ) @@ -385,7 +385,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.inference.api_keys.with_streaming_response.delete( "api_key_uuid", ) as response: @@ -399,7 +399,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): await async_client.inference.api_keys.with_raw_response.delete( "", @@ -407,7 +407,7 @@ async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_regenerate(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_regenerate(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.update_regenerate( "api_key_uuid", ) @@ -415,7 +415,7 @@ async def test_method_update_regenerate(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_raw_response_update_regenerate(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update_regenerate(self, async_client: AsyncGradient) -> None: response = await async_client.inference.api_keys.with_raw_response.update_regenerate( "api_key_uuid", ) @@ -427,7 +427,7 @@ async def test_raw_response_update_regenerate(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def 
test_streaming_response_update_regenerate(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update_regenerate(self, async_client: AsyncGradient) -> None: async with async_client.inference.api_keys.with_streaming_response.update_regenerate( "api_key_uuid", ) as response: @@ -441,7 +441,7 @@ async def test_streaming_response_update_regenerate(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_path_params_update_regenerate(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update_regenerate(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): await async_client.inference.api_keys.with_raw_response.update_regenerate( "", diff --git a/tests/api_resources/knowledge_bases/test_data_sources.py b/tests/api_resources/knowledge_bases/test_data_sources.py index ebb0841a..a5734cea 100644 --- a/tests/api_resources/knowledge_bases/test_data_sources.py +++ b/tests/api_resources/knowledge_bases/test_data_sources.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.knowledge_bases import ( +from gradient.types.knowledge_bases import ( DataSourceListResponse, DataSourceCreateResponse, DataSourceDeleteResponse, @@ -23,7 +23,7 @@ class TestDataSources: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: data_source = client.knowledge_bases.data_sources.create( path_knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -31,7 +31,7 @@ def test_method_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def test_method_create_with_all_params(self, client: Gradient) -> None: data_source = client.knowledge_bases.data_sources.create( path_knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', aws_data_source={ @@ -57,7 +57,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.knowledge_bases.data_sources.with_raw_response.create( path_knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -69,7 +69,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.knowledge_bases.data_sources.with_streaming_response.create( path_knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -83,7 +83,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_create(self, client: GradientAI) -> None: + def test_path_params_create(self, client: Gradient) -> None: with pytest.raises( ValueError, match=r"Expected a non-empty value for `path_knowledge_base_uuid` but received ''" ): @@ -93,7 +93,7 @@ def test_path_params_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) 
-> None: data_source = client.knowledge_bases.data_sources.list( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -101,7 +101,7 @@ def test_method_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: data_source = client.knowledge_bases.data_sources.list( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -111,7 +111,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.knowledge_bases.data_sources.with_raw_response.list( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -123,7 +123,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.knowledge_bases.data_sources.with_streaming_response.list( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -137,7 +137,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list(self, client: GradientAI) -> None: + def test_path_params_list(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"): client.knowledge_bases.data_sources.with_raw_response.list( knowledge_base_uuid="", @@ -145,7 +145,7 @@ def test_path_params_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: data_source = client.knowledge_bases.data_sources.delete( data_source_uuid='"123e4567-e89b-12d3-a456-426614174000"', knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -154,7 +154,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.knowledge_bases.data_sources.with_raw_response.delete( data_source_uuid='"123e4567-e89b-12d3-a456-426614174000"', knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -167,7 +167,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.knowledge_bases.data_sources.with_streaming_response.delete( data_source_uuid='"123e4567-e89b-12d3-a456-426614174000"', knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -182,7 +182,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"): client.knowledge_bases.data_sources.with_raw_response.delete( data_source_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -203,7 +203,7 @@ class 
TestAsyncDataSources: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: data_source = await async_client.knowledge_bases.data_sources.create( path_knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -211,7 +211,7 @@ async def test_method_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: data_source = await async_client.knowledge_bases.data_sources.create( path_knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', aws_data_source={ @@ -237,7 +237,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.data_sources.with_raw_response.create( path_knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -249,7 +249,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.data_sources.with_streaming_response.create( path_knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -263,7 +263,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_create(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_create(self, async_client: AsyncGradient) -> None: with pytest.raises( ValueError, match=r"Expected a non-empty value for `path_knowledge_base_uuid` but received ''" ): @@ -273,7 +273,7 @@ async def test_path_params_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: data_source = await async_client.knowledge_bases.data_sources.list( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -281,7 +281,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: data_source = await async_client.knowledge_bases.data_sources.list( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -291,7 +291,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.data_sources.with_raw_response.list( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -303,7 +303,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: 
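# A minimal sketch of the async usage pattern the renamed tests above now
# exercise: fixtures are typed as `AsyncGradient` (previously `AsyncGradientAI`)
# and await the same `knowledge_bases.data_sources` methods. Building the
# client inline with `api_key=` is an assumption for illustration only; in the
# test suite the client is injected through a pytest fixture.
import asyncio

from gradient import AsyncGradient


async def main() -> None:
    async_client = AsyncGradient(api_key="example-api-key")  # hypothetical construction
    data_sources = await async_client.knowledge_bases.data_sources.list(
        knowledge_base_uuid="123e4567-e89b-12d3-a456-426614174000",
    )
    print(data_sources)


asyncio.run(main())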
@pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.data_sources.with_streaming_response.list( knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -317,7 +317,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"): await async_client.knowledge_bases.data_sources.with_raw_response.list( knowledge_base_uuid="", @@ -325,7 +325,7 @@ async def test_path_params_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: data_source = await async_client.knowledge_bases.data_sources.delete( data_source_uuid='"123e4567-e89b-12d3-a456-426614174000"', knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -334,7 +334,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.data_sources.with_raw_response.delete( data_source_uuid='"123e4567-e89b-12d3-a456-426614174000"', knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -347,7 +347,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.data_sources.with_streaming_response.delete( data_source_uuid='"123e4567-e89b-12d3-a456-426614174000"', knowledge_base_uuid='"123e4567-e89b-12d3-a456-426614174000"', @@ -362,7 +362,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"): await async_client.knowledge_bases.data_sources.with_raw_response.delete( data_source_uuid='"123e4567-e89b-12d3-a456-426614174000"', diff --git a/tests/api_resources/knowledge_bases/test_indexing_jobs.py b/tests/api_resources/knowledge_bases/test_indexing_jobs.py index b0185941..231b22af 100644 --- a/tests/api_resources/knowledge_bases/test_indexing_jobs.py +++ b/tests/api_resources/knowledge_bases/test_indexing_jobs.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.knowledge_bases import ( +from gradient.types.knowledge_bases import ( IndexingJobListResponse, IndexingJobCreateResponse, IndexingJobRetrieveResponse, @@ -25,13 +25,13 
@@ class TestIndexingJobs: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.create() assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def test_method_create_with_all_params(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.create( data_source_uuids=["example string"], knowledge_base_uuid='"12345678-1234-1234-1234-123456789012"', @@ -40,7 +40,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.knowledge_bases.indexing_jobs.with_raw_response.create() assert response.is_closed is True @@ -50,7 +50,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.knowledge_bases.indexing_jobs.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -62,7 +62,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.retrieve( "uuid", ) @@ -70,7 +70,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.knowledge_bases.indexing_jobs.with_raw_response.retrieve( "uuid", ) @@ -82,7 +82,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.knowledge_bases.indexing_jobs.with_streaming_response.retrieve( "uuid", ) as response: @@ -96,7 +96,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.knowledge_bases.indexing_jobs.with_raw_response.retrieve( "", @@ -104,13 +104,13 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.list() assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.list( page=0, per_page=0, @@ -119,7 +119,7 @@ def 
test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.knowledge_bases.indexing_jobs.with_raw_response.list() assert response.is_closed is True @@ -129,7 +129,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.knowledge_bases.indexing_jobs.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -141,7 +141,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve_data_sources(self, client: GradientAI) -> None: + def test_method_retrieve_data_sources(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.retrieve_data_sources( "indexing_job_uuid", ) @@ -149,7 +149,7 @@ def test_method_retrieve_data_sources(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve_data_sources(self, client: GradientAI) -> None: + def test_raw_response_retrieve_data_sources(self, client: Gradient) -> None: response = client.knowledge_bases.indexing_jobs.with_raw_response.retrieve_data_sources( "indexing_job_uuid", ) @@ -161,7 +161,7 @@ def test_raw_response_retrieve_data_sources(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve_data_sources(self, client: GradientAI) -> None: + def test_streaming_response_retrieve_data_sources(self, client: Gradient) -> None: with client.knowledge_bases.indexing_jobs.with_streaming_response.retrieve_data_sources( "indexing_job_uuid", ) as response: @@ -175,7 +175,7 @@ def test_streaming_response_retrieve_data_sources(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_path_params_retrieve_data_sources(self, client: GradientAI) -> None: + def test_path_params_retrieve_data_sources(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `indexing_job_uuid` but received ''"): client.knowledge_bases.indexing_jobs.with_raw_response.retrieve_data_sources( "", @@ -183,7 +183,7 @@ def test_path_params_retrieve_data_sources(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_cancel(self, client: GradientAI) -> None: + def test_method_update_cancel(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.update_cancel( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -191,7 +191,7 @@ def test_method_update_cancel(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_cancel_with_all_params(self, client: GradientAI) -> None: + def test_method_update_cancel_with_all_params(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.update_cancel( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', body_uuid='"12345678-1234-1234-1234-123456789012"', @@ -200,7 +200,7 @@ def test_method_update_cancel_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update_cancel(self, client: GradientAI) -> None: + def test_raw_response_update_cancel(self, client: 
Gradient) -> None: response = client.knowledge_bases.indexing_jobs.with_raw_response.update_cancel( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -212,7 +212,7 @@ def test_raw_response_update_cancel(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update_cancel(self, client: GradientAI) -> None: + def test_streaming_response_update_cancel(self, client: Gradient) -> None: with client.knowledge_bases.indexing_jobs.with_streaming_response.update_cancel( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -226,7 +226,7 @@ def test_streaming_response_update_cancel(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update_cancel(self, client: GradientAI) -> None: + def test_path_params_update_cancel(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): client.knowledge_bases.indexing_jobs.with_raw_response.update_cancel( path_uuid="", @@ -240,13 +240,13 @@ class TestAsyncIndexingJobs: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.create() assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.create( data_source_uuids=["example string"], knowledge_base_uuid='"12345678-1234-1234-1234-123456789012"', @@ -255,7 +255,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.create() assert response.is_closed is True @@ -265,7 +265,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -277,7 +277,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.retrieve( "uuid", ) @@ -285,7 +285,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.retrieve( "uuid", ) @@ -297,7 +297,7 @@ async def 
test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.retrieve( "uuid", ) as response: @@ -311,7 +311,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.knowledge_bases.indexing_jobs.with_raw_response.retrieve( "", @@ -319,13 +319,13 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.list() assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.list( page=0, per_page=0, @@ -334,7 +334,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.list() assert response.is_closed is True @@ -344,7 +344,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -356,7 +356,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_retrieve_data_sources(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve_data_sources(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.retrieve_data_sources( "indexing_job_uuid", ) @@ -364,7 +364,7 @@ async def test_method_retrieve_data_sources(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve_data_sources(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve_data_sources(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.retrieve_data_sources( "indexing_job_uuid", ) @@ -376,7 +376,7 @@ async def test_raw_response_retrieve_data_sources(self, async_client: AsyncGradi @pytest.mark.skip() @parametrize - async def 
test_streaming_response_retrieve_data_sources(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve_data_sources(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.retrieve_data_sources( "indexing_job_uuid", ) as response: @@ -390,7 +390,7 @@ async def test_streaming_response_retrieve_data_sources(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_path_params_retrieve_data_sources(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve_data_sources(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `indexing_job_uuid` but received ''"): await async_client.knowledge_bases.indexing_jobs.with_raw_response.retrieve_data_sources( "", @@ -398,7 +398,7 @@ async def test_path_params_retrieve_data_sources(self, async_client: AsyncGradie @pytest.mark.skip() @parametrize - async def test_method_update_cancel(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_cancel(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.update_cancel( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -406,7 +406,7 @@ async def test_method_update_cancel(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_update_cancel_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_cancel_with_all_params(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.update_cancel( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', body_uuid='"12345678-1234-1234-1234-123456789012"', @@ -415,7 +415,7 @@ async def test_method_update_cancel_with_all_params(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_raw_response_update_cancel(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update_cancel(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.update_cancel( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -427,7 +427,7 @@ async def test_raw_response_update_cancel(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def test_streaming_response_update_cancel(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update_cancel(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.update_cancel( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -441,7 +441,7 @@ async def test_streaming_response_update_cancel(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_path_params_update_cancel(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update_cancel(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): await async_client.knowledge_bases.indexing_jobs.with_raw_response.update_cancel( path_uuid="", diff --git a/tests/api_resources/models/providers/test_anthropic.py b/tests/api_resources/models/providers/test_anthropic.py index 6b3d99a3..5bb7a1e9 100644 --- a/tests/api_resources/models/providers/test_anthropic.py +++ b/tests/api_resources/models/providers/test_anthropic.py @@ -7,9 +7,9 @@ 
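# A minimal sketch of the import rewrite that the following hunk applies to
# tests/api_resources/models/providers/test_anthropic.py: client classes are
# imported from the new `gradient` package without the "AI" suffix, and the
# response types keep their module path under the new package name. The inline
# client construction with `api_key=` is an assumption for illustration; the
# tests receive `client` from a pytest fixture instead.
from gradient import Gradient  # replaces: from do_gradientai import GradientAI
from gradient.types.models.providers import AnthropicListResponse

client = Gradient(api_key="example-api-key")  # hypothetical construction
anthropic_keys: AnthropicListResponse = client.models.providers.anthropic.list(page=0, per_page=0)
print(anthropic_keys)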
import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types.models.providers import ( +from gradient.types.models.providers import ( AnthropicListResponse, AnthropicCreateResponse, AnthropicDeleteResponse, @@ -26,13 +26,13 @@ class TestAnthropic: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.create() assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def test_method_create_with_all_params(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.create( api_key='"sk-ant-12345678901234567890123456789012"', name='"Production Key"', @@ -41,7 +41,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.models.providers.anthropic.with_raw_response.create() assert response.is_closed is True @@ -51,7 +51,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.models.providers.anthropic.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -63,7 +63,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.retrieve( "api_key_uuid", ) @@ -71,7 +71,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.models.providers.anthropic.with_raw_response.retrieve( "api_key_uuid", ) @@ -83,7 +83,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.models.providers.anthropic.with_streaming_response.retrieve( "api_key_uuid", ) as response: @@ -97,7 +97,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): client.models.providers.anthropic.with_raw_response.retrieve( "", @@ -105,7 +105,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.update( 
path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -113,7 +113,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', api_key='"sk-ant-12345678901234567890123456789012"', @@ -124,7 +124,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.models.providers.anthropic.with_raw_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -136,7 +136,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.models.providers.anthropic.with_streaming_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -150,7 +150,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): client.models.providers.anthropic.with_raw_response.update( path_api_key_uuid="", @@ -158,13 +158,13 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.list() assert_matches_type(AnthropicListResponse, anthropic, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.list( page=0, per_page=0, @@ -173,7 +173,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.models.providers.anthropic.with_raw_response.list() assert response.is_closed is True @@ -183,7 +183,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.models.providers.anthropic.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -195,7 +195,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.delete( "api_key_uuid", ) @@ -203,7 +203,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - 
def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.models.providers.anthropic.with_raw_response.delete( "api_key_uuid", ) @@ -215,7 +215,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.models.providers.anthropic.with_streaming_response.delete( "api_key_uuid", ) as response: @@ -229,7 +229,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): client.models.providers.anthropic.with_raw_response.delete( "", @@ -237,7 +237,7 @@ def test_path_params_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_agents(self, client: GradientAI) -> None: + def test_method_list_agents(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -245,7 +245,7 @@ def test_method_list_agents(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_agents_with_all_params(self, client: GradientAI) -> None: + def test_method_list_agents_with_all_params(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -255,7 +255,7 @@ def test_method_list_agents_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list_agents(self, client: GradientAI) -> None: + def test_raw_response_list_agents(self, client: Gradient) -> None: response = client.models.providers.anthropic.with_raw_response.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -267,7 +267,7 @@ def test_raw_response_list_agents(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_agents(self, client: GradientAI) -> None: + def test_streaming_response_list_agents(self, client: Gradient) -> None: with client.models.providers.anthropic.with_streaming_response.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -281,7 +281,7 @@ def test_streaming_response_list_agents(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_list_agents(self, client: GradientAI) -> None: + def test_path_params_list_agents(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.models.providers.anthropic.with_raw_response.list_agents( uuid="", @@ -295,13 +295,13 @@ class TestAsyncAnthropic: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.create() assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: 
AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.create( api_key='"sk-ant-12345678901234567890123456789012"', name='"Production Key"', @@ -310,7 +310,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.anthropic.with_raw_response.create() assert response.is_closed is True @@ -320,7 +320,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.anthropic.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -332,7 +332,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.retrieve( "api_key_uuid", ) @@ -340,7 +340,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.anthropic.with_raw_response.retrieve( "api_key_uuid", ) @@ -352,7 +352,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.anthropic.with_streaming_response.retrieve( "api_key_uuid", ) as response: @@ -366,7 +366,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): await async_client.models.providers.anthropic.with_raw_response.retrieve( "", @@ -374,7 +374,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -382,7 +382,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: anthropic = await 
async_client.models.providers.anthropic.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', api_key='"sk-ant-12345678901234567890123456789012"', @@ -393,7 +393,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.anthropic.with_raw_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -405,7 +405,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.anthropic.with_streaming_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -419,7 +419,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): await async_client.models.providers.anthropic.with_raw_response.update( path_api_key_uuid="", @@ -427,13 +427,13 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.list() assert_matches_type(AnthropicListResponse, anthropic, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.list( page=0, per_page=0, @@ -442,7 +442,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.anthropic.with_raw_response.list() assert response.is_closed is True @@ -452,7 +452,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.anthropic.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -464,7 +464,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.delete( 
"api_key_uuid", ) @@ -472,7 +472,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.anthropic.with_raw_response.delete( "api_key_uuid", ) @@ -484,7 +484,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.anthropic.with_streaming_response.delete( "api_key_uuid", ) as response: @@ -498,7 +498,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): await async_client.models.providers.anthropic.with_raw_response.delete( "", @@ -506,7 +506,7 @@ async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_agents(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_agents(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -514,7 +514,7 @@ async def test_method_list_agents(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_agents_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_agents_with_all_params(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -524,7 +524,7 @@ async def test_method_list_agents_with_all_params(self, async_client: AsyncGradi @pytest.mark.skip() @parametrize - async def test_raw_response_list_agents(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.anthropic.with_raw_response.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -536,7 +536,7 @@ async def test_raw_response_list_agents(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_streaming_response_list_agents(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_agents(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.anthropic.with_streaming_response.list_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -550,7 +550,7 @@ async def test_streaming_response_list_agents(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_path_params_list_agents(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_list_agents(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await 
async_client.models.providers.anthropic.with_raw_response.list_agents(
                 uuid="",
diff --git a/tests/api_resources/models/providers/test_openai.py b/tests/api_resources/models/providers/test_openai.py
index bdde97ca..ed2cfc8e 100644
--- a/tests/api_resources/models/providers/test_openai.py
+++ b/tests/api_resources/models/providers/test_openai.py
@@ -7,9 +7,9 @@
 
 import pytest
 
+from gradient import Gradient, AsyncGradient
 from tests.utils import assert_matches_type
-from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types.models.providers import (
+from gradient.types.models.providers import (
     OpenAIListResponse,
     OpenAICreateResponse,
     OpenAIDeleteResponse,
@@ -26,13 +26,13 @@ class TestOpenAI:
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_create(self, client: GradientAI) -> None:
+    def test_method_create(self, client: Gradient) -> None:
         openai = client.models.providers.openai.create()
         assert_matches_type(OpenAICreateResponse, openai, path=["response"])
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_create_with_all_params(self, client: GradientAI) -> None:
+    def test_method_create_with_all_params(self, client: Gradient) -> None:
         openai = client.models.providers.openai.create(
             api_key='"sk-proj--123456789098765432123456789"',
             name='"Production Key"',
@@ -41,7 +41,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_raw_response_create(self, client: GradientAI) -> None:
+    def test_raw_response_create(self, client: Gradient) -> None:
         response = client.models.providers.openai.with_raw_response.create()
 
         assert response.is_closed is True
@@ -51,7 +51,7 @@ def test_raw_response_create(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_streaming_response_create(self, client: GradientAI) -> None:
+    def test_streaming_response_create(self, client: Gradient) -> None:
         with client.models.providers.openai.with_streaming_response.create() as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -63,7 +63,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_retrieve(self, client: GradientAI) -> None:
+    def test_method_retrieve(self, client: Gradient) -> None:
         openai = client.models.providers.openai.retrieve(
             "api_key_uuid",
         )
@@ -71,7 +71,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_raw_response_retrieve(self, client: GradientAI) -> None:
+    def test_raw_response_retrieve(self, client: Gradient) -> None:
         response = client.models.providers.openai.with_raw_response.retrieve(
             "api_key_uuid",
         )
@@ -83,7 +83,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_streaming_response_retrieve(self, client: GradientAI) -> None:
+    def test_streaming_response_retrieve(self, client: Gradient) -> None:
         with client.models.providers.openai.with_streaming_response.retrieve(
             "api_key_uuid",
         ) as response:
@@ -97,7 +97,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_path_params_retrieve(self, client: GradientAI) -> None:
+    def test_path_params_retrieve(self, client: Gradient) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"):
             client.models.providers.openai.with_raw_response.retrieve(
                 "",
@@ -105,7 +105,7 @@ def 
test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: openai = client.models.providers.openai.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -113,7 +113,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: openai = client.models.providers.openai.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', api_key='"sk-ant-12345678901234567890123456789012"', @@ -124,7 +124,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.models.providers.openai.with_raw_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -136,7 +136,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.models.providers.openai.with_streaming_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -150,7 +150,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): client.models.providers.openai.with_raw_response.update( path_api_key_uuid="", @@ -158,13 +158,13 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: openai = client.models.providers.openai.list() assert_matches_type(OpenAIListResponse, openai, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: openai = client.models.providers.openai.list( page=0, per_page=0, @@ -173,7 +173,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.models.providers.openai.with_raw_response.list() assert response.is_closed is True @@ -183,7 +183,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.models.providers.openai.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -195,7 +195,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def 
test_method_delete(self, client: Gradient) -> None: openai = client.models.providers.openai.delete( "api_key_uuid", ) @@ -203,7 +203,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.models.providers.openai.with_raw_response.delete( "api_key_uuid", ) @@ -215,7 +215,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.models.providers.openai.with_streaming_response.delete( "api_key_uuid", ) as response: @@ -229,7 +229,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): client.models.providers.openai.with_raw_response.delete( "", @@ -237,7 +237,7 @@ def test_path_params_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve_agents(self, client: GradientAI) -> None: + def test_method_retrieve_agents(self, client: Gradient) -> None: openai = client.models.providers.openai.retrieve_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -245,7 +245,7 @@ def test_method_retrieve_agents(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve_agents_with_all_params(self, client: GradientAI) -> None: + def test_method_retrieve_agents_with_all_params(self, client: Gradient) -> None: openai = client.models.providers.openai.retrieve_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -255,7 +255,7 @@ def test_method_retrieve_agents_with_all_params(self, client: GradientAI) -> Non @pytest.mark.skip() @parametrize - def test_raw_response_retrieve_agents(self, client: GradientAI) -> None: + def test_raw_response_retrieve_agents(self, client: Gradient) -> None: response = client.models.providers.openai.with_raw_response.retrieve_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -267,7 +267,7 @@ def test_raw_response_retrieve_agents(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve_agents(self, client: GradientAI) -> None: + def test_streaming_response_retrieve_agents(self, client: Gradient) -> None: with client.models.providers.openai.with_streaming_response.retrieve_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -281,7 +281,7 @@ def test_streaming_response_retrieve_agents(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve_agents(self, client: GradientAI) -> None: + def test_path_params_retrieve_agents(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.models.providers.openai.with_raw_response.retrieve_agents( uuid="", @@ -295,13 +295,13 @@ class TestAsyncOpenAI: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.create() 
assert_matches_type(OpenAICreateResponse, openai, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.create( api_key='"sk-proj--123456789098765432123456789"', name='"Production Key"', @@ -310,7 +310,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.openai.with_raw_response.create() assert response.is_closed is True @@ -320,7 +320,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.openai.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -332,7 +332,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.retrieve( "api_key_uuid", ) @@ -340,7 +340,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.openai.with_raw_response.retrieve( "api_key_uuid", ) @@ -352,7 +352,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.openai.with_streaming_response.retrieve( "api_key_uuid", ) as response: @@ -366,7 +366,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): await async_client.models.providers.openai.with_raw_response.retrieve( "", @@ -374,7 +374,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -382,7 +382,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - 
async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', api_key='"sk-ant-12345678901234567890123456789012"', @@ -393,7 +393,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.openai.with_raw_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -405,7 +405,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.openai.with_streaming_response.update( path_api_key_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -419,7 +419,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): await async_client.models.providers.openai.with_raw_response.update( path_api_key_uuid="", @@ -427,13 +427,13 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.list() assert_matches_type(OpenAIListResponse, openai, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.list( page=0, per_page=0, @@ -442,7 +442,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.openai.with_raw_response.list() assert response.is_closed is True @@ -452,7 +452,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.openai.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -464,7 +464,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: 
AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.delete( "api_key_uuid", ) @@ -472,7 +472,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.openai.with_raw_response.delete( "api_key_uuid", ) @@ -484,7 +484,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.openai.with_streaming_response.delete( "api_key_uuid", ) as response: @@ -498,7 +498,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): await async_client.models.providers.openai.with_raw_response.delete( "", @@ -506,7 +506,7 @@ async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_retrieve_agents(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve_agents(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.retrieve_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -514,7 +514,7 @@ async def test_method_retrieve_agents(self, async_client: AsyncGradientAI) -> No @pytest.mark.skip() @parametrize - async def test_method_retrieve_agents_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve_agents_with_all_params(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.retrieve_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', page=0, @@ -524,7 +524,7 @@ async def test_method_retrieve_agents_with_all_params(self, async_client: AsyncG @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve_agents(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve_agents(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.openai.with_raw_response.retrieve_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -536,7 +536,7 @@ async def test_raw_response_retrieve_agents(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve_agents(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve_agents(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.openai.with_streaming_response.retrieve_agents( uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -550,7 +550,7 @@ async def test_streaming_response_retrieve_agents(self, async_client: AsyncGradi @pytest.mark.skip() @parametrize - async def test_path_params_retrieve_agents(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve_agents(self, 
async_client: AsyncGradient) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
             await async_client.models.providers.openai.with_raw_response.retrieve_agents(
                 uuid="",
diff --git a/tests/api_resources/test_agents.py b/tests/api_resources/test_agents.py
index 2f68a06f..987f2eda 100644
--- a/tests/api_resources/test_agents.py
+++ b/tests/api_resources/test_agents.py
@@ -7,9 +7,9 @@
 
 import pytest
 
+from gradient import Gradient, AsyncGradient
 from tests.utils import assert_matches_type
-from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types import (
+from gradient.types import (
     AgentListResponse,
     AgentCreateResponse,
     AgentDeleteResponse,
@@ -26,13 +26,13 @@ class TestAgents:
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_create(self, client: GradientAI) -> None:
+    def test_method_create(self, client: Gradient) -> None:
         agent = client.agents.create()
         assert_matches_type(AgentCreateResponse, agent, path=["response"])
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_create_with_all_params(self, client: GradientAI) -> None:
+    def test_method_create_with_all_params(self, client: Gradient) -> None:
         agent = client.agents.create(
             anthropic_key_uuid='"12345678-1234-1234-1234-123456789012"',
             description='"My Agent Description"',
@@ -49,7 +49,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_raw_response_create(self, client: GradientAI) -> None:
+    def test_raw_response_create(self, client: Gradient) -> None:
         response = client.agents.with_raw_response.create()
 
         assert response.is_closed is True
@@ -59,7 +59,7 @@ def test_raw_response_create(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_streaming_response_create(self, client: GradientAI) -> None:
+    def test_streaming_response_create(self, client: Gradient) -> None:
         with client.agents.with_streaming_response.create() as response:
             assert not response.is_closed
             assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -71,7 +71,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_method_retrieve(self, client: GradientAI) -> None:
+    def test_method_retrieve(self, client: Gradient) -> None:
         agent = client.agents.retrieve(
             "uuid",
         )
@@ -79,7 +79,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_raw_response_retrieve(self, client: GradientAI) -> None:
+    def test_raw_response_retrieve(self, client: Gradient) -> None:
         response = client.agents.with_raw_response.retrieve(
             "uuid",
         )
@@ -91,7 +91,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_streaming_response_retrieve(self, client: GradientAI) -> None:
+    def test_streaming_response_retrieve(self, client: Gradient) -> None:
         with client.agents.with_streaming_response.retrieve(
             "uuid",
         ) as response:
@@ -105,7 +105,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def test_path_params_retrieve(self, client: GradientAI) -> None:
+    def test_path_params_retrieve(self, client: Gradient) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
             client.agents.with_raw_response.retrieve(
                 "",
@@ -113,7 +113,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None:
 
     @pytest.mark.skip()
     @parametrize
-    def 
test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: agent = client.agents.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -121,7 +121,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: agent = client.agents.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', anthropic_key_uuid='"12345678-1234-1234-1234-123456789012"', @@ -145,7 +145,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.with_raw_response.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -157,7 +157,7 @@ def test_raw_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.with_streaming_response.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -171,7 +171,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): client.agents.with_raw_response.update( path_uuid="", @@ -179,13 +179,13 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: agent = client.agents.list() assert_matches_type(AgentListResponse, agent, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: agent = client.agents.list( only_deployed=True, page=0, @@ -195,7 +195,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.with_raw_response.list() assert response.is_closed is True @@ -205,7 +205,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -217,7 +217,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: agent = client.agents.delete( "uuid", ) @@ -225,7 +225,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, 
client: Gradient) -> None: response = client.agents.with_raw_response.delete( "uuid", ) @@ -237,7 +237,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.agents.with_streaming_response.delete( "uuid", ) as response: @@ -251,7 +251,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.agents.with_raw_response.delete( "", @@ -259,7 +259,7 @@ def test_path_params_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_status(self, client: GradientAI) -> None: + def test_method_update_status(self, client: Gradient) -> None: agent = client.agents.update_status( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -267,7 +267,7 @@ def test_method_update_status(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_status_with_all_params(self, client: GradientAI) -> None: + def test_method_update_status_with_all_params(self, client: Gradient) -> None: agent = client.agents.update_status( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', body_uuid='"12345678-1234-1234-1234-123456789012"', @@ -277,7 +277,7 @@ def test_method_update_status_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update_status(self, client: GradientAI) -> None: + def test_raw_response_update_status(self, client: Gradient) -> None: response = client.agents.with_raw_response.update_status( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -289,7 +289,7 @@ def test_raw_response_update_status(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_update_status(self, client: GradientAI) -> None: + def test_streaming_response_update_status(self, client: Gradient) -> None: with client.agents.with_streaming_response.update_status( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -303,7 +303,7 @@ def test_streaming_response_update_status(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update_status(self, client: GradientAI) -> None: + def test_path_params_update_status(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): client.agents.with_raw_response.update_status( path_uuid="", @@ -317,13 +317,13 @@ class TestAsyncAgents: @pytest.mark.skip() @parametrize - async def test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.create() assert_matches_type(AgentCreateResponse, agent, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.create( anthropic_key_uuid='"12345678-1234-1234-1234-123456789012"', description='"My Agent Description"', @@ -340,7 +340,7 @@ async def 
test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.with_raw_response.create() assert response.is_closed is True @@ -350,7 +350,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -362,7 +362,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.retrieve( "uuid", ) @@ -370,7 +370,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.agents.with_raw_response.retrieve( "uuid", ) @@ -382,7 +382,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.agents.with_streaming_response.retrieve( "uuid", ) as response: @@ -396,7 +396,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.agents.with_raw_response.retrieve( "", @@ -404,7 +404,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -412,7 +412,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', anthropic_key_uuid='"12345678-1234-1234-1234-123456789012"', @@ -436,7 +436,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: 
AsyncGradient) -> None: response = await async_client.agents.with_raw_response.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -448,7 +448,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.with_streaming_response.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -462,7 +462,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): await async_client.agents.with_raw_response.update( path_uuid="", @@ -470,13 +470,13 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.list() assert_matches_type(AgentListResponse, agent, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.list( only_deployed=True, page=0, @@ -486,7 +486,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.with_raw_response.list() assert response.is_closed is True @@ -496,7 +496,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -508,7 +508,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.delete( "uuid", ) @@ -516,7 +516,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.agents.with_raw_response.delete( "uuid", ) @@ -528,7 +528,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def 
test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.agents.with_streaming_response.delete( "uuid", ) as response: @@ -542,7 +542,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.agents.with_raw_response.delete( "", @@ -550,7 +550,7 @@ async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_status(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_status(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.update_status( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -558,7 +558,7 @@ async def test_method_update_status(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_update_status_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_status_with_all_params(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.update_status( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', body_uuid='"12345678-1234-1234-1234-123456789012"', @@ -568,7 +568,7 @@ async def test_method_update_status_with_all_params(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_raw_response_update_status(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update_status(self, async_client: AsyncGradient) -> None: response = await async_client.agents.with_raw_response.update_status( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -580,7 +580,7 @@ async def test_raw_response_update_status(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def test_streaming_response_update_status(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update_status(self, async_client: AsyncGradient) -> None: async with async_client.agents.with_streaming_response.update_status( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -594,7 +594,7 @@ async def test_streaming_response_update_status(self, async_client: AsyncGradien @pytest.mark.skip() @parametrize - async def test_path_params_update_status(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update_status(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): await async_client.agents.with_raw_response.update_status( path_uuid="", diff --git a/tests/api_resources/test_gpu_droplets.py b/tests/api_resources/test_gpu_droplets.py index cbc7e63b..485fd5f9 100644 --- a/tests/api_resources/test_gpu_droplets.py +++ b/tests/api_resources/test_gpu_droplets.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types import ( +from gradient.types import ( GPUDropletListResponse, GPUDropletCreateResponse, GPUDropletRetrieveResponse, @@ -27,7 +27,7 @@ class TestGPUDroplets: @pytest.mark.skip() @parametrize - def test_method_create_overload_1(self, client: GradientAI) 
-> None: + def test_method_create_overload_1(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.create( image="ubuntu-20-04-x64", name="example.com", @@ -37,7 +37,7 @@ def test_method_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> None: + def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.create( image="ubuntu-20-04-x64", name="example.com", @@ -63,7 +63,7 @@ def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_raw_response_create_overload_1(self, client: GradientAI) -> None: + def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.create( image="ubuntu-20-04-x64", name="example.com", @@ -77,7 +77,7 @@ def test_raw_response_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create_overload_1(self, client: GradientAI) -> None: + def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.create( image="ubuntu-20-04-x64", name="example.com", @@ -93,7 +93,7 @@ def test_streaming_response_create_overload_1(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_overload_2(self, client: GradientAI) -> None: + def test_method_create_overload_2(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.create( image="ubuntu-20-04-x64", names=["sub-01.example.com", "sub-02.example.com"], @@ -103,7 +103,7 @@ def test_method_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> None: + def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.create( image="ubuntu-20-04-x64", names=["sub-01.example.com", "sub-02.example.com"], @@ -129,7 +129,7 @@ def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> N @pytest.mark.skip() @parametrize - def test_raw_response_create_overload_2(self, client: GradientAI) -> None: + def test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.create( image="ubuntu-20-04-x64", names=["sub-01.example.com", "sub-02.example.com"], @@ -143,7 +143,7 @@ def test_raw_response_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create_overload_2(self, client: GradientAI) -> None: + def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.create( image="ubuntu-20-04-x64", names=["sub-01.example.com", "sub-02.example.com"], @@ -159,7 +159,7 @@ def test_streaming_response_create_overload_2(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.retrieve( 1, ) @@ -167,7 +167,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = 
client.gpu_droplets.with_raw_response.retrieve( 1, ) @@ -179,7 +179,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.retrieve( 1, ) as response: @@ -193,13 +193,13 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list() assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list( name="name", page=1, @@ -211,7 +211,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.list() assert response.is_closed is True @@ -221,7 +221,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -233,7 +233,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.delete( 1, ) @@ -241,7 +241,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.delete( 1, ) @@ -253,7 +253,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.delete( 1, ) as response: @@ -267,7 +267,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete_by_tag(self, client: GradientAI) -> None: + def test_method_delete_by_tag(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.delete_by_tag( tag_name="tag_name", ) @@ -275,7 +275,7 @@ def test_method_delete_by_tag(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete_by_tag(self, client: GradientAI) -> None: + def test_raw_response_delete_by_tag(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.delete_by_tag( tag_name="tag_name", ) @@ -287,7 +287,7 @@ def test_raw_response_delete_by_tag(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete_by_tag(self, client: GradientAI) -> 
None: + def test_streaming_response_delete_by_tag(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.delete_by_tag( tag_name="tag_name", ) as response: @@ -301,7 +301,7 @@ def test_streaming_response_delete_by_tag(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_firewalls(self, client: GradientAI) -> None: + def test_method_list_firewalls(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list_firewalls( droplet_id=3164444, ) @@ -309,7 +309,7 @@ def test_method_list_firewalls(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_firewalls_with_all_params(self, client: GradientAI) -> None: + def test_method_list_firewalls_with_all_params(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list_firewalls( droplet_id=3164444, page=1, @@ -319,7 +319,7 @@ def test_method_list_firewalls_with_all_params(self, client: GradientAI) -> None @pytest.mark.skip() @parametrize - def test_raw_response_list_firewalls(self, client: GradientAI) -> None: + def test_raw_response_list_firewalls(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.list_firewalls( droplet_id=3164444, ) @@ -331,7 +331,7 @@ def test_raw_response_list_firewalls(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_firewalls(self, client: GradientAI) -> None: + def test_streaming_response_list_firewalls(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.list_firewalls( droplet_id=3164444, ) as response: @@ -345,7 +345,7 @@ def test_streaming_response_list_firewalls(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_kernels(self, client: GradientAI) -> None: + def test_method_list_kernels(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list_kernels( droplet_id=3164444, ) @@ -353,7 +353,7 @@ def test_method_list_kernels(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_kernels_with_all_params(self, client: GradientAI) -> None: + def test_method_list_kernels_with_all_params(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list_kernels( droplet_id=3164444, page=1, @@ -363,7 +363,7 @@ def test_method_list_kernels_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list_kernels(self, client: GradientAI) -> None: + def test_raw_response_list_kernels(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.list_kernels( droplet_id=3164444, ) @@ -375,7 +375,7 @@ def test_raw_response_list_kernels(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_kernels(self, client: GradientAI) -> None: + def test_streaming_response_list_kernels(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.list_kernels( droplet_id=3164444, ) as response: @@ -389,7 +389,7 @@ def test_streaming_response_list_kernels(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_neighbors(self, client: GradientAI) -> None: + def test_method_list_neighbors(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list_neighbors( 1, ) @@ -397,7 +397,7 @@ def test_method_list_neighbors(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list_neighbors(self, client: GradientAI) 
-> None: + def test_raw_response_list_neighbors(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.list_neighbors( 1, ) @@ -409,7 +409,7 @@ def test_raw_response_list_neighbors(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_neighbors(self, client: GradientAI) -> None: + def test_streaming_response_list_neighbors(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.list_neighbors( 1, ) as response: @@ -423,7 +423,7 @@ def test_streaming_response_list_neighbors(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_snapshots(self, client: GradientAI) -> None: + def test_method_list_snapshots(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list_snapshots( droplet_id=3164444, ) @@ -431,7 +431,7 @@ def test_method_list_snapshots(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list_snapshots_with_all_params(self, client: GradientAI) -> None: + def test_method_list_snapshots_with_all_params(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list_snapshots( droplet_id=3164444, page=1, @@ -441,7 +441,7 @@ def test_method_list_snapshots_with_all_params(self, client: GradientAI) -> None @pytest.mark.skip() @parametrize - def test_raw_response_list_snapshots(self, client: GradientAI) -> None: + def test_raw_response_list_snapshots(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.list_snapshots( droplet_id=3164444, ) @@ -453,7 +453,7 @@ def test_raw_response_list_snapshots(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list_snapshots(self, client: GradientAI) -> None: + def test_streaming_response_list_snapshots(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.list_snapshots( droplet_id=3164444, ) as response: @@ -473,7 +473,7 @@ class TestAsyncGPUDroplets: @pytest.mark.skip() @parametrize - async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.create( image="ubuntu-20-04-x64", name="example.com", @@ -483,7 +483,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.create( image="ubuntu-20-04-x64", name="example.com", @@ -509,7 +509,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.create( image="ubuntu-20-04-x64", name="example.com", @@ -523,7 +523,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with 
async_client.gpu_droplets.with_streaming_response.create( image="ubuntu-20-04-x64", name="example.com", @@ -539,7 +539,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.create( image="ubuntu-20-04-x64", names=["sub-01.example.com", "sub-02.example.com"], @@ -549,7 +549,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.create( image="ubuntu-20-04-x64", names=["sub-01.example.com", "sub-02.example.com"], @@ -575,7 +575,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn @pytest.mark.skip() @parametrize - async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.create( image="ubuntu-20-04-x64", names=["sub-01.example.com", "sub-02.example.com"], @@ -589,7 +589,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientA @pytest.mark.skip() @parametrize - async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.create( image="ubuntu-20-04-x64", names=["sub-01.example.com", "sub-02.example.com"], @@ -605,7 +605,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.retrieve( 1, ) @@ -613,7 +613,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.retrieve( 1, ) @@ -625,7 +625,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.retrieve( 1, ) as response: @@ -639,13 +639,13 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list() assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"]) @pytest.mark.skip() @parametrize - async def 
test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list( name="name", page=1, @@ -657,7 +657,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.list() assert response.is_closed is True @@ -667,7 +667,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -679,7 +679,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() @parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.delete( 1, ) @@ -687,7 +687,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.delete( 1, ) @@ -699,7 +699,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.delete( 1, ) as response: @@ -713,7 +713,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_delete_by_tag(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete_by_tag(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.delete_by_tag( tag_name="tag_name", ) @@ -721,7 +721,7 @@ async def test_method_delete_by_tag(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_raw_response_delete_by_tag(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete_by_tag(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.delete_by_tag( tag_name="tag_name", ) @@ -733,7 +733,7 @@ async def test_raw_response_delete_by_tag(self, async_client: AsyncGradientAI) - @pytest.mark.skip() @parametrize - async def test_streaming_response_delete_by_tag(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete_by_tag(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.delete_by_tag( tag_name="tag_name", ) as response: @@ -747,7 +747,7 @@ async def test_streaming_response_delete_by_tag(self, async_client: 
AsyncGradien @pytest.mark.skip() @parametrize - async def test_method_list_firewalls(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_firewalls(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list_firewalls( droplet_id=3164444, ) @@ -755,7 +755,7 @@ async def test_method_list_firewalls(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_method_list_firewalls_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_firewalls_with_all_params(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list_firewalls( droplet_id=3164444, page=1, @@ -765,7 +765,7 @@ async def test_method_list_firewalls_with_all_params(self, async_client: AsyncGr @pytest.mark.skip() @parametrize - async def test_raw_response_list_firewalls(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_firewalls(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.list_firewalls( droplet_id=3164444, ) @@ -777,7 +777,7 @@ async def test_raw_response_list_firewalls(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_streaming_response_list_firewalls(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_firewalls(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.list_firewalls( droplet_id=3164444, ) as response: @@ -791,7 +791,7 @@ async def test_streaming_response_list_firewalls(self, async_client: AsyncGradie @pytest.mark.skip() @parametrize - async def test_method_list_kernels(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_kernels(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list_kernels( droplet_id=3164444, ) @@ -799,7 +799,7 @@ async def test_method_list_kernels(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list_kernels_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_kernels_with_all_params(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list_kernels( droplet_id=3164444, page=1, @@ -809,7 +809,7 @@ async def test_method_list_kernels_with_all_params(self, async_client: AsyncGrad @pytest.mark.skip() @parametrize - async def test_raw_response_list_kernels(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_kernels(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.list_kernels( droplet_id=3164444, ) @@ -821,7 +821,7 @@ async def test_raw_response_list_kernels(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_streaming_response_list_kernels(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_kernels(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.list_kernels( droplet_id=3164444, ) as response: @@ -835,7 +835,7 @@ async def test_streaming_response_list_kernels(self, async_client: AsyncGradient @pytest.mark.skip() @parametrize - async def test_method_list_neighbors(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_neighbors(self, async_client: AsyncGradient) -> None: gpu_droplet = await 
async_client.gpu_droplets.list_neighbors( 1, ) @@ -843,7 +843,7 @@ async def test_method_list_neighbors(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_raw_response_list_neighbors(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_neighbors(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.list_neighbors( 1, ) @@ -855,7 +855,7 @@ async def test_raw_response_list_neighbors(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_streaming_response_list_neighbors(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_neighbors(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.list_neighbors( 1, ) as response: @@ -869,7 +869,7 @@ async def test_streaming_response_list_neighbors(self, async_client: AsyncGradie @pytest.mark.skip() @parametrize - async def test_method_list_snapshots(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_snapshots(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list_snapshots( droplet_id=3164444, ) @@ -877,7 +877,7 @@ async def test_method_list_snapshots(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_method_list_snapshots_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_snapshots_with_all_params(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list_snapshots( droplet_id=3164444, page=1, @@ -887,7 +887,7 @@ async def test_method_list_snapshots_with_all_params(self, async_client: AsyncGr @pytest.mark.skip() @parametrize - async def test_raw_response_list_snapshots(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list_snapshots(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.list_snapshots( droplet_id=3164444, ) @@ -899,7 +899,7 @@ async def test_raw_response_list_snapshots(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_streaming_response_list_snapshots(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list_snapshots(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.list_snapshots( droplet_id=3164444, ) as response: diff --git a/tests/api_resources/test_knowledge_bases.py b/tests/api_resources/test_knowledge_bases.py index c4d179cc..1628fdbe 100644 --- a/tests/api_resources/test_knowledge_bases.py +++ b/tests/api_resources/test_knowledge_bases.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types import ( +from gradient.types import ( KnowledgeBaseListResponse, KnowledgeBaseCreateResponse, KnowledgeBaseDeleteResponse, @@ -25,13 +25,13 @@ class TestKnowledgeBases: @pytest.mark.skip() @parametrize - def test_method_create(self, client: GradientAI) -> None: + def test_method_create(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.create() assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_create_with_all_params(self, client: GradientAI) -> None: + def 
test_method_create_with_all_params(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.create( database_id='"12345678-1234-1234-1234-123456789012"', datasources=[ @@ -74,7 +74,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_create(self, client: GradientAI) -> None: + def test_raw_response_create(self, client: Gradient) -> None: response = client.knowledge_bases.with_raw_response.create() assert response.is_closed is True @@ -84,7 +84,7 @@ def test_raw_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_create(self, client: GradientAI) -> None: + def test_streaming_response_create(self, client: Gradient) -> None: with client.knowledge_bases.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -96,7 +96,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_retrieve(self, client: GradientAI) -> None: + def test_method_retrieve(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.retrieve( "uuid", ) @@ -104,7 +104,7 @@ def test_method_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_retrieve(self, client: GradientAI) -> None: + def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.knowledge_bases.with_raw_response.retrieve( "uuid", ) @@ -116,7 +116,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_retrieve(self, client: GradientAI) -> None: + def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.knowledge_bases.with_streaming_response.retrieve( "uuid", ) as response: @@ -130,7 +130,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_retrieve(self, client: GradientAI) -> None: + def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.knowledge_bases.with_raw_response.retrieve( "", @@ -138,7 +138,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update(self, client: GradientAI) -> None: + def test_method_update(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -146,7 +146,7 @@ def test_method_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_update_with_all_params(self, client: GradientAI) -> None: + def test_method_update_with_all_params(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', database_id='"12345678-1234-1234-1234-123456789012"', @@ -160,7 +160,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_update(self, client: GradientAI) -> None: + def test_raw_response_update(self, client: Gradient) -> None: response = client.knowledge_bases.with_raw_response.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -172,7 +172,7 @@ def test_raw_response_update(self, client: GradientAI) -> 
None: @pytest.mark.skip() @parametrize - def test_streaming_response_update(self, client: GradientAI) -> None: + def test_streaming_response_update(self, client: Gradient) -> None: with client.knowledge_bases.with_streaming_response.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -186,7 +186,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_update(self, client: GradientAI) -> None: + def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): client.knowledge_bases.with_raw_response.update( path_uuid="", @@ -194,13 +194,13 @@ def test_path_params_update(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.list() assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.list( page=0, per_page=0, @@ -209,7 +209,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.knowledge_bases.with_raw_response.list() assert response.is_closed is True @@ -219,7 +219,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.knowledge_bases.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -231,7 +231,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_method_delete(self, client: GradientAI) -> None: + def test_method_delete(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.delete( "uuid", ) @@ -239,7 +239,7 @@ def test_method_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_delete(self, client: GradientAI) -> None: + def test_raw_response_delete(self, client: Gradient) -> None: response = client.knowledge_bases.with_raw_response.delete( "uuid", ) @@ -251,7 +251,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_delete(self, client: GradientAI) -> None: + def test_streaming_response_delete(self, client: Gradient) -> None: with client.knowledge_bases.with_streaming_response.delete( "uuid", ) as response: @@ -265,7 +265,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_path_params_delete(self, client: GradientAI) -> None: + def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): client.knowledge_bases.with_raw_response.delete( "", @@ -279,13 +279,13 @@ class TestAsyncKnowledgeBases: @pytest.mark.skip() @parametrize - async def 
test_method_create(self, async_client: AsyncGradientAI) -> None: + async def test_method_create(self, async_client: AsyncGradient) -> None: knowledge_base = await async_client.knowledge_bases.create() assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: knowledge_base = await async_client.knowledge_bases.create( database_id='"12345678-1234-1234-1234-123456789012"', datasources=[ @@ -328,7 +328,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.with_raw_response.create() assert response.is_closed is True @@ -338,7 +338,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -350,7 +350,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_method_retrieve(self, async_client: AsyncGradient) -> None: knowledge_base = await async_client.knowledge_bases.retrieve( "uuid", ) @@ -358,7 +358,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.with_raw_response.retrieve( "uuid", ) @@ -370,7 +370,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non @pytest.mark.skip() @parametrize - async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.with_streaming_response.retrieve( "uuid", ) as response: @@ -384,7 +384,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.knowledge_bases.with_raw_response.retrieve( "", @@ -392,7 +392,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None @pytest.mark.skip() @parametrize - async def test_method_update(self, async_client: AsyncGradientAI) -> None: + async def test_method_update(self, async_client: AsyncGradient) -> None: knowledge_base = await async_client.knowledge_bases.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ 
-400,7 +400,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: knowledge_base = await async_client.knowledge_bases.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', database_id='"12345678-1234-1234-1234-123456789012"', @@ -414,7 +414,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI @pytest.mark.skip() @parametrize - async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.with_raw_response.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) @@ -426,7 +426,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.with_streaming_response.update( path_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) as response: @@ -440,7 +440,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): await async_client.knowledge_bases.with_raw_response.update( path_uuid="", @@ -448,13 +448,13 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: knowledge_base = await async_client.knowledge_bases.list() assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: knowledge_base = await async_client.knowledge_bases.list( page=0, per_page=0, @@ -463,7 +463,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.with_raw_response.list() assert response.is_closed is True @@ -473,7 +473,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -485,7 +485,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N @pytest.mark.skip() 
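# --- Illustrative sketch, not part of the generated diff: how the renamed async
# client from these hunks is used. The import path and class name mirror the test
# changes above; the base_url points at the same prism mock server the test suite
# targets, and the key argument is a placeholder, since which keys are required
# is not shown by these hunks.
import asyncio

from gradient import AsyncGradient


async def main() -> None:
    # AsyncGradient replaces the former AsyncGradientAI class.
    async with AsyncGradient(
        base_url="http://127.0.0.1:4010",  # TEST_API_BASE_URL default used by the tests
        api_key="example-api-key",
    ) as client:
        knowledge_bases = await client.knowledge_bases.list(page=1, per_page=10)
        print(knowledge_bases)


asyncio.run(main())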
@parametrize - async def test_method_delete(self, async_client: AsyncGradientAI) -> None: + async def test_method_delete(self, async_client: AsyncGradient) -> None: knowledge_base = await async_client.knowledge_bases.delete( "uuid", ) @@ -493,7 +493,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.with_raw_response.delete( "uuid", ) @@ -505,7 +505,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.with_streaming_response.delete( "uuid", ) as response: @@ -519,7 +519,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> @pytest.mark.skip() @parametrize - async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None: + async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): await async_client.knowledge_bases.with_raw_response.delete( "", diff --git a/tests/api_resources/test_models.py b/tests/api_resources/test_models.py index f7e21015..7b2a5a4a 100644 --- a/tests/api_resources/test_models.py +++ b/tests/api_resources/test_models.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types import ModelListResponse +from gradient.types import ModelListResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -19,13 +19,13 @@ class TestModels: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: model = client.models.list() assert_matches_type(ModelListResponse, model, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: model = client.models.list( page=0, per_page=0, @@ -36,7 +36,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.models.with_raw_response.list() assert response.is_closed is True @@ -46,7 +46,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.models.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -64,13 +64,13 @@ class TestAsyncModels: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: model = await async_client.models.list() 
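# --- Illustrative sketch, not part of the generated diff: the with_raw_response /
# with_streaming_response accessors exercised throughout these model tests, shown
# with the renamed Gradient client. Only calls and assertions visible in the hunks
# are used; the constructor arguments and mock-server base_url are assumptions
# borrowed from the test fixtures.
from gradient import Gradient

client = Gradient(
    base_url="http://127.0.0.1:4010",
    api_key="example-api-key",
)

# Raw-response wrapper: the full HTTP response is available for inspection.
response = client.models.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"

# Streaming-response wrapper: used as a context manager and closed on exit.
with client.models.with_streaming_response.list() as stream_response:
    assert not stream_response.is_closed
    assert stream_response.http_request.headers.get("X-Stainless-Lang") == "python"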
assert_matches_type(ModelListResponse, model, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: model = await async_client.models.list( page=0, per_page=0, @@ -81,7 +81,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.models.with_raw_response.list() assert response.is_closed is True @@ -91,7 +91,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.models.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/api_resources/test_regions.py b/tests/api_resources/test_regions.py index f331342e..5bf67e91 100644 --- a/tests/api_resources/test_regions.py +++ b/tests/api_resources/test_regions.py @@ -7,9 +7,9 @@ import pytest +from gradient import Gradient, AsyncGradient from tests.utils import assert_matches_type -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai.types import RegionListResponse +from gradient.types import RegionListResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") @@ -19,13 +19,13 @@ class TestRegions: @pytest.mark.skip() @parametrize - def test_method_list(self, client: GradientAI) -> None: + def test_method_list(self, client: Gradient) -> None: region = client.regions.list() assert_matches_type(RegionListResponse, region, path=["response"]) @pytest.mark.skip() @parametrize - def test_method_list_with_all_params(self, client: GradientAI) -> None: + def test_method_list_with_all_params(self, client: Gradient) -> None: region = client.regions.list( page=1, per_page=1, @@ -34,7 +34,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_raw_response_list(self, client: GradientAI) -> None: + def test_raw_response_list(self, client: Gradient) -> None: response = client.regions.with_raw_response.list() assert response.is_closed is True @@ -44,7 +44,7 @@ def test_raw_response_list(self, client: GradientAI) -> None: @pytest.mark.skip() @parametrize - def test_streaming_response_list(self, client: GradientAI) -> None: + def test_streaming_response_list(self, client: Gradient) -> None: with client.regions.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -62,13 +62,13 @@ class TestAsyncRegions: @pytest.mark.skip() @parametrize - async def test_method_list(self, async_client: AsyncGradientAI) -> None: + async def test_method_list(self, async_client: AsyncGradient) -> None: region = await async_client.regions.list() assert_matches_type(RegionListResponse, region, path=["response"]) @pytest.mark.skip() @parametrize - async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None: + async def test_method_list_with_all_params(self, async_client: 
AsyncGradient) -> None: region = await async_client.regions.list( page=1, per_page=1, @@ -77,7 +77,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) @pytest.mark.skip() @parametrize - async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.regions.with_raw_response.list() assert response.is_closed is True @@ -87,7 +87,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None: @pytest.mark.skip() @parametrize - async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None: + async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.regions.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" diff --git a/tests/conftest.py b/tests/conftest.py index d61eb8b7..fecfc779 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,15 +10,15 @@ import pytest from pytest_asyncio import is_async_test -from do_gradientai import GradientAI, AsyncGradientAI, DefaultAioHttpClient -from do_gradientai._utils import is_dict +from gradient import Gradient, AsyncGradient, DefaultAioHttpClient +from gradient._utils import is_dict if TYPE_CHECKING: from _pytest.fixtures import FixtureRequest # pyright: ignore[reportPrivateImportUsage] pytest.register_assert_rewrite("tests.utils") -logging.getLogger("do_gradientai").setLevel(logging.DEBUG) +logging.getLogger("gradient").setLevel(logging.DEBUG) # automatically add `pytest.mark.asyncio()` to all of our async tests @@ -52,12 +52,12 @@ def pytest_collection_modifyitems(items: list[pytest.Function]) -> None: @pytest.fixture(scope="session") -def client(request: FixtureRequest) -> Iterator[GradientAI]: +def client(request: FixtureRequest) -> Iterator[Gradient]: strict = getattr(request, "param", True) if not isinstance(strict, bool): raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}") - with GradientAI( + with Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -69,7 +69,7 @@ def client(request: FixtureRequest) -> Iterator[GradientAI]: @pytest.fixture(scope="session") -async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncGradientAI]: +async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncGradient]: param = getattr(request, "param", True) # defaults @@ -88,7 +88,7 @@ async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncGradientAI else: raise TypeError(f"Unexpected fixture parameter type {type(param)}, expected bool or dict") - async with AsyncGradientAI( + async with AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, diff --git a/tests/test_client.py b/tests/test_client.py index e88c4544..caf79355 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -21,12 +21,12 @@ from respx import MockRouter from pydantic import ValidationError -from do_gradientai import GradientAI, AsyncGradientAI, APIResponseValidationError -from do_gradientai._types import Omit -from do_gradientai._models import BaseModel, FinalRequestOptions -from do_gradientai._streaming import Stream, AsyncStream -from do_gradientai._exceptions import APIStatusError, APITimeoutError, APIResponseValidationError -from do_gradientai._base_client import ( +from gradient import Gradient, AsyncGradient, 
APIResponseValidationError +from gradient._types import Omit +from gradient._models import BaseModel, FinalRequestOptions +from gradient._streaming import Stream, AsyncStream +from gradient._exceptions import APIStatusError, APITimeoutError, APIResponseValidationError +from gradient._base_client import ( DEFAULT_TIMEOUT, HTTPX_DEFAULT_TIMEOUT, BaseClient, @@ -53,7 +53,7 @@ def _low_retry_timeout(*_args: Any, **_kwargs: Any) -> float: return 0.1 -def _get_open_connections(client: GradientAI | AsyncGradientAI) -> int: +def _get_open_connections(client: Gradient | AsyncGradient) -> int: transport = client._client._transport assert isinstance(transport, httpx.HTTPTransport) or isinstance(transport, httpx.AsyncHTTPTransport) @@ -61,8 +61,8 @@ def _get_open_connections(client: GradientAI | AsyncGradientAI) -> int: return len(pool._requests) -class TestGradientAI: - client = GradientAI( +class TestGradient: + client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -123,7 +123,7 @@ def test_copy_default_options(self) -> None: assert isinstance(self.client.timeout, httpx.Timeout) def test_copy_default_headers(self) -> None: - client = GradientAI( + client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -162,7 +162,7 @@ def test_copy_default_headers(self) -> None: client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"}) def test_copy_default_query(self) -> None: - client = GradientAI( + client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -259,10 +259,10 @@ def add_leak(leaks: list[tracemalloc.StatisticDiff], diff: tracemalloc.Statistic # to_raw_response_wrapper leaks through the @functools.wraps() decorator. # # removing the decorator fixes the leak for reasons we don't understand. - "do_gradientai/_legacy_response.py", - "do_gradientai/_response.py", + "gradient/_legacy_response.py", + "gradient/_response.py", # pydantic.BaseModel.model_dump || pydantic.BaseModel.dict leak memory for some reason. - "do_gradientai/_compat.py", + "gradient/_compat.py", # Standard library leaks we don't care about. 
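# --- Illustrative migration sketch, not part of the generated diff: the package
# rename applied throughout these hunks. The before/after import lines are taken
# from the rewritten test imports; the logger name follows the conftest.py change.
# Which of the client keyword arguments are required is not shown here, so the
# single api_key argument is an assumption.
#
#   before: from do_gradientai import GradientAI, AsyncGradientAI
#   after:  from gradient import Gradient, AsyncGradient
#
#   before: from do_gradientai.types import ModelListResponse
#   after:  from gradient.types import ModelListResponse
import logging

from gradient import Gradient

# SDK debug logging now uses the "gradient" logger instead of "do_gradientai".
logging.getLogger("gradient").setLevel(logging.DEBUG)

client = Gradient(api_key="example-api-key")
models = client.models.list()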
"/logging/__init__.py", ] @@ -293,7 +293,7 @@ def test_request_timeout(self) -> None: assert timeout == httpx.Timeout(100.0) def test_client_timeout_option(self) -> None: - client = GradientAI( + client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -309,7 +309,7 @@ def test_client_timeout_option(self) -> None: def test_http_client_timeout_option(self) -> None: # custom timeout given to the httpx client should be used with httpx.Client(timeout=None) as http_client: - client = GradientAI( + client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -324,7 +324,7 @@ def test_http_client_timeout_option(self) -> None: # no timeout given to the httpx client should not use the httpx default with httpx.Client() as http_client: - client = GradientAI( + client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -339,7 +339,7 @@ def test_http_client_timeout_option(self) -> None: # explicitly passing the default timeout currently results in it being ignored with httpx.Client(timeout=HTTPX_DEFAULT_TIMEOUT) as http_client: - client = GradientAI( + client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -355,7 +355,7 @@ def test_http_client_timeout_option(self) -> None: async def test_invalid_http_client(self) -> None: with pytest.raises(TypeError, match="Invalid `http_client` arg"): async with httpx.AsyncClient() as http_client: - GradientAI( + Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -365,7 +365,7 @@ async def test_invalid_http_client(self) -> None: ) def test_default_headers_option(self) -> None: - client = GradientAI( + client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -377,7 +377,7 @@ def test_default_headers_option(self) -> None: assert request.headers.get("x-foo") == "bar" assert request.headers.get("x-stainless-lang") == "python" - client2 = GradientAI( + client2 = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -393,7 +393,7 @@ def test_default_headers_option(self) -> None: assert request.headers.get("x-stainless-lang") == "my-overriding-header" def test_validate_headers(self) -> None: - client = GradientAI( + client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -403,8 +403,8 @@ def test_validate_headers(self) -> None: request = client._build_request(FinalRequestOptions(method="get", url="/foo")) assert request.headers.get("Authorization") == f"Bearer {api_key}" - with update_env(**{"GRADIENTAI_API_KEY": Omit()}): - client2 = GradientAI( + with update_env(**{"GRADIENT_API_KEY": Omit()}): + client2 = Gradient( base_url=base_url, api_key=None, inference_key=inference_key, @@ -424,7 +424,7 @@ def test_validate_headers(self) -> None: assert request2.headers.get("Authorization") is None def test_default_query_option(self) -> None: - client = GradientAI( + client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -543,7 +543,7 @@ def test_request_extra_query(self) -> None: params = dict(request.url.params) assert params == {"foo": "2"} - def test_multipart_repeating_array(self, client: GradientAI) -> None: + def test_multipart_repeating_array(self, client: Gradient) -> None: request = client._build_request( FinalRequestOptions.construct( method="post", @@ -630,7 +630,7 @@ class Model(BaseModel): assert response.foo == 2 def test_base_url_setter(self) -> None: - client = GradientAI( + client = Gradient( 
base_url="https://example.com/from_init", api_key=api_key, inference_key=inference_key, @@ -644,8 +644,8 @@ def test_base_url_setter(self) -> None: assert client.base_url == "https://example.com/from_setter/" def test_base_url_env(self) -> None: - with update_env(GRADIENT_AI_BASE_URL="http://localhost:5000/from/env"): - client = GradientAI( + with update_env(GRADIENT_BASE_URL="http://localhost:5000/from/env"): + client = Gradient( api_key=api_key, inference_key=inference_key, agent_key=agent_key, _strict_response_validation=True ) assert client.base_url == "http://localhost:5000/from/env/" @@ -653,14 +653,14 @@ def test_base_url_env(self) -> None: @pytest.mark.parametrize( "client", [ - GradientAI( + Gradient( base_url="http://localhost:5000/custom/path/", api_key=api_key, inference_key=inference_key, agent_key=agent_key, _strict_response_validation=True, ), - GradientAI( + Gradient( base_url="http://localhost:5000/custom/path/", api_key=api_key, inference_key=inference_key, @@ -671,7 +671,7 @@ def test_base_url_env(self) -> None: ], ids=["standard", "custom http client"], ) - def test_base_url_trailing_slash(self, client: GradientAI) -> None: + def test_base_url_trailing_slash(self, client: Gradient) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -684,14 +684,14 @@ def test_base_url_trailing_slash(self, client: GradientAI) -> None: @pytest.mark.parametrize( "client", [ - GradientAI( + Gradient( base_url="http://localhost:5000/custom/path/", api_key=api_key, inference_key=inference_key, agent_key=agent_key, _strict_response_validation=True, ), - GradientAI( + Gradient( base_url="http://localhost:5000/custom/path/", api_key=api_key, inference_key=inference_key, @@ -702,7 +702,7 @@ def test_base_url_trailing_slash(self, client: GradientAI) -> None: ], ids=["standard", "custom http client"], ) - def test_base_url_no_trailing_slash(self, client: GradientAI) -> None: + def test_base_url_no_trailing_slash(self, client: Gradient) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -715,14 +715,14 @@ def test_base_url_no_trailing_slash(self, client: GradientAI) -> None: @pytest.mark.parametrize( "client", [ - GradientAI( + Gradient( base_url="http://localhost:5000/custom/path/", api_key=api_key, inference_key=inference_key, agent_key=agent_key, _strict_response_validation=True, ), - GradientAI( + Gradient( base_url="http://localhost:5000/custom/path/", api_key=api_key, inference_key=inference_key, @@ -733,7 +733,7 @@ def test_base_url_no_trailing_slash(self, client: GradientAI) -> None: ], ids=["standard", "custom http client"], ) - def test_absolute_request_url(self, client: GradientAI) -> None: + def test_absolute_request_url(self, client: Gradient) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -744,7 +744,7 @@ def test_absolute_request_url(self, client: GradientAI) -> None: assert request.url == "https://myapi.com/foo" def test_copied_client_does_not_close_http(self) -> None: - client = GradientAI( + client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -761,7 +761,7 @@ def test_copied_client_does_not_close_http(self) -> None: assert not client.is_closed() def test_client_context_manager(self) -> None: - client = GradientAI( + client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -788,7 +788,7 @@ class Model(BaseModel): def test_client_max_retries_validation(self) -> None: with pytest.raises(TypeError, match=r"max_retries cannot be 
None"): - GradientAI( + Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -815,7 +815,7 @@ class Model(BaseModel): respx_mock.get("/foo").mock(return_value=httpx.Response(200, text="my-custom-format")) - strict_client = GradientAI( + strict_client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -826,7 +826,7 @@ class Model(BaseModel): with pytest.raises(APIResponseValidationError): strict_client.get("/foo", cast_to=Model) - client = GradientAI( + client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -860,7 +860,7 @@ class Model(BaseModel): ) @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str, timeout: float) -> None: - client = GradientAI( + client = Gradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -873,9 +873,9 @@ def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str calculated = client._calculate_retry_timeout(remaining_retries, options, headers) assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: ignore[reportUnknownMemberType] - @mock.patch("do_gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradient._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, client: GradientAI) -> None: + def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, client: Gradient) -> None: respx_mock.post("/chat/completions").mock(side_effect=httpx.TimeoutException("Test timeout error")) with pytest.raises(APITimeoutError): @@ -891,9 +891,9 @@ def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, clien assert _get_open_connections(self.client) == 0 - @mock.patch("do_gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradient._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) - def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client: GradientAI) -> None: + def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client: Gradient) -> None: respx_mock.post("/chat/completions").mock(return_value=httpx.Response(500)) with pytest.raises(APIStatusError): @@ -909,12 +909,12 @@ def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client assert _get_open_connections(self.client) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) - @mock.patch("do_gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradient._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) @pytest.mark.parametrize("failure_mode", ["status", "exception"]) def test_retries_taken( self, - client: GradientAI, + client: Gradient, failures_before_success: int, failure_mode: Literal["status", "exception"], respx_mock: MockRouter, @@ -948,10 +948,10 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) - @mock.patch("do_gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + 
@mock.patch("gradient._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) def test_omit_retry_count_header( - self, client: GradientAI, failures_before_success: int, respx_mock: MockRouter + self, client: Gradient, failures_before_success: int, respx_mock: MockRouter ) -> None: client = client.with_options(max_retries=4) @@ -980,10 +980,10 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) - @mock.patch("do_gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradient._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) def test_overwrite_retry_count_header( - self, client: GradientAI, failures_before_success: int, respx_mock: MockRouter + self, client: Gradient, failures_before_success: int, respx_mock: MockRouter ) -> None: client = client.with_options(max_retries=4) @@ -1061,8 +1061,8 @@ def test_follow_redirects_disabled(self, respx_mock: MockRouter) -> None: assert exc_info.value.response.headers["Location"] == f"{base_url}/redirected" -class TestAsyncGradientAI: - client = AsyncGradientAI( +class TestAsyncGradient: + client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1125,7 +1125,7 @@ def test_copy_default_options(self) -> None: assert isinstance(self.client.timeout, httpx.Timeout) def test_copy_default_headers(self) -> None: - client = AsyncGradientAI( + client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1164,7 +1164,7 @@ def test_copy_default_headers(self) -> None: client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"}) def test_copy_default_query(self) -> None: - client = AsyncGradientAI( + client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1261,10 +1261,10 @@ def add_leak(leaks: list[tracemalloc.StatisticDiff], diff: tracemalloc.Statistic # to_raw_response_wrapper leaks through the @functools.wraps() decorator. # # removing the decorator fixes the leak for reasons we don't understand. - "do_gradientai/_legacy_response.py", - "do_gradientai/_response.py", + "gradient/_legacy_response.py", + "gradient/_response.py", # pydantic.BaseModel.model_dump || pydantic.BaseModel.dict leak memory for some reason. - "do_gradientai/_compat.py", + "gradient/_compat.py", # Standard library leaks we don't care about. 
"/logging/__init__.py", ] @@ -1295,7 +1295,7 @@ async def test_request_timeout(self) -> None: assert timeout == httpx.Timeout(100.0) async def test_client_timeout_option(self) -> None: - client = AsyncGradientAI( + client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1311,7 +1311,7 @@ async def test_client_timeout_option(self) -> None: async def test_http_client_timeout_option(self) -> None: # custom timeout given to the httpx client should be used async with httpx.AsyncClient(timeout=None) as http_client: - client = AsyncGradientAI( + client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1326,7 +1326,7 @@ async def test_http_client_timeout_option(self) -> None: # no timeout given to the httpx client should not use the httpx default async with httpx.AsyncClient() as http_client: - client = AsyncGradientAI( + client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1341,7 +1341,7 @@ async def test_http_client_timeout_option(self) -> None: # explicitly passing the default timeout currently results in it being ignored async with httpx.AsyncClient(timeout=HTTPX_DEFAULT_TIMEOUT) as http_client: - client = AsyncGradientAI( + client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1357,7 +1357,7 @@ async def test_http_client_timeout_option(self) -> None: def test_invalid_http_client(self) -> None: with pytest.raises(TypeError, match="Invalid `http_client` arg"): with httpx.Client() as http_client: - AsyncGradientAI( + AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1367,7 +1367,7 @@ def test_invalid_http_client(self) -> None: ) def test_default_headers_option(self) -> None: - client = AsyncGradientAI( + client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1379,7 +1379,7 @@ def test_default_headers_option(self) -> None: assert request.headers.get("x-foo") == "bar" assert request.headers.get("x-stainless-lang") == "python" - client2 = AsyncGradientAI( + client2 = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1395,7 +1395,7 @@ def test_default_headers_option(self) -> None: assert request.headers.get("x-stainless-lang") == "my-overriding-header" def test_validate_headers(self) -> None: - client = AsyncGradientAI( + client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1405,8 +1405,8 @@ def test_validate_headers(self) -> None: request = client._build_request(FinalRequestOptions(method="get", url="/foo")) assert request.headers.get("Authorization") == f"Bearer {api_key}" - with update_env(**{"GRADIENTAI_API_KEY": Omit()}): - client2 = AsyncGradientAI( + with update_env(**{"GRADIENT_API_KEY": Omit()}): + client2 = AsyncGradient( base_url=base_url, api_key=None, inference_key=inference_key, @@ -1426,7 +1426,7 @@ def test_validate_headers(self) -> None: assert request2.headers.get("Authorization") is None def test_default_query_option(self) -> None: - client = AsyncGradientAI( + client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1545,7 +1545,7 @@ def test_request_extra_query(self) -> None: params = dict(request.url.params) assert params == {"foo": "2"} - def test_multipart_repeating_array(self, async_client: AsyncGradientAI) -> None: + def test_multipart_repeating_array(self, async_client: AsyncGradient) -> None: request = async_client._build_request( 
FinalRequestOptions.construct( method="post", @@ -1632,7 +1632,7 @@ class Model(BaseModel): assert response.foo == 2 def test_base_url_setter(self) -> None: - client = AsyncGradientAI( + client = AsyncGradient( base_url="https://example.com/from_init", api_key=api_key, inference_key=inference_key, @@ -1646,8 +1646,8 @@ def test_base_url_setter(self) -> None: assert client.base_url == "https://example.com/from_setter/" def test_base_url_env(self) -> None: - with update_env(GRADIENT_AI_BASE_URL="http://localhost:5000/from/env"): - client = AsyncGradientAI( + with update_env(GRADIENT_BASE_URL="http://localhost:5000/from/env"): + client = AsyncGradient( api_key=api_key, inference_key=inference_key, agent_key=agent_key, _strict_response_validation=True ) assert client.base_url == "http://localhost:5000/from/env/" @@ -1655,14 +1655,14 @@ def test_base_url_env(self) -> None: @pytest.mark.parametrize( "client", [ - AsyncGradientAI( + AsyncGradient( base_url="http://localhost:5000/custom/path/", api_key=api_key, inference_key=inference_key, agent_key=agent_key, _strict_response_validation=True, ), - AsyncGradientAI( + AsyncGradient( base_url="http://localhost:5000/custom/path/", api_key=api_key, inference_key=inference_key, @@ -1673,7 +1673,7 @@ def test_base_url_env(self) -> None: ], ids=["standard", "custom http client"], ) - def test_base_url_trailing_slash(self, client: AsyncGradientAI) -> None: + def test_base_url_trailing_slash(self, client: AsyncGradient) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -1686,14 +1686,14 @@ def test_base_url_trailing_slash(self, client: AsyncGradientAI) -> None: @pytest.mark.parametrize( "client", [ - AsyncGradientAI( + AsyncGradient( base_url="http://localhost:5000/custom/path/", api_key=api_key, inference_key=inference_key, agent_key=agent_key, _strict_response_validation=True, ), - AsyncGradientAI( + AsyncGradient( base_url="http://localhost:5000/custom/path/", api_key=api_key, inference_key=inference_key, @@ -1704,7 +1704,7 @@ def test_base_url_trailing_slash(self, client: AsyncGradientAI) -> None: ], ids=["standard", "custom http client"], ) - def test_base_url_no_trailing_slash(self, client: AsyncGradientAI) -> None: + def test_base_url_no_trailing_slash(self, client: AsyncGradient) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -1717,14 +1717,14 @@ def test_base_url_no_trailing_slash(self, client: AsyncGradientAI) -> None: @pytest.mark.parametrize( "client", [ - AsyncGradientAI( + AsyncGradient( base_url="http://localhost:5000/custom/path/", api_key=api_key, inference_key=inference_key, agent_key=agent_key, _strict_response_validation=True, ), - AsyncGradientAI( + AsyncGradient( base_url="http://localhost:5000/custom/path/", api_key=api_key, inference_key=inference_key, @@ -1735,7 +1735,7 @@ def test_base_url_no_trailing_slash(self, client: AsyncGradientAI) -> None: ], ids=["standard", "custom http client"], ) - def test_absolute_request_url(self, client: AsyncGradientAI) -> None: + def test_absolute_request_url(self, client: AsyncGradient) -> None: request = client._build_request( FinalRequestOptions( method="post", @@ -1746,7 +1746,7 @@ def test_absolute_request_url(self, client: AsyncGradientAI) -> None: assert request.url == "https://myapi.com/foo" async def test_copied_client_does_not_close_http(self) -> None: - client = AsyncGradientAI( + client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1764,7 +1764,7 @@ async def 
test_copied_client_does_not_close_http(self) -> None: assert not client.is_closed() async def test_client_context_manager(self) -> None: - client = AsyncGradientAI( + client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1792,7 +1792,7 @@ class Model(BaseModel): async def test_client_max_retries_validation(self) -> None: with pytest.raises(TypeError, match=r"max_retries cannot be None"): - AsyncGradientAI( + AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1821,7 +1821,7 @@ class Model(BaseModel): respx_mock.get("/foo").mock(return_value=httpx.Response(200, text="my-custom-format")) - strict_client = AsyncGradientAI( + strict_client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1832,7 +1832,7 @@ class Model(BaseModel): with pytest.raises(APIResponseValidationError): await strict_client.get("/foo", cast_to=Model) - client = AsyncGradientAI( + client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1867,7 +1867,7 @@ class Model(BaseModel): @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) @pytest.mark.asyncio async def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str, timeout: float) -> None: - client = AsyncGradientAI( + client = AsyncGradient( base_url=base_url, api_key=api_key, inference_key=inference_key, @@ -1880,10 +1880,10 @@ async def test_parse_retry_after_header(self, remaining_retries: int, retry_afte calculated = client._calculate_retry_timeout(remaining_retries, options, headers) assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: ignore[reportUnknownMemberType] - @mock.patch("do_gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradient._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) async def test_retrying_timeout_errors_doesnt_leak( - self, respx_mock: MockRouter, async_client: AsyncGradientAI + self, respx_mock: MockRouter, async_client: AsyncGradient ) -> None: respx_mock.post("/chat/completions").mock(side_effect=httpx.TimeoutException("Test timeout error")) @@ -1900,10 +1900,10 @@ async def test_retrying_timeout_errors_doesnt_leak( assert _get_open_connections(self.client) == 0 - @mock.patch("do_gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradient._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) async def test_retrying_status_errors_doesnt_leak( - self, respx_mock: MockRouter, async_client: AsyncGradientAI + self, respx_mock: MockRouter, async_client: AsyncGradient ) -> None: respx_mock.post("/chat/completions").mock(return_value=httpx.Response(500)) @@ -1920,13 +1920,13 @@ async def test_retrying_status_errors_doesnt_leak( assert _get_open_connections(self.client) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) - @mock.patch("do_gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradient._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) @pytest.mark.asyncio @pytest.mark.parametrize("failure_mode", ["status", "exception"]) async def test_retries_taken( self, - async_client: AsyncGradientAI, + async_client: AsyncGradient, failures_before_success: int, failure_mode: Literal["status", "exception"], respx_mock: MockRouter, @@ 
-1960,11 +1960,11 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) - @mock.patch("do_gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradient._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) @pytest.mark.asyncio async def test_omit_retry_count_header( - self, async_client: AsyncGradientAI, failures_before_success: int, respx_mock: MockRouter + self, async_client: AsyncGradient, failures_before_success: int, respx_mock: MockRouter ) -> None: client = async_client.with_options(max_retries=4) @@ -1993,11 +1993,11 @@ def retry_handler(_request: httpx.Request) -> httpx.Response: assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) - @mock.patch("do_gradientai._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @mock.patch("gradient._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) @pytest.mark.respx(base_url=base_url) @pytest.mark.asyncio async def test_overwrite_retry_count_header( - self, async_client: AsyncGradientAI, failures_before_success: int, respx_mock: MockRouter + self, async_client: AsyncGradient, failures_before_success: int, respx_mock: MockRouter ) -> None: client = async_client.with_options(max_retries=4) @@ -2036,8 +2036,8 @@ def test_get_platform(self) -> None: import nest_asyncio import threading - from do_gradientai._utils import asyncify - from do_gradientai._base_client import get_platform + from gradient._utils import asyncify + from gradient._base_client import get_platform async def test_main() -> None: result = await asyncify(get_platform)() diff --git a/tests/test_deepcopy.py b/tests/test_deepcopy.py index 5a98ce1b..b5520a27 100644 --- a/tests/test_deepcopy.py +++ b/tests/test_deepcopy.py @@ -1,4 +1,4 @@ -from do_gradientai._utils import deepcopy_minimal +from gradient._utils import deepcopy_minimal def assert_different_identities(obj1: object, obj2: object) -> None: diff --git a/tests/test_extract_files.py b/tests/test_extract_files.py index 341e65ae..9514d242 100644 --- a/tests/test_extract_files.py +++ b/tests/test_extract_files.py @@ -4,8 +4,8 @@ import pytest -from do_gradientai._types import FileTypes -from do_gradientai._utils import extract_files +from gradient._types import FileTypes +from gradient._utils import extract_files def test_removes_files_from_input() -> None: diff --git a/tests/test_files.py b/tests/test_files.py index ff7914bb..4d9f4066 100644 --- a/tests/test_files.py +++ b/tests/test_files.py @@ -4,7 +4,7 @@ import pytest from dirty_equals import IsDict, IsList, IsBytes, IsTuple -from do_gradientai._files import to_httpx_files, async_to_httpx_files +from gradient._files import to_httpx_files, async_to_httpx_files readme_path = Path(__file__).parent.parent.joinpath("README.md") diff --git a/tests/test_models.py b/tests/test_models.py index 9a3891e3..9a2ee908 100644 --- a/tests/test_models.py +++ b/tests/test_models.py @@ -7,9 +7,9 @@ import pydantic from pydantic import Field -from do_gradientai._utils import PropertyInfo -from do_gradientai._compat import PYDANTIC_V2, parse_obj, model_dump, model_json -from do_gradientai._models import BaseModel, construct_type +from gradient._utils import PropertyInfo +from gradient._compat 
import PYDANTIC_V2, parse_obj, model_dump, model_json +from gradient._models import BaseModel, construct_type class BasicModel(BaseModel): diff --git a/tests/test_qs.py b/tests/test_qs.py index c9213571..32fb2091 100644 --- a/tests/test_qs.py +++ b/tests/test_qs.py @@ -4,7 +4,7 @@ import pytest -from do_gradientai._qs import Querystring, stringify +from gradient._qs import Querystring, stringify def test_empty() -> None: diff --git a/tests/test_required_args.py b/tests/test_required_args.py index 434e9491..3956dc02 100644 --- a/tests/test_required_args.py +++ b/tests/test_required_args.py @@ -2,7 +2,7 @@ import pytest -from do_gradientai._utils import required_args +from gradient._utils import required_args def test_too_many_positional_params() -> None: diff --git a/tests/test_response.py b/tests/test_response.py index 001ce776..6dd53185 100644 --- a/tests/test_response.py +++ b/tests/test_response.py @@ -6,8 +6,8 @@ import pytest import pydantic -from do_gradientai import BaseModel, GradientAI, AsyncGradientAI -from do_gradientai._response import ( +from gradient import Gradient, BaseModel, AsyncGradient +from gradient._response import ( APIResponse, BaseAPIResponse, AsyncAPIResponse, @@ -15,8 +15,8 @@ AsyncBinaryAPIResponse, extract_response_type, ) -from do_gradientai._streaming import Stream -from do_gradientai._base_client import FinalRequestOptions +from gradient._streaming import Stream +from gradient._base_client import FinalRequestOptions class ConcreteBaseAPIResponse(APIResponse[bytes]): ... @@ -37,7 +37,7 @@ def test_extract_response_type_direct_classes() -> None: def test_extract_response_type_direct_class_missing_type_arg() -> None: with pytest.raises( RuntimeError, - match="Expected type to have a type argument at index 0 but it did not", + match="Expected type to have a type argument at index 0 but it did not", ): extract_response_type(AsyncAPIResponse) @@ -56,7 +56,7 @@ def test_extract_response_type_binary_response() -> None: class PydanticModel(pydantic.BaseModel): ... -def test_response_parse_mismatched_basemodel(client: GradientAI) -> None: +def test_response_parse_mismatched_basemodel(client: Gradient) -> None: response = APIResponse( raw=httpx.Response(200, content=b"foo"), client=client, @@ -68,13 +68,13 @@ def test_response_parse_mismatched_basemodel(client: GradientAI) -> None: with pytest.raises( TypeError, - match="Pydantic models must subclass our base model type, e.g. `from do_gradientai import BaseModel`", + match="Pydantic models must subclass our base model type, e.g. `from gradient import BaseModel`", ): response.parse(to=PydanticModel) @pytest.mark.asyncio -async def test_async_response_parse_mismatched_basemodel(async_client: AsyncGradientAI) -> None: +async def test_async_response_parse_mismatched_basemodel(async_client: AsyncGradient) -> None: response = AsyncAPIResponse( raw=httpx.Response(200, content=b"foo"), client=async_client, @@ -86,12 +86,12 @@ async def test_async_response_parse_mismatched_basemodel(async_client: AsyncGrad with pytest.raises( TypeError, - match="Pydantic models must subclass our base model type, e.g. `from do_gradientai import BaseModel`", + match="Pydantic models must subclass our base model type, e.g. 
`from gradient import BaseModel`", ): await response.parse(to=PydanticModel) -def test_response_parse_custom_stream(client: GradientAI) -> None: +def test_response_parse_custom_stream(client: Gradient) -> None: response = APIResponse( raw=httpx.Response(200, content=b"foo"), client=client, @@ -106,7 +106,7 @@ def test_response_parse_custom_stream(client: GradientAI) -> None: @pytest.mark.asyncio -async def test_async_response_parse_custom_stream(async_client: AsyncGradientAI) -> None: +async def test_async_response_parse_custom_stream(async_client: AsyncGradient) -> None: response = AsyncAPIResponse( raw=httpx.Response(200, content=b"foo"), client=async_client, @@ -125,7 +125,7 @@ class CustomModel(BaseModel): bar: int -def test_response_parse_custom_model(client: GradientAI) -> None: +def test_response_parse_custom_model(client: Gradient) -> None: response = APIResponse( raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), client=client, @@ -141,7 +141,7 @@ def test_response_parse_custom_model(client: GradientAI) -> None: @pytest.mark.asyncio -async def test_async_response_parse_custom_model(async_client: AsyncGradientAI) -> None: +async def test_async_response_parse_custom_model(async_client: AsyncGradient) -> None: response = AsyncAPIResponse( raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), client=async_client, @@ -156,7 +156,7 @@ async def test_async_response_parse_custom_model(async_client: AsyncGradientAI) assert obj.bar == 2 -def test_response_parse_annotated_type(client: GradientAI) -> None: +def test_response_parse_annotated_type(client: Gradient) -> None: response = APIResponse( raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), client=client, @@ -173,7 +173,7 @@ def test_response_parse_annotated_type(client: GradientAI) -> None: assert obj.bar == 2 -async def test_async_response_parse_annotated_type(async_client: AsyncGradientAI) -> None: +async def test_async_response_parse_annotated_type(async_client: AsyncGradient) -> None: response = AsyncAPIResponse( raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), client=async_client, @@ -201,7 +201,7 @@ async def test_async_response_parse_annotated_type(async_client: AsyncGradientAI ("FalSe", False), ], ) -def test_response_parse_bool(client: GradientAI, content: str, expected: bool) -> None: +def test_response_parse_bool(client: Gradient, content: str, expected: bool) -> None: response = APIResponse( raw=httpx.Response(200, content=content), client=client, @@ -226,7 +226,7 @@ def test_response_parse_bool(client: GradientAI, content: str, expected: bool) - ("FalSe", False), ], ) -async def test_async_response_parse_bool(client: AsyncGradientAI, content: str, expected: bool) -> None: +async def test_async_response_parse_bool(client: AsyncGradient, content: str, expected: bool) -> None: response = AsyncAPIResponse( raw=httpx.Response(200, content=content), client=client, @@ -245,7 +245,7 @@ class OtherModel(BaseModel): @pytest.mark.parametrize("client", [False], indirect=True) # loose validation -def test_response_parse_expect_model_union_non_json_content(client: GradientAI) -> None: +def test_response_parse_expect_model_union_non_json_content(client: Gradient) -> None: response = APIResponse( raw=httpx.Response(200, content=b"foo", headers={"Content-Type": "application/text"}), client=client, @@ -262,7 +262,7 @@ def test_response_parse_expect_model_union_non_json_content(client: GradientAI) @pytest.mark.asyncio 
@pytest.mark.parametrize("async_client", [False], indirect=True) # loose validation -async def test_async_response_parse_expect_model_union_non_json_content(async_client: AsyncGradientAI) -> None: +async def test_async_response_parse_expect_model_union_non_json_content(async_client: AsyncGradient) -> None: response = AsyncAPIResponse( raw=httpx.Response(200, content=b"foo", headers={"Content-Type": "application/text"}), client=async_client, diff --git a/tests/test_streaming.py b/tests/test_streaming.py index c1ce8e85..c4a8e46f 100644 --- a/tests/test_streaming.py +++ b/tests/test_streaming.py @@ -5,13 +5,13 @@ import httpx import pytest -from do_gradientai import GradientAI, AsyncGradientAI -from do_gradientai._streaming import Stream, AsyncStream, ServerSentEvent +from gradient import Gradient, AsyncGradient +from gradient._streaming import Stream, AsyncStream, ServerSentEvent @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) -async def test_basic(sync: bool, client: GradientAI, async_client: AsyncGradientAI) -> None: +async def test_basic(sync: bool, client: Gradient, async_client: AsyncGradient) -> None: def body() -> Iterator[bytes]: yield b"event: completion\n" yield b'data: {"foo":true}\n' @@ -28,7 +28,7 @@ def body() -> Iterator[bytes]: @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) -async def test_data_missing_event(sync: bool, client: GradientAI, async_client: AsyncGradientAI) -> None: +async def test_data_missing_event(sync: bool, client: Gradient, async_client: AsyncGradient) -> None: def body() -> Iterator[bytes]: yield b'data: {"foo":true}\n' yield b"\n" @@ -44,7 +44,7 @@ def body() -> Iterator[bytes]: @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) -async def test_event_missing_data(sync: bool, client: GradientAI, async_client: AsyncGradientAI) -> None: +async def test_event_missing_data(sync: bool, client: Gradient, async_client: AsyncGradient) -> None: def body() -> Iterator[bytes]: yield b"event: ping\n" yield b"\n" @@ -60,7 +60,7 @@ def body() -> Iterator[bytes]: @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) -async def test_multiple_events(sync: bool, client: GradientAI, async_client: AsyncGradientAI) -> None: +async def test_multiple_events(sync: bool, client: Gradient, async_client: AsyncGradient) -> None: def body() -> Iterator[bytes]: yield b"event: ping\n" yield b"\n" @@ -82,7 +82,7 @@ def body() -> Iterator[bytes]: @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) -async def test_multiple_events_with_data(sync: bool, client: GradientAI, async_client: AsyncGradientAI) -> None: +async def test_multiple_events_with_data(sync: bool, client: Gradient, async_client: AsyncGradient) -> None: def body() -> Iterator[bytes]: yield b"event: ping\n" yield b'data: {"foo":true}\n' @@ -106,9 +106,7 @@ def body() -> Iterator[bytes]: @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) -async def test_multiple_data_lines_with_empty_line( - sync: bool, client: GradientAI, async_client: AsyncGradientAI -) -> None: +async def test_multiple_data_lines_with_empty_line(sync: bool, client: Gradient, async_client: AsyncGradient) -> None: def body() -> Iterator[bytes]: yield b"event: ping\n" yield b"data: {\n" @@ -130,7 +128,7 @@ def body() -> Iterator[bytes]: @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], 
ids=["sync", "async"]) -async def test_data_json_escaped_double_new_line(sync: bool, client: GradientAI, async_client: AsyncGradientAI) -> None: +async def test_data_json_escaped_double_new_line(sync: bool, client: Gradient, async_client: AsyncGradient) -> None: def body() -> Iterator[bytes]: yield b"event: ping\n" yield b'data: {"foo": "my long\\n\\ncontent"}' @@ -147,7 +145,7 @@ def body() -> Iterator[bytes]: @pytest.mark.asyncio @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) -async def test_multiple_data_lines(sync: bool, client: GradientAI, async_client: AsyncGradientAI) -> None: +async def test_multiple_data_lines(sync: bool, client: Gradient, async_client: AsyncGradient) -> None: def body() -> Iterator[bytes]: yield b"event: ping\n" yield b"data: {\n" @@ -167,8 +165,8 @@ def body() -> Iterator[bytes]: @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) async def test_special_new_line_character( sync: bool, - client: GradientAI, - async_client: AsyncGradientAI, + client: Gradient, + async_client: AsyncGradient, ) -> None: def body() -> Iterator[bytes]: yield b'data: {"content":" culpa"}\n' @@ -198,8 +196,8 @@ def body() -> Iterator[bytes]: @pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) async def test_multi_byte_character_multiple_chunks( sync: bool, - client: GradientAI, - async_client: AsyncGradientAI, + client: Gradient, + async_client: AsyncGradient, ) -> None: def body() -> Iterator[bytes]: yield b'data: {"content":"' @@ -239,8 +237,8 @@ def make_event_iterator( content: Iterator[bytes], *, sync: bool, - client: GradientAI, - async_client: AsyncGradientAI, + client: Gradient, + async_client: AsyncGradient, ) -> Iterator[ServerSentEvent] | AsyncIterator[ServerSentEvent]: if sync: return Stream(cast_to=object, client=client, response=httpx.Response(200, content=content))._iter_events() diff --git a/tests/test_transform.py b/tests/test_transform.py index 30c06d6a..552462fa 100644 --- a/tests/test_transform.py +++ b/tests/test_transform.py @@ -8,15 +8,15 @@ import pytest -from do_gradientai._types import NOT_GIVEN, Base64FileInput -from do_gradientai._utils import ( +from gradient._types import NOT_GIVEN, Base64FileInput +from gradient._utils import ( PropertyInfo, transform as _transform, parse_datetime, async_transform as _async_transform, ) -from do_gradientai._compat import PYDANTIC_V2 -from do_gradientai._models import BaseModel +from gradient._compat import PYDANTIC_V2 +from gradient._models import BaseModel _T = TypeVar("_T") diff --git a/tests/test_utils/test_proxy.py b/tests/test_utils/test_proxy.py index 9ce2e0d3..af6d092a 100644 --- a/tests/test_utils/test_proxy.py +++ b/tests/test_utils/test_proxy.py @@ -2,7 +2,7 @@ from typing import Any from typing_extensions import override -from do_gradientai._utils import LazyProxy +from gradient._utils import LazyProxy class RecursiveLazyProxy(LazyProxy[Any]): diff --git a/tests/test_utils/test_typing.py b/tests/test_utils/test_typing.py index c9129fdc..5f9711a2 100644 --- a/tests/test_utils/test_typing.py +++ b/tests/test_utils/test_typing.py @@ -2,7 +2,7 @@ from typing import Generic, TypeVar, cast -from do_gradientai._utils import extract_type_var_from_base +from gradient._utils import extract_type_var_from_base _T = TypeVar("_T") _T2 = TypeVar("_T2") diff --git a/tests/utils.py b/tests/utils.py index 9def7c60..e150f00b 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -8,8 +8,8 @@ from datetime import date, datetime from typing_extensions import Literal, 
get_args, get_origin, assert_type -from do_gradientai._types import Omit, NoneType -from do_gradientai._utils import ( +from gradient._types import Omit, NoneType +from gradient._utils import ( is_dict, is_list, is_list_type, @@ -18,8 +18,8 @@ is_annotated_type, is_type_alias_type, ) -from do_gradientai._compat import PYDANTIC_V2, field_outer_type, get_model_fields -from do_gradientai._models import BaseModel +from gradient._compat import PYDANTIC_V2, field_outer_type, get_model_fields +from gradient._models import BaseModel BaseModelT = TypeVar("BaseModelT", bound=BaseModel)
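
Taken together, the test updates above track three coordinated renames: the import package (do_gradientai -> gradient), the client classes (GradientAI / AsyncGradientAI -> Gradient / AsyncGradient), and the environment variables the client consults (GRADIENTAI_API_KEY -> GRADIENT_API_KEY, GRADIENT_AI_BASE_URL -> GRADIENT_BASE_URL). The sketch below shows what downstream calling code looks like after the rename; it is illustrative only, and the keyword arguments simply mirror the test fixtures in this diff (api_key, inference_key, agent_key) rather than documenting the full public constructor.

    # Before: from do_gradientai import GradientAI, AsyncGradientAI
    from gradient import Gradient, AsyncGradient

    # Placeholder credentials; per the tests above, omitting them makes the client
    # fall back to the renamed environment variables (GRADIENT_API_KEY,
    # GRADIENT_BASE_URL) instead of the old GRADIENTAI_API_KEY / GRADIENT_AI_BASE_URL.
    client = Gradient(
        api_key="My API Key",
        inference_key="My Inference Key",
        agent_key="My Agent Key",
    )

    async_client = AsyncGradient(
        api_key="My API Key",
        inference_key="My Inference Key",
        agent_key="My Agent Key",
    )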
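
Internal module paths used as mock.patch targets move the same way (do_gradientai._base_client -> gradient._base_client, and likewise _utils, _compat, _models, _streaming). A hypothetical patch in the spirit of the _low_retry_timeout fixtures above, not a documented extension point:

    from unittest import mock

    # Speed up retry tests by stubbing the backoff calculation at its new module path.
    with mock.patch("gradient._base_client.BaseClient._calculate_retry_timeout", lambda *args, **kwargs: 0.01):
        ...  # exercise client.post("/chat/completions", ...) against a mocked transport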