diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index b0699969..08e82c45 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.1.0-alpha.14"
+ ".": "0.1.0-alpha.15"
}
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index c0860901..c001b5ff 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
-configured_endpoints: 169
+configured_endpoints: 168
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/digitalocean%2Fgradientai-f8e8c290636c1e218efcf7bfe92ba7570c11690754d21287d838919fbc943a80.yml
openapi_spec_hash: 1eddf488ecbe415efb45445697716f5d
-config_hash: c59a2f17744fc2b7a8248ec916b8aa70
+config_hash: 0a72b6161859b504ed3b5a2a142ba5a5
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 33b1a566..f37f144c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,32 @@
# Changelog
+## 0.1.0-alpha.15 (2025-07-18)
+
+Full Changelog: [v0.1.0-alpha.14...v0.1.0-alpha.15](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.14...v0.1.0-alpha.15)
+
+### Features
+
+* **api:** add gpu droplets ([b207e9a](https://github.com/digitalocean/gradientai-python/commit/b207e9a69ddf821522f5d9e9f10502850220585f))
+* **api:** add gpu droplets ([b9e317b](https://github.com/digitalocean/gradientai-python/commit/b9e317bac2c541a7eafcfb59a4b19c81e1145075))
+
+
+### Chores
+
+* format ([d940e66](https://github.com/digitalocean/gradientai-python/commit/d940e66107e00f351853c0bc667ca6ed3cf98605))
+* **internal:** version bump ([1a66126](https://github.com/digitalocean/gradientai-python/commit/1a661264f68580dff74c3f7d4891ab2661fde190))
+* **internal:** version bump ([9c546a1](https://github.com/digitalocean/gradientai-python/commit/9c546a1f97241bb448430e1e43f4e20589e243c1))
+* **internal:** version bump ([8814098](https://github.com/digitalocean/gradientai-python/commit/881409847161671b798baf2c89f37ae29e195f29))
+* **internal:** version bump ([bb3ad60](https://github.com/digitalocean/gradientai-python/commit/bb3ad60d02fe01b937eaced64682fd66d95a9aec))
+* **internal:** version bump ([2022024](https://github.com/digitalocean/gradientai-python/commit/20220246634accf95c4a53df200db5ace7107c55))
+* **internal:** version bump ([52e2c23](https://github.com/digitalocean/gradientai-python/commit/52e2c23c23d4dc27c176ebf4783c8fbd86a4c07b))
+* **internal:** version bump ([8ac0f2a](https://github.com/digitalocean/gradientai-python/commit/8ac0f2a6d4862907243ba78b132373289e2c3543))
+* **internal:** version bump ([d83fe97](https://github.com/digitalocean/gradientai-python/commit/d83fe97aa2f77c84c3c7f4bf40b9fb94c5c28aca))
+* **internal:** version bump ([9d20399](https://github.com/digitalocean/gradientai-python/commit/9d2039919e1d9c9e6d153edfb03bccff18b56686))
+* **internal:** version bump ([44a045a](https://github.com/digitalocean/gradientai-python/commit/44a045a9c0ce0f0769cce66bc7421a9d81cbc645))
+* **internal:** version bump ([95d1dd2](https://github.com/digitalocean/gradientai-python/commit/95d1dd24d290d7d5f23328e4c45c439dca5df748))
+* **internal:** version bump ([7416147](https://github.com/digitalocean/gradientai-python/commit/74161477f98e3a76b7227b07d942e1f26a4612b3))
+* **internal:** version bump ([06d7f19](https://github.com/digitalocean/gradientai-python/commit/06d7f19cd42a6bc578b39709fe6efed8741a24bc))
+
## 0.1.0-alpha.14 (2025-07-17)
Full Changelog: [v0.1.0-alpha.13...v0.1.0-alpha.14](https://github.com/digitalocean/gradientai-python/compare/v0.1.0-alpha.13...v0.1.0-alpha.14)
diff --git a/README.md b/README.md
index c2cb97ad..bebfbb0e 100644
--- a/README.md
+++ b/README.md
@@ -32,10 +32,13 @@ api_client = GradientAI(
api_key=os.environ.get("GRADIENTAI_API_KEY"), # This is the default and can be omitted
)
inference_client = GradientAI(
- inference_key=os.environ.get("GRADIENTAI_INFERENCE_KEY"), # This is the default and can be omitted
+ inference_key=os.environ.get(
+ "GRADIENTAI_INFERENCE_KEY"
+ ), # This is the default and can be omitted
)
agent_client = GradientAI(
agent_key=os.environ.get("GRADIENTAI_AGENT_KEY"), # This is the default and can be omitted
+ agent_endpoint="https://my-cool-agent.agents.do-ai.run",
)
print(api_client.agents.list())
@@ -51,7 +54,6 @@ completion = inference_client.chat.completions.create(
)
print(completion.choices[0].message)
-
```
While you can provide an `api_key`, `inference_key` keyword argument,
diff --git a/api.md b/api.md
index 20483e04..fa4e0edb 100644
--- a/api.md
+++ b/api.md
@@ -24,9 +24,6 @@ from do_gradientai.types import (
NetworkV6,
PageLinks,
Region,
- RepositoryBlob,
- RepositoryManifest,
- RepositoryTag,
Size,
Snapshots,
Subscription,
@@ -490,41 +487,41 @@ Methods:
- client.models.providers.openai.delete(api_key_uuid) -> OpenAIDeleteResponse
- client.models.providers.openai.retrieve_agents(uuid, \*\*params) -> OpenAIRetrieveAgentsResponse
-# Droplets
+# GPUDroplets
Types:
```python
from do_gradientai.types import (
DropletBackupPolicy,
- DropletCreateResponse,
- DropletRetrieveResponse,
- DropletListResponse,
- DropletListFirewallsResponse,
- DropletListKernelsResponse,
- DropletListNeighborsResponse,
- DropletListSnapshotsResponse,
+ GPUDropletCreateResponse,
+ GPUDropletRetrieveResponse,
+ GPUDropletListResponse,
+ GPUDropletListFirewallsResponse,
+ GPUDropletListKernelsResponse,
+ GPUDropletListNeighborsResponse,
+ GPUDropletListSnapshotsResponse,
)
```
Methods:
-- client.droplets.create(\*\*params) -> DropletCreateResponse
-- client.droplets.retrieve(droplet_id) -> DropletRetrieveResponse
-- client.droplets.list(\*\*params) -> DropletListResponse
-- client.droplets.delete(droplet_id) -> None
-- client.droplets.delete_by_tag(\*\*params) -> None
-- client.droplets.list_firewalls(droplet_id, \*\*params) -> DropletListFirewallsResponse
-- client.droplets.list_kernels(droplet_id, \*\*params) -> DropletListKernelsResponse
-- client.droplets.list_neighbors(droplet_id) -> DropletListNeighborsResponse
-- client.droplets.list_snapshots(droplet_id, \*\*params) -> DropletListSnapshotsResponse
+- client.gpu_droplets.create(\*\*params) -> GPUDropletCreateResponse
+- client.gpu_droplets.retrieve(droplet_id) -> GPUDropletRetrieveResponse
+- client.gpu_droplets.list(\*\*params) -> GPUDropletListResponse
+- client.gpu_droplets.delete(droplet_id) -> None
+- client.gpu_droplets.delete_by_tag(\*\*params) -> None
+- client.gpu_droplets.list_firewalls(droplet_id, \*\*params) -> GPUDropletListFirewallsResponse
+- client.gpu_droplets.list_kernels(droplet_id, \*\*params) -> GPUDropletListKernelsResponse
+- client.gpu_droplets.list_neighbors(droplet_id) -> GPUDropletListNeighborsResponse
+- client.gpu_droplets.list_snapshots(droplet_id, \*\*params) -> GPUDropletListSnapshotsResponse
## Backups
Types:
```python
-from do_gradientai.types.droplets import (
+from do_gradientai.types.gpu_droplets import (
BackupListResponse,
BackupListPoliciesResponse,
BackupListSupportedPoliciesResponse,
@@ -534,17 +531,17 @@ from do_gradientai.types.droplets import (
Methods:
-- client.droplets.backups.list(droplet_id, \*\*params) -> BackupListResponse
-- client.droplets.backups.list_policies(\*\*params) -> BackupListPoliciesResponse
-- client.droplets.backups.list_supported_policies() -> BackupListSupportedPoliciesResponse
-- client.droplets.backups.retrieve_policy(droplet_id) -> BackupRetrievePolicyResponse
+- client.gpu_droplets.backups.list(droplet_id, \*\*params) -> BackupListResponse
+- client.gpu_droplets.backups.list_policies(\*\*params) -> BackupListPoliciesResponse
+- client.gpu_droplets.backups.list_supported_policies() -> BackupListSupportedPoliciesResponse
+- client.gpu_droplets.backups.retrieve_policy(droplet_id) -> BackupRetrievePolicyResponse
## Actions
Types:
```python
-from do_gradientai.types.droplets import (
+from do_gradientai.types.gpu_droplets import (
ActionRetrieveResponse,
ActionListResponse,
ActionBulkInitiateResponse,
@@ -554,17 +551,17 @@ from do_gradientai.types.droplets import (
Methods:
-- client.droplets.actions.retrieve(action_id, \*, droplet_id) -> ActionRetrieveResponse
-- client.droplets.actions.list(droplet_id, \*\*params) -> ActionListResponse
-- client.droplets.actions.bulk_initiate(\*\*params) -> ActionBulkInitiateResponse
-- client.droplets.actions.initiate(droplet_id, \*\*params) -> ActionInitiateResponse
+- client.gpu_droplets.actions.retrieve(action_id, \*, droplet_id) -> ActionRetrieveResponse
+- client.gpu_droplets.actions.list(droplet_id, \*\*params) -> ActionListResponse
+- client.gpu_droplets.actions.bulk_initiate(\*\*params) -> ActionBulkInitiateResponse
+- client.gpu_droplets.actions.initiate(droplet_id, \*\*params) -> ActionInitiateResponse
## DestroyWithAssociatedResources
Types:
```python
-from do_gradientai.types.droplets import (
+from do_gradientai.types.gpu_droplets import (
AssociatedResource,
DestroyedAssociatedResource,
DestroyWithAssociatedResourceListResponse,
@@ -574,18 +571,18 @@ from do_gradientai.types.droplets import (
Methods:
-- client.droplets.destroy_with_associated_resources.list(droplet_id) -> DestroyWithAssociatedResourceListResponse
-- client.droplets.destroy_with_associated_resources.check_status(droplet_id) -> DestroyWithAssociatedResourceCheckStatusResponse
-- client.droplets.destroy_with_associated_resources.delete_dangerous(droplet_id) -> None
-- client.droplets.destroy_with_associated_resources.delete_selective(droplet_id, \*\*params) -> None
-- client.droplets.destroy_with_associated_resources.retry(droplet_id) -> None
+- client.gpu_droplets.destroy_with_associated_resources.list(droplet_id) -> DestroyWithAssociatedResourceListResponse
+- client.gpu_droplets.destroy_with_associated_resources.check_status(droplet_id) -> DestroyWithAssociatedResourceCheckStatusResponse
+- client.gpu_droplets.destroy_with_associated_resources.delete_dangerous(droplet_id) -> None
+- client.gpu_droplets.destroy_with_associated_resources.delete_selective(droplet_id, \*\*params) -> None
+- client.gpu_droplets.destroy_with_associated_resources.retry(droplet_id) -> None
## Autoscale
Types:
```python
-from do_gradientai.types.droplets import (
+from do_gradientai.types.gpu_droplets import (
AutoscalePool,
AutoscalePoolDropletTemplate,
AutoscalePoolDynamicConfig,
@@ -602,21 +599,21 @@ from do_gradientai.types.droplets import (
Methods:
-- client.droplets.autoscale.create(\*\*params) -> AutoscaleCreateResponse
-- client.droplets.autoscale.retrieve(autoscale_pool_id) -> AutoscaleRetrieveResponse
-- client.droplets.autoscale.update(autoscale_pool_id, \*\*params) -> AutoscaleUpdateResponse
-- client.droplets.autoscale.list(\*\*params) -> AutoscaleListResponse
-- client.droplets.autoscale.delete(autoscale_pool_id) -> None
-- client.droplets.autoscale.delete_dangerous(autoscale_pool_id) -> None
-- client.droplets.autoscale.list_history(autoscale_pool_id, \*\*params) -> AutoscaleListHistoryResponse
-- client.droplets.autoscale.list_members(autoscale_pool_id, \*\*params) -> AutoscaleListMembersResponse
+- client.gpu_droplets.autoscale.create(\*\*params) -> AutoscaleCreateResponse
+- client.gpu_droplets.autoscale.retrieve(autoscale_pool_id) -> AutoscaleRetrieveResponse
+- client.gpu_droplets.autoscale.update(autoscale_pool_id, \*\*params) -> AutoscaleUpdateResponse
+- client.gpu_droplets.autoscale.list(\*\*params) -> AutoscaleListResponse
+- client.gpu_droplets.autoscale.delete(autoscale_pool_id) -> None
+- client.gpu_droplets.autoscale.delete_dangerous(autoscale_pool_id) -> None
+- client.gpu_droplets.autoscale.list_history(autoscale_pool_id, \*\*params) -> AutoscaleListHistoryResponse
+- client.gpu_droplets.autoscale.list_members(autoscale_pool_id, \*\*params) -> AutoscaleListMembersResponse
-# Firewalls
+## Firewalls
Types:
```python
-from do_gradientai.types import (
+from do_gradientai.types.gpu_droplets import (
Firewall,
FirewallCreateResponse,
FirewallRetrieveResponse,
@@ -627,39 +624,39 @@ from do_gradientai.types import (
Methods:
-- client.firewalls.create(\*\*params) -> FirewallCreateResponse
-- client.firewalls.retrieve(firewall_id) -> FirewallRetrieveResponse
-- client.firewalls.update(firewall_id, \*\*params) -> FirewallUpdateResponse
-- client.firewalls.list(\*\*params) -> FirewallListResponse
-- client.firewalls.delete(firewall_id) -> None
+- client.gpu_droplets.firewalls.create(\*\*params) -> FirewallCreateResponse
+- client.gpu_droplets.firewalls.retrieve(firewall_id) -> FirewallRetrieveResponse
+- client.gpu_droplets.firewalls.update(firewall_id, \*\*params) -> FirewallUpdateResponse
+- client.gpu_droplets.firewalls.list(\*\*params) -> FirewallListResponse
+- client.gpu_droplets.firewalls.delete(firewall_id) -> None
-## Droplets
+### Droplets
Methods:
-- client.firewalls.droplets.add(firewall_id, \*\*params) -> None
-- client.firewalls.droplets.remove(firewall_id, \*\*params) -> None
+- client.gpu_droplets.firewalls.droplets.add(firewall_id, \*\*params) -> None
+- client.gpu_droplets.firewalls.droplets.remove(firewall_id, \*\*params) -> None
-## Tags
+### Tags
Methods:
-- client.firewalls.tags.add(firewall_id, \*\*params) -> None
-- client.firewalls.tags.remove(firewall_id, \*\*params) -> None
+- client.gpu_droplets.firewalls.tags.add(firewall_id, \*\*params) -> None
+- client.gpu_droplets.firewalls.tags.remove(firewall_id, \*\*params) -> None
-## Rules
+### Rules
Methods:
-- client.firewalls.rules.add(firewall_id, \*\*params) -> None
-- client.firewalls.rules.remove(firewall_id, \*\*params) -> None
+- client.gpu_droplets.firewalls.rules.add(firewall_id, \*\*params) -> None
+- client.gpu_droplets.firewalls.rules.remove(firewall_id, \*\*params) -> None
-# FloatingIPs
+## FloatingIPs
Types:
```python
-from do_gradientai.types import (
+from do_gradientai.types.gpu_droplets import (
FloatingIP,
FloatingIPCreateResponse,
FloatingIPRetrieveResponse,
@@ -669,17 +666,17 @@ from do_gradientai.types import (
Methods:
-- client.floating_ips.create(\*\*params) -> FloatingIPCreateResponse
-- client.floating_ips.retrieve(floating_ip) -> FloatingIPRetrieveResponse
-- client.floating_ips.list(\*\*params) -> FloatingIPListResponse
-- client.floating_ips.delete(floating_ip) -> None
+- client.gpu_droplets.floating_ips.create(\*\*params) -> FloatingIPCreateResponse
+- client.gpu_droplets.floating_ips.retrieve(floating_ip) -> FloatingIPRetrieveResponse
+- client.gpu_droplets.floating_ips.list(\*\*params) -> FloatingIPListResponse
+- client.gpu_droplets.floating_ips.delete(floating_ip) -> None
-## Actions
+### Actions
Types:
```python
-from do_gradientai.types.floating_ips import (
+from do_gradientai.types.gpu_droplets.floating_ips import (
ActionCreateResponse,
ActionRetrieveResponse,
ActionListResponse,
@@ -688,16 +685,16 @@ from do_gradientai.types.floating_ips import (
Methods:
-- client.floating_ips.actions.create(floating_ip, \*\*params) -> ActionCreateResponse
-- client.floating_ips.actions.retrieve(action_id, \*, floating_ip) -> ActionRetrieveResponse
-- client.floating_ips.actions.list(floating_ip) -> ActionListResponse
+- client.gpu_droplets.floating_ips.actions.create(floating_ip, \*\*params) -> ActionCreateResponse
+- client.gpu_droplets.floating_ips.actions.retrieve(action_id, \*, floating_ip) -> ActionRetrieveResponse
+- client.gpu_droplets.floating_ips.actions.list(floating_ip) -> ActionListResponse
-# Images
+## Images
Types:
```python
-from do_gradientai.types import (
+from do_gradientai.types.gpu_droplets import (
ImageCreateResponse,
ImageRetrieveResponse,
ImageUpdateResponse,
@@ -707,32 +704,32 @@ from do_gradientai.types import (
Methods:
-- client.images.create(\*\*params) -> ImageCreateResponse
-- client.images.retrieve(image_id) -> ImageRetrieveResponse
-- client.images.update(image_id, \*\*params) -> ImageUpdateResponse
-- client.images.list(\*\*params) -> ImageListResponse
-- client.images.delete(image_id) -> None
+- client.gpu_droplets.images.create(\*\*params) -> ImageCreateResponse
+- client.gpu_droplets.images.retrieve(image_id) -> ImageRetrieveResponse
+- client.gpu_droplets.images.update(image_id, \*\*params) -> ImageUpdateResponse
+- client.gpu_droplets.images.list(\*\*params) -> ImageListResponse
+- client.gpu_droplets.images.delete(image_id) -> None
-## Actions
+### Actions
Types:
```python
-from do_gradientai.types.images import ActionListResponse
+from do_gradientai.types.gpu_droplets.images import ActionListResponse
```
Methods:
-- client.images.actions.create(image_id, \*\*params) -> Action
-- client.images.actions.retrieve(action_id, \*, image_id) -> Action
-- client.images.actions.list(image_id) -> ActionListResponse
+- client.gpu_droplets.images.actions.create(image_id, \*\*params) -> Action
+- client.gpu_droplets.images.actions.retrieve(action_id, \*, image_id) -> Action
+- client.gpu_droplets.images.actions.list(image_id) -> ActionListResponse
-# LoadBalancers
+## LoadBalancers
Types:
```python
-from do_gradientai.types import (
+from do_gradientai.types.gpu_droplets import (
Domains,
ForwardingRule,
GlbSettings,
@@ -749,75 +746,79 @@ from do_gradientai.types import (
Methods:
-- client.load_balancers.create(\*\*params) -> LoadBalancerCreateResponse
-- client.load_balancers.retrieve(lb_id) -> LoadBalancerRetrieveResponse
-- client.load_balancers.update(lb_id, \*\*params) -> LoadBalancerUpdateResponse
-- client.load_balancers.list(\*\*params) -> LoadBalancerListResponse
-- client.load_balancers.delete(lb_id) -> None
-- client.load_balancers.delete_cache(lb_id) -> None
+- client.gpu_droplets.load_balancers.create(\*\*params) -> LoadBalancerCreateResponse
+- client.gpu_droplets.load_balancers.retrieve(lb_id) -> LoadBalancerRetrieveResponse
+- client.gpu_droplets.load_balancers.update(lb_id, \*\*params) -> LoadBalancerUpdateResponse
+- client.gpu_droplets.load_balancers.list(\*\*params) -> LoadBalancerListResponse
+- client.gpu_droplets.load_balancers.delete(lb_id) -> None
+- client.gpu_droplets.load_balancers.delete_cache(lb_id) -> None
-## Droplets
+### Droplets
Methods:
-- client.load_balancers.droplets.add(lb_id, \*\*params) -> None
-- client.load_balancers.droplets.remove(lb_id, \*\*params) -> None
+- client.gpu_droplets.load_balancers.droplets.add(lb_id, \*\*params) -> None
+- client.gpu_droplets.load_balancers.droplets.remove(lb_id, \*\*params) -> None
-## ForwardingRules
+### ForwardingRules
Methods:
-- client.load_balancers.forwarding_rules.add(lb_id, \*\*params) -> None
-- client.load_balancers.forwarding_rules.remove(lb_id, \*\*params) -> None
+- client.gpu_droplets.load_balancers.forwarding_rules.add(lb_id, \*\*params) -> None
+- client.gpu_droplets.load_balancers.forwarding_rules.remove(lb_id, \*\*params) -> None
-# Sizes
+## Sizes
Types:
```python
-from do_gradientai.types import SizeListResponse
+from do_gradientai.types.gpu_droplets import SizeListResponse
```
Methods:
-- client.sizes.list(\*\*params) -> SizeListResponse
+- client.gpu_droplets.sizes.list(\*\*params) -> SizeListResponse
-# Snapshots
+## Snapshots
Types:
```python
-from do_gradientai.types import SnapshotRetrieveResponse, SnapshotListResponse
+from do_gradientai.types.gpu_droplets import SnapshotRetrieveResponse, SnapshotListResponse
```
Methods:
-- client.snapshots.retrieve(snapshot_id) -> SnapshotRetrieveResponse
-- client.snapshots.list(\*\*params) -> SnapshotListResponse
-- client.snapshots.delete(snapshot_id) -> None
+- client.gpu_droplets.snapshots.retrieve(snapshot_id) -> SnapshotRetrieveResponse
+- client.gpu_droplets.snapshots.list(\*\*params) -> SnapshotListResponse
+- client.gpu_droplets.snapshots.delete(snapshot_id) -> None
-# Volumes
+## Volumes
Types:
```python
-from do_gradientai.types import VolumeCreateResponse, VolumeRetrieveResponse, VolumeListResponse
+from do_gradientai.types.gpu_droplets import (
+ VolumeCreateResponse,
+ VolumeRetrieveResponse,
+ VolumeListResponse,
+)
```
Methods:
-- client.volumes.create(\*\*params) -> VolumeCreateResponse
-- client.volumes.retrieve(volume_id) -> VolumeRetrieveResponse
-- client.volumes.list(\*\*params) -> VolumeListResponse
-- client.volumes.delete(volume_id) -> None
-- client.volumes.delete_by_name(\*\*params) -> None
+- client.gpu_droplets.volumes.create(\*\*params) -> VolumeCreateResponse
+- client.gpu_droplets.volumes.retrieve(volume_id) -> VolumeRetrieveResponse
+- client.gpu_droplets.volumes.list(\*\*params) -> VolumeListResponse
+- client.gpu_droplets.volumes.delete(volume_id) -> None
+- client.gpu_droplets.volumes.delete_by_name(\*\*params) -> None
-## Actions
+### Actions
Types:
```python
-from do_gradientai.types.volumes import (
+from do_gradientai.types.gpu_droplets.volumes import (
VolumeAction,
ActionRetrieveResponse,
ActionListResponse,
@@ -828,17 +829,17 @@ from do_gradientai.types.volumes import (
Methods:
-- client.volumes.actions.retrieve(action_id, \*, volume_id, \*\*params) -> ActionRetrieveResponse
-- client.volumes.actions.list(volume_id, \*\*params) -> ActionListResponse
-- client.volumes.actions.initiate_by_id(volume_id, \*\*params) -> ActionInitiateByIDResponse
-- client.volumes.actions.initiate_by_name(\*\*params) -> ActionInitiateByNameResponse
+- client.gpu_droplets.volumes.actions.retrieve(action_id, \*, volume_id, \*\*params) -> ActionRetrieveResponse
+- client.gpu_droplets.volumes.actions.list(volume_id, \*\*params) -> ActionListResponse
+- client.gpu_droplets.volumes.actions.initiate_by_id(volume_id, \*\*params) -> ActionInitiateByIDResponse
+- client.gpu_droplets.volumes.actions.initiate_by_name(\*\*params) -> ActionInitiateByNameResponse
-## Snapshots
+### Snapshots
Types:
```python
-from do_gradientai.types.volumes import (
+from do_gradientai.types.gpu_droplets.volumes import (
SnapshotCreateResponse,
SnapshotRetrieveResponse,
SnapshotListResponse,
@@ -847,29 +848,19 @@ from do_gradientai.types.volumes import (
Methods:
-- client.volumes.snapshots.create(volume_id, \*\*params) -> SnapshotCreateResponse
-- client.volumes.snapshots.retrieve(snapshot_id) -> SnapshotRetrieveResponse
-- client.volumes.snapshots.list(volume_id, \*\*params) -> SnapshotListResponse
-- client.volumes.snapshots.delete(snapshot_id) -> None
-
-# Account
-
-Types:
-
-```python
-from do_gradientai.types import AccountRetrieveResponse
-```
-
-Methods:
+- client.gpu_droplets.volumes.snapshots.create(volume_id, \*\*params) -> SnapshotCreateResponse
+- client.gpu_droplets.volumes.snapshots.retrieve(snapshot_id) -> SnapshotRetrieveResponse
+- client.gpu_droplets.volumes.snapshots.list(volume_id, \*\*params) -> SnapshotListResponse
+- client.gpu_droplets.volumes.snapshots.delete(snapshot_id) -> None
-- client.account.retrieve() -> AccountRetrieveResponse
+## Account
-## Keys
+### Keys
Types:
```python
-from do_gradientai.types.account import (
+from do_gradientai.types.gpu_droplets.account import (
KeyCreateResponse,
KeyRetrieveResponse,
KeyUpdateResponse,
@@ -879,8 +870,8 @@ from do_gradientai.types.account import (
Methods:
-- client.account.keys.create(\*\*params) -> KeyCreateResponse
-- client.account.keys.retrieve(ssh_key_identifier) -> KeyRetrieveResponse
-- client.account.keys.update(ssh_key_identifier, \*\*params) -> KeyUpdateResponse
-- client.account.keys.list(\*\*params) -> KeyListResponse
-- client.account.keys.delete(ssh_key_identifier) -> None
+- client.gpu_droplets.account.keys.create(\*\*params) -> KeyCreateResponse
+- client.gpu_droplets.account.keys.retrieve(ssh_key_identifier) -> KeyRetrieveResponse
+- client.gpu_droplets.account.keys.update(ssh_key_identifier, \*\*params) -> KeyUpdateResponse
+- client.gpu_droplets.account.keys.list(\*\*params) -> KeyListResponse
+- client.gpu_droplets.account.keys.delete(ssh_key_identifier) -> None
diff --git a/pyproject.toml b/pyproject.toml
index f5e5770a..23570f42 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "c63a5cfe-b235-4fbe-8bbb-82a9e02a482a-python"
-version = "0.1.0-alpha.14"
+version = "0.1.0-alpha.15"
description = "The official Python library for GradientAI"
dynamic = ["readme"]
license = "Apache-2.0"
diff --git a/src/do_gradientai/_client.py b/src/do_gradientai/_client.py
index 43bcc2c4..27287ad9 100644
--- a/src/do_gradientai/_client.py
+++ b/src/do_gradientai/_client.py
@@ -34,45 +34,63 @@
if TYPE_CHECKING:
from .resources import (
chat,
- sizes,
agents,
- images,
models,
- account,
regions,
+ inference,
+ gpu_droplets,
+ knowledge_bases,
+ )
+ from .resources.regions import RegionsResource, AsyncRegionsResource
+ from .resources.chat.chat import ChatResource, AsyncChatResource
+ from .resources.gpu_droplets import (
+ GPUDropletsResource,
+ AsyncGPUDropletsResource,
+ sizes,
+ images,
+ account,
volumes,
- droplets,
firewalls,
- inference,
snapshots,
floating_ips,
load_balancers,
- knowledge_bases,
)
- from .resources.sizes import SizesResource, AsyncSizesResource
- from .resources.regions import RegionsResource, AsyncRegionsResource
- from .resources.chat.chat import ChatResource, AsyncChatResource
- from .resources.snapshots import SnapshotsResource, AsyncSnapshotsResource
from .resources.agents.agents import AgentsResource, AsyncAgentsResource
- from .resources.images.images import ImagesResource, AsyncImagesResource
from .resources.models.models import ModelsResource, AsyncModelsResource
- from .resources.account.account import AccountResource, AsyncAccountResource
- from .resources.volumes.volumes import VolumesResource, AsyncVolumesResource
- from .resources.droplets.droplets import DropletsResource, AsyncDropletsResource
- from .resources.firewalls.firewalls import FirewallsResource, AsyncFirewallsResource
+ from .resources.gpu_droplets.sizes import SizesResource, AsyncSizesResource
from .resources.inference.inference import InferenceResource, AsyncInferenceResource
- from .resources.floating_ips.floating_ips import (
- FloatingIPsResource,
- AsyncFloatingIPsResource,
+ from .resources.gpu_droplets.snapshots import (
+ SnapshotsResource,
+ AsyncSnapshotsResource,
)
- from .resources.load_balancers.load_balancers import (
- LoadBalancersResource,
- AsyncLoadBalancersResource,
+ from .resources.gpu_droplets.images.images import (
+ ImagesResource,
+ AsyncImagesResource,
+ )
+ from .resources.gpu_droplets.account.account import (
+ AccountResource,
+ AsyncAccountResource,
+ )
+ from .resources.gpu_droplets.volumes.volumes import (
+ VolumesResource,
+ AsyncVolumesResource,
)
from .resources.knowledge_bases.knowledge_bases import (
KnowledgeBasesResource,
AsyncKnowledgeBasesResource,
)
+ from .resources.gpu_droplets.firewalls.firewalls import (
+ FirewallsResource,
+ AsyncFirewallsResource,
+ )
+ from .resources.gpu_droplets.floating_ips.floating_ips import (
+ FloatingIPsResource,
+ AsyncFloatingIPsResource,
+ )
+ from .resources.gpu_droplets.load_balancers.load_balancers import (
+ LoadBalancersResource,
+ AsyncLoadBalancersResource,
+ )
__all__ = [
"Timeout",
@@ -209,56 +227,56 @@ def models(self) -> ModelsResource:
return ModelsResource(self)
@cached_property
- def droplets(self) -> DropletsResource:
- from .resources.droplets import DropletsResource
+ def gpu_droplets(self) -> GPUDropletsResource:
+ from .resources.gpu_droplets import GPUDropletsResource
- return DropletsResource(self)
+ return GPUDropletsResource(self)
@cached_property
def firewalls(self) -> FirewallsResource:
- from .resources.firewalls import FirewallsResource
+ from .resources.gpu_droplets.firewalls import FirewallsResource
return FirewallsResource(self)
@cached_property
def floating_ips(self) -> FloatingIPsResource:
- from .resources.floating_ips import FloatingIPsResource
+ from .resources.gpu_droplets.floating_ips import FloatingIPsResource
return FloatingIPsResource(self)
@cached_property
def images(self) -> ImagesResource:
- from .resources.images import ImagesResource
+ from .resources.gpu_droplets.images import ImagesResource
return ImagesResource(self)
@cached_property
def load_balancers(self) -> LoadBalancersResource:
- from .resources.load_balancers import LoadBalancersResource
+ from .resources.gpu_droplets.load_balancers import LoadBalancersResource
return LoadBalancersResource(self)
@cached_property
def sizes(self) -> SizesResource:
- from .resources.sizes import SizesResource
+ from .resources.gpu_droplets.sizes import SizesResource
return SizesResource(self)
@cached_property
def snapshots(self) -> SnapshotsResource:
- from .resources.snapshots import SnapshotsResource
+ from .resources.gpu_droplets.snapshots import SnapshotsResource
return SnapshotsResource(self)
@cached_property
def volumes(self) -> VolumesResource:
- from .resources.volumes import VolumesResource
+ from .resources.gpu_droplets.volumes import VolumesResource
return VolumesResource(self)
@cached_property
def account(self) -> AccountResource:
- from .resources.account import AccountResource
+ from .resources.gpu_droplets.account import AccountResource
return AccountResource(self)
@@ -294,9 +312,7 @@ def default_headers(self) -> dict[str, str | Omit]:
@override
def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None:
- if (self.api_key or self.agent_key or self.inference_key) and headers.get(
- "Authorization"
- ):
+ if (self.api_key or self.agent_key or self.inference_key) and headers.get("Authorization"):
return
if isinstance(custom_headers.get("Authorization"), Omit):
return
@@ -326,14 +342,10 @@ def copy(
Create a new client instance re-using the same options given to the current client with optional overriding.
"""
if default_headers is not None and set_default_headers is not None:
- raise ValueError(
- "The `default_headers` and `set_default_headers` arguments are mutually exclusive"
- )
+ raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")
if default_query is not None and set_default_query is not None:
- raise ValueError(
- "The `default_query` and `set_default_query` arguments are mutually exclusive"
- )
+ raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")
headers = self._custom_headers
if default_headers is not None:
@@ -380,14 +392,10 @@ def _make_status_error(
return _exceptions.BadRequestError(err_msg, response=response, body=body)
if response.status_code == 401:
- return _exceptions.AuthenticationError(
- err_msg, response=response, body=body
- )
+ return _exceptions.AuthenticationError(err_msg, response=response, body=body)
if response.status_code == 403:
- return _exceptions.PermissionDeniedError(
- err_msg, response=response, body=body
- )
+ return _exceptions.PermissionDeniedError(err_msg, response=response, body=body)
if response.status_code == 404:
return _exceptions.NotFoundError(err_msg, response=response, body=body)
@@ -396,17 +404,13 @@ def _make_status_error(
return _exceptions.ConflictError(err_msg, response=response, body=body)
if response.status_code == 422:
- return _exceptions.UnprocessableEntityError(
- err_msg, response=response, body=body
- )
+ return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body)
if response.status_code == 429:
return _exceptions.RateLimitError(err_msg, response=response, body=body)
if response.status_code >= 500:
- return _exceptions.InternalServerError(
- err_msg, response=response, body=body
- )
+ return _exceptions.InternalServerError(err_msg, response=response, body=body)
return APIStatusError(err_msg, response=response, body=body)
@@ -533,56 +537,56 @@ def models(self) -> AsyncModelsResource:
return AsyncModelsResource(self)
@cached_property
- def droplets(self) -> AsyncDropletsResource:
- from .resources.droplets import AsyncDropletsResource
+ def gpu_droplets(self) -> AsyncGPUDropletsResource:
+ from .resources.gpu_droplets import AsyncGPUDropletsResource
- return AsyncDropletsResource(self)
+ return AsyncGPUDropletsResource(self)
@cached_property
def firewalls(self) -> AsyncFirewallsResource:
- from .resources.firewalls import AsyncFirewallsResource
+ from .resources.gpu_droplets.firewalls import AsyncFirewallsResource
return AsyncFirewallsResource(self)
@cached_property
def floating_ips(self) -> AsyncFloatingIPsResource:
- from .resources.floating_ips import AsyncFloatingIPsResource
+ from .resources.gpu_droplets.floating_ips import AsyncFloatingIPsResource
return AsyncFloatingIPsResource(self)
@cached_property
def images(self) -> AsyncImagesResource:
- from .resources.images import AsyncImagesResource
+ from .resources.gpu_droplets.images import AsyncImagesResource
return AsyncImagesResource(self)
@cached_property
def load_balancers(self) -> AsyncLoadBalancersResource:
- from .resources.load_balancers import AsyncLoadBalancersResource
+ from .resources.gpu_droplets.load_balancers import AsyncLoadBalancersResource
return AsyncLoadBalancersResource(self)
@cached_property
def sizes(self) -> AsyncSizesResource:
- from .resources.sizes import AsyncSizesResource
+ from .resources.gpu_droplets.sizes import AsyncSizesResource
return AsyncSizesResource(self)
@cached_property
def snapshots(self) -> AsyncSnapshotsResource:
- from .resources.snapshots import AsyncSnapshotsResource
+ from .resources.gpu_droplets.snapshots import AsyncSnapshotsResource
return AsyncSnapshotsResource(self)
@cached_property
def volumes(self) -> AsyncVolumesResource:
- from .resources.volumes import AsyncVolumesResource
+ from .resources.gpu_droplets.volumes import AsyncVolumesResource
return AsyncVolumesResource(self)
@cached_property
def account(self) -> AsyncAccountResource:
- from .resources.account import AsyncAccountResource
+ from .resources.gpu_droplets.account import AsyncAccountResource
return AsyncAccountResource(self)
@@ -618,9 +622,7 @@ def default_headers(self) -> dict[str, str | Omit]:
@override
def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None:
- if (self.api_key or self.agent_key or self.inference_key) and headers.get(
- "Authorization"
- ):
+ if (self.api_key or self.agent_key or self.inference_key) and headers.get("Authorization"):
return
if isinstance(custom_headers.get("Authorization"), Omit):
return
@@ -650,14 +652,10 @@ def copy(
Create a new client instance re-using the same options given to the current client with optional overriding.
"""
if default_headers is not None and set_default_headers is not None:
- raise ValueError(
- "The `default_headers` and `set_default_headers` arguments are mutually exclusive"
- )
+ raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive")
if default_query is not None and set_default_query is not None:
- raise ValueError(
- "The `default_query` and `set_default_query` arguments are mutually exclusive"
- )
+ raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive")
headers = self._custom_headers
if default_headers is not None:
@@ -704,14 +702,10 @@ def _make_status_error(
return _exceptions.BadRequestError(err_msg, response=response, body=body)
if response.status_code == 401:
- return _exceptions.AuthenticationError(
- err_msg, response=response, body=body
- )
+ return _exceptions.AuthenticationError(err_msg, response=response, body=body)
if response.status_code == 403:
- return _exceptions.PermissionDeniedError(
- err_msg, response=response, body=body
- )
+ return _exceptions.PermissionDeniedError(err_msg, response=response, body=body)
if response.status_code == 404:
return _exceptions.NotFoundError(err_msg, response=response, body=body)
@@ -720,17 +714,13 @@ def _make_status_error(
return _exceptions.ConflictError(err_msg, response=response, body=body)
if response.status_code == 422:
- return _exceptions.UnprocessableEntityError(
- err_msg, response=response, body=body
- )
+ return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body)
if response.status_code == 429:
return _exceptions.RateLimitError(err_msg, response=response, body=body)
if response.status_code >= 500:
- return _exceptions.InternalServerError(
- err_msg, response=response, body=body
- )
+ return _exceptions.InternalServerError(err_msg, response=response, body=body)
return APIStatusError(err_msg, response=response, body=body)
@@ -777,56 +767,60 @@ def models(self) -> models.ModelsResourceWithRawResponse:
return ModelsResourceWithRawResponse(self._client.models)
@cached_property
- def droplets(self) -> droplets.DropletsResourceWithRawResponse:
- from .resources.droplets import DropletsResourceWithRawResponse
+ def gpu_droplets(self) -> gpu_droplets.GPUDropletsResourceWithRawResponse:
+ from .resources.gpu_droplets import GPUDropletsResourceWithRawResponse
- return DropletsResourceWithRawResponse(self._client.droplets)
+ return GPUDropletsResourceWithRawResponse(self._client.gpu_droplets)
@cached_property
def firewalls(self) -> firewalls.FirewallsResourceWithRawResponse:
- from .resources.firewalls import FirewallsResourceWithRawResponse
+ from .resources.gpu_droplets.firewalls import FirewallsResourceWithRawResponse
return FirewallsResourceWithRawResponse(self._client.firewalls)
@cached_property
def floating_ips(self) -> floating_ips.FloatingIPsResourceWithRawResponse:
- from .resources.floating_ips import FloatingIPsResourceWithRawResponse
+ from .resources.gpu_droplets.floating_ips import (
+ FloatingIPsResourceWithRawResponse,
+ )
return FloatingIPsResourceWithRawResponse(self._client.floating_ips)
@cached_property
def images(self) -> images.ImagesResourceWithRawResponse:
- from .resources.images import ImagesResourceWithRawResponse
+ from .resources.gpu_droplets.images import ImagesResourceWithRawResponse
return ImagesResourceWithRawResponse(self._client.images)
@cached_property
def load_balancers(self) -> load_balancers.LoadBalancersResourceWithRawResponse:
- from .resources.load_balancers import LoadBalancersResourceWithRawResponse
+ from .resources.gpu_droplets.load_balancers import (
+ LoadBalancersResourceWithRawResponse,
+ )
return LoadBalancersResourceWithRawResponse(self._client.load_balancers)
@cached_property
def sizes(self) -> sizes.SizesResourceWithRawResponse:
- from .resources.sizes import SizesResourceWithRawResponse
+ from .resources.gpu_droplets.sizes import SizesResourceWithRawResponse
return SizesResourceWithRawResponse(self._client.sizes)
@cached_property
def snapshots(self) -> snapshots.SnapshotsResourceWithRawResponse:
- from .resources.snapshots import SnapshotsResourceWithRawResponse
+ from .resources.gpu_droplets.snapshots import SnapshotsResourceWithRawResponse
return SnapshotsResourceWithRawResponse(self._client.snapshots)
@cached_property
def volumes(self) -> volumes.VolumesResourceWithRawResponse:
- from .resources.volumes import VolumesResourceWithRawResponse
+ from .resources.gpu_droplets.volumes import VolumesResourceWithRawResponse
return VolumesResourceWithRawResponse(self._client.volumes)
@cached_property
def account(self) -> account.AccountResourceWithRawResponse:
- from .resources.account import AccountResourceWithRawResponse
+ from .resources.gpu_droplets.account import AccountResourceWithRawResponse
return AccountResourceWithRawResponse(self._client.account)
@@ -878,26 +872,30 @@ def models(self) -> models.AsyncModelsResourceWithRawResponse:
return AsyncModelsResourceWithRawResponse(self._client.models)
@cached_property
- def droplets(self) -> droplets.AsyncDropletsResourceWithRawResponse:
- from .resources.droplets import AsyncDropletsResourceWithRawResponse
+ def gpu_droplets(self) -> gpu_droplets.AsyncGPUDropletsResourceWithRawResponse:
+ from .resources.gpu_droplets import AsyncGPUDropletsResourceWithRawResponse
- return AsyncDropletsResourceWithRawResponse(self._client.droplets)
+ return AsyncGPUDropletsResourceWithRawResponse(self._client.gpu_droplets)
@cached_property
def firewalls(self) -> firewalls.AsyncFirewallsResourceWithRawResponse:
- from .resources.firewalls import AsyncFirewallsResourceWithRawResponse
+ from .resources.gpu_droplets.firewalls import (
+ AsyncFirewallsResourceWithRawResponse,
+ )
return AsyncFirewallsResourceWithRawResponse(self._client.firewalls)
@cached_property
def floating_ips(self) -> floating_ips.AsyncFloatingIPsResourceWithRawResponse:
- from .resources.floating_ips import AsyncFloatingIPsResourceWithRawResponse
+ from .resources.gpu_droplets.floating_ips import (
+ AsyncFloatingIPsResourceWithRawResponse,
+ )
return AsyncFloatingIPsResourceWithRawResponse(self._client.floating_ips)
@cached_property
def images(self) -> images.AsyncImagesResourceWithRawResponse:
- from .resources.images import AsyncImagesResourceWithRawResponse
+ from .resources.gpu_droplets.images import AsyncImagesResourceWithRawResponse
return AsyncImagesResourceWithRawResponse(self._client.images)
@@ -905,31 +903,35 @@ def images(self) -> images.AsyncImagesResourceWithRawResponse:
def load_balancers(
self,
) -> load_balancers.AsyncLoadBalancersResourceWithRawResponse:
- from .resources.load_balancers import AsyncLoadBalancersResourceWithRawResponse
+ from .resources.gpu_droplets.load_balancers import (
+ AsyncLoadBalancersResourceWithRawResponse,
+ )
return AsyncLoadBalancersResourceWithRawResponse(self._client.load_balancers)
@cached_property
def sizes(self) -> sizes.AsyncSizesResourceWithRawResponse:
- from .resources.sizes import AsyncSizesResourceWithRawResponse
+ from .resources.gpu_droplets.sizes import AsyncSizesResourceWithRawResponse
return AsyncSizesResourceWithRawResponse(self._client.sizes)
@cached_property
def snapshots(self) -> snapshots.AsyncSnapshotsResourceWithRawResponse:
- from .resources.snapshots import AsyncSnapshotsResourceWithRawResponse
+ from .resources.gpu_droplets.snapshots import (
+ AsyncSnapshotsResourceWithRawResponse,
+ )
return AsyncSnapshotsResourceWithRawResponse(self._client.snapshots)
@cached_property
def volumes(self) -> volumes.AsyncVolumesResourceWithRawResponse:
- from .resources.volumes import AsyncVolumesResourceWithRawResponse
+ from .resources.gpu_droplets.volumes import AsyncVolumesResourceWithRawResponse
return AsyncVolumesResourceWithRawResponse(self._client.volumes)
@cached_property
def account(self) -> account.AsyncAccountResourceWithRawResponse:
- from .resources.account import AsyncAccountResourceWithRawResponse
+ from .resources.gpu_droplets.account import AsyncAccountResourceWithRawResponse
return AsyncAccountResourceWithRawResponse(self._client.account)
@@ -981,26 +983,30 @@ def models(self) -> models.ModelsResourceWithStreamingResponse:
return ModelsResourceWithStreamingResponse(self._client.models)
@cached_property
- def droplets(self) -> droplets.DropletsResourceWithStreamingResponse:
- from .resources.droplets import DropletsResourceWithStreamingResponse
+ def gpu_droplets(self) -> gpu_droplets.GPUDropletsResourceWithStreamingResponse:
+ from .resources.gpu_droplets import GPUDropletsResourceWithStreamingResponse
- return DropletsResourceWithStreamingResponse(self._client.droplets)
+ return GPUDropletsResourceWithStreamingResponse(self._client.gpu_droplets)
@cached_property
def firewalls(self) -> firewalls.FirewallsResourceWithStreamingResponse:
- from .resources.firewalls import FirewallsResourceWithStreamingResponse
+ from .resources.gpu_droplets.firewalls import (
+ FirewallsResourceWithStreamingResponse,
+ )
return FirewallsResourceWithStreamingResponse(self._client.firewalls)
@cached_property
def floating_ips(self) -> floating_ips.FloatingIPsResourceWithStreamingResponse:
- from .resources.floating_ips import FloatingIPsResourceWithStreamingResponse
+ from .resources.gpu_droplets.floating_ips import (
+ FloatingIPsResourceWithStreamingResponse,
+ )
return FloatingIPsResourceWithStreamingResponse(self._client.floating_ips)
@cached_property
def images(self) -> images.ImagesResourceWithStreamingResponse:
- from .resources.images import ImagesResourceWithStreamingResponse
+ from .resources.gpu_droplets.images import ImagesResourceWithStreamingResponse
return ImagesResourceWithStreamingResponse(self._client.images)
@@ -1008,31 +1014,35 @@ def images(self) -> images.ImagesResourceWithStreamingResponse:
def load_balancers(
self,
) -> load_balancers.LoadBalancersResourceWithStreamingResponse:
- from .resources.load_balancers import LoadBalancersResourceWithStreamingResponse
+ from .resources.gpu_droplets.load_balancers import (
+ LoadBalancersResourceWithStreamingResponse,
+ )
return LoadBalancersResourceWithStreamingResponse(self._client.load_balancers)
@cached_property
def sizes(self) -> sizes.SizesResourceWithStreamingResponse:
- from .resources.sizes import SizesResourceWithStreamingResponse
+ from .resources.gpu_droplets.sizes import SizesResourceWithStreamingResponse
return SizesResourceWithStreamingResponse(self._client.sizes)
@cached_property
def snapshots(self) -> snapshots.SnapshotsResourceWithStreamingResponse:
- from .resources.snapshots import SnapshotsResourceWithStreamingResponse
+ from .resources.gpu_droplets.snapshots import (
+ SnapshotsResourceWithStreamingResponse,
+ )
return SnapshotsResourceWithStreamingResponse(self._client.snapshots)
@cached_property
def volumes(self) -> volumes.VolumesResourceWithStreamingResponse:
- from .resources.volumes import VolumesResourceWithStreamingResponse
+ from .resources.gpu_droplets.volumes import VolumesResourceWithStreamingResponse
return VolumesResourceWithStreamingResponse(self._client.volumes)
@cached_property
def account(self) -> account.AccountResourceWithStreamingResponse:
- from .resources.account import AccountResourceWithStreamingResponse
+ from .resources.gpu_droplets.account import AccountResourceWithStreamingResponse
return AccountResourceWithStreamingResponse(self._client.account)
@@ -1069,9 +1079,7 @@ def knowledge_bases(
AsyncKnowledgeBasesResourceWithStreamingResponse,
)
- return AsyncKnowledgeBasesResourceWithStreamingResponse(
- self._client.knowledge_bases
- )
+ return AsyncKnowledgeBasesResourceWithStreamingResponse(self._client.knowledge_bases)
@cached_property
def inference(self) -> inference.AsyncInferenceResourceWithStreamingResponse:
@@ -1086,14 +1094,20 @@ def models(self) -> models.AsyncModelsResourceWithStreamingResponse:
return AsyncModelsResourceWithStreamingResponse(self._client.models)
@cached_property
- def droplets(self) -> droplets.AsyncDropletsResourceWithStreamingResponse:
- from .resources.droplets import AsyncDropletsResourceWithStreamingResponse
+ def gpu_droplets(
+ self,
+ ) -> gpu_droplets.AsyncGPUDropletsResourceWithStreamingResponse:
+ from .resources.gpu_droplets import (
+ AsyncGPUDropletsResourceWithStreamingResponse,
+ )
- return AsyncDropletsResourceWithStreamingResponse(self._client.droplets)
+ return AsyncGPUDropletsResourceWithStreamingResponse(self._client.gpu_droplets)
@cached_property
def firewalls(self) -> firewalls.AsyncFirewallsResourceWithStreamingResponse:
- from .resources.firewalls import AsyncFirewallsResourceWithStreamingResponse
+ from .resources.gpu_droplets.firewalls import (
+ AsyncFirewallsResourceWithStreamingResponse,
+ )
return AsyncFirewallsResourceWithStreamingResponse(self._client.firewalls)
@@ -1101,7 +1115,7 @@ def firewalls(self) -> firewalls.AsyncFirewallsResourceWithStreamingResponse:
def floating_ips(
self,
) -> floating_ips.AsyncFloatingIPsResourceWithStreamingResponse:
- from .resources.floating_ips import (
+ from .resources.gpu_droplets.floating_ips import (
AsyncFloatingIPsResourceWithStreamingResponse,
)
@@ -1109,7 +1123,9 @@ def floating_ips(
@cached_property
def images(self) -> images.AsyncImagesResourceWithStreamingResponse:
- from .resources.images import AsyncImagesResourceWithStreamingResponse
+ from .resources.gpu_droplets.images import (
+ AsyncImagesResourceWithStreamingResponse,
+ )
return AsyncImagesResourceWithStreamingResponse(self._client.images)
@@ -1117,35 +1133,41 @@ def images(self) -> images.AsyncImagesResourceWithStreamingResponse:
def load_balancers(
self,
) -> load_balancers.AsyncLoadBalancersResourceWithStreamingResponse:
- from .resources.load_balancers import (
+ from .resources.gpu_droplets.load_balancers import (
AsyncLoadBalancersResourceWithStreamingResponse,
)
- return AsyncLoadBalancersResourceWithStreamingResponse(
- self._client.load_balancers
- )
+ return AsyncLoadBalancersResourceWithStreamingResponse(self._client.load_balancers)
@cached_property
def sizes(self) -> sizes.AsyncSizesResourceWithStreamingResponse:
- from .resources.sizes import AsyncSizesResourceWithStreamingResponse
+ from .resources.gpu_droplets.sizes import (
+ AsyncSizesResourceWithStreamingResponse,
+ )
return AsyncSizesResourceWithStreamingResponse(self._client.sizes)
@cached_property
def snapshots(self) -> snapshots.AsyncSnapshotsResourceWithStreamingResponse:
- from .resources.snapshots import AsyncSnapshotsResourceWithStreamingResponse
+ from .resources.gpu_droplets.snapshots import (
+ AsyncSnapshotsResourceWithStreamingResponse,
+ )
return AsyncSnapshotsResourceWithStreamingResponse(self._client.snapshots)
@cached_property
def volumes(self) -> volumes.AsyncVolumesResourceWithStreamingResponse:
- from .resources.volumes import AsyncVolumesResourceWithStreamingResponse
+ from .resources.gpu_droplets.volumes import (
+ AsyncVolumesResourceWithStreamingResponse,
+ )
return AsyncVolumesResourceWithStreamingResponse(self._client.volumes)
@cached_property
def account(self) -> account.AsyncAccountResourceWithStreamingResponse:
- from .resources.account import AsyncAccountResourceWithStreamingResponse
+ from .resources.gpu_droplets.account import (
+ AsyncAccountResourceWithStreamingResponse,
+ )
return AsyncAccountResourceWithStreamingResponse(self._client.account)
diff --git a/src/do_gradientai/_version.py b/src/do_gradientai/_version.py
index d69cef74..d0c1c939 100644
--- a/src/do_gradientai/_version.py
+++ b/src/do_gradientai/_version.py
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
__title__ = "do_gradientai"
-__version__ = "0.1.0-alpha.14" # x-release-please-version
+__version__ = "0.1.0-alpha.15" # x-release-please-version
diff --git a/src/do_gradientai/resources/__init__.py b/src/do_gradientai/resources/__init__.py
index 29fcc7e9..fd6da608 100644
--- a/src/do_gradientai/resources/__init__.py
+++ b/src/do_gradientai/resources/__init__.py
@@ -8,14 +8,6 @@
ChatResourceWithStreamingResponse,
AsyncChatResourceWithStreamingResponse,
)
-from .sizes import (
- SizesResource,
- AsyncSizesResource,
- SizesResourceWithRawResponse,
- AsyncSizesResourceWithRawResponse,
- SizesResourceWithStreamingResponse,
- AsyncSizesResourceWithStreamingResponse,
-)
from .agents import (
AgentsResource,
AsyncAgentsResource,
@@ -24,14 +16,6 @@
AgentsResourceWithStreamingResponse,
AsyncAgentsResourceWithStreamingResponse,
)
-from .images import (
- ImagesResource,
- AsyncImagesResource,
- ImagesResourceWithRawResponse,
- AsyncImagesResourceWithRawResponse,
- ImagesResourceWithStreamingResponse,
- AsyncImagesResourceWithStreamingResponse,
-)
from .models import (
ModelsResource,
AsyncModelsResource,
@@ -40,14 +24,6 @@
ModelsResourceWithStreamingResponse,
AsyncModelsResourceWithStreamingResponse,
)
-from .account import (
- AccountResource,
- AsyncAccountResource,
- AccountResourceWithRawResponse,
- AsyncAccountResourceWithRawResponse,
- AccountResourceWithStreamingResponse,
- AsyncAccountResourceWithStreamingResponse,
-)
from .regions import (
RegionsResource,
AsyncRegionsResource,
@@ -56,30 +32,6 @@
RegionsResourceWithStreamingResponse,
AsyncRegionsResourceWithStreamingResponse,
)
-from .volumes import (
- VolumesResource,
- AsyncVolumesResource,
- VolumesResourceWithRawResponse,
- AsyncVolumesResourceWithRawResponse,
- VolumesResourceWithStreamingResponse,
- AsyncVolumesResourceWithStreamingResponse,
-)
-from .droplets import (
- DropletsResource,
- AsyncDropletsResource,
- DropletsResourceWithRawResponse,
- AsyncDropletsResourceWithRawResponse,
- DropletsResourceWithStreamingResponse,
- AsyncDropletsResourceWithStreamingResponse,
-)
-from .firewalls import (
- FirewallsResource,
- AsyncFirewallsResource,
- FirewallsResourceWithRawResponse,
- AsyncFirewallsResourceWithRawResponse,
- FirewallsResourceWithStreamingResponse,
- AsyncFirewallsResourceWithStreamingResponse,
-)
from .inference import (
InferenceResource,
AsyncInferenceResource,
@@ -88,29 +40,13 @@
InferenceResourceWithStreamingResponse,
AsyncInferenceResourceWithStreamingResponse,
)
-from .snapshots import (
- SnapshotsResource,
- AsyncSnapshotsResource,
- SnapshotsResourceWithRawResponse,
- AsyncSnapshotsResourceWithRawResponse,
- SnapshotsResourceWithStreamingResponse,
- AsyncSnapshotsResourceWithStreamingResponse,
-)
-from .floating_ips import (
- FloatingIPsResource,
- AsyncFloatingIPsResource,
- FloatingIPsResourceWithRawResponse,
- AsyncFloatingIPsResourceWithRawResponse,
- FloatingIPsResourceWithStreamingResponse,
- AsyncFloatingIPsResourceWithStreamingResponse,
-)
-from .load_balancers import (
- LoadBalancersResource,
- AsyncLoadBalancersResource,
- LoadBalancersResourceWithRawResponse,
- AsyncLoadBalancersResourceWithRawResponse,
- LoadBalancersResourceWithStreamingResponse,
- AsyncLoadBalancersResourceWithStreamingResponse,
+from .gpu_droplets import (
+ GPUDropletsResource,
+ AsyncGPUDropletsResource,
+ GPUDropletsResourceWithRawResponse,
+ AsyncGPUDropletsResourceWithRawResponse,
+ GPUDropletsResourceWithStreamingResponse,
+ AsyncGPUDropletsResourceWithStreamingResponse,
)
from .knowledge_bases import (
KnowledgeBasesResource,
@@ -158,58 +94,10 @@
"AsyncModelsResourceWithRawResponse",
"ModelsResourceWithStreamingResponse",
"AsyncModelsResourceWithStreamingResponse",
- "DropletsResource",
- "AsyncDropletsResource",
- "DropletsResourceWithRawResponse",
- "AsyncDropletsResourceWithRawResponse",
- "DropletsResourceWithStreamingResponse",
- "AsyncDropletsResourceWithStreamingResponse",
- "FirewallsResource",
- "AsyncFirewallsResource",
- "FirewallsResourceWithRawResponse",
- "AsyncFirewallsResourceWithRawResponse",
- "FirewallsResourceWithStreamingResponse",
- "AsyncFirewallsResourceWithStreamingResponse",
- "FloatingIPsResource",
- "AsyncFloatingIPsResource",
- "FloatingIPsResourceWithRawResponse",
- "AsyncFloatingIPsResourceWithRawResponse",
- "FloatingIPsResourceWithStreamingResponse",
- "AsyncFloatingIPsResourceWithStreamingResponse",
- "ImagesResource",
- "AsyncImagesResource",
- "ImagesResourceWithRawResponse",
- "AsyncImagesResourceWithRawResponse",
- "ImagesResourceWithStreamingResponse",
- "AsyncImagesResourceWithStreamingResponse",
- "LoadBalancersResource",
- "AsyncLoadBalancersResource",
- "LoadBalancersResourceWithRawResponse",
- "AsyncLoadBalancersResourceWithRawResponse",
- "LoadBalancersResourceWithStreamingResponse",
- "AsyncLoadBalancersResourceWithStreamingResponse",
- "SizesResource",
- "AsyncSizesResource",
- "SizesResourceWithRawResponse",
- "AsyncSizesResourceWithRawResponse",
- "SizesResourceWithStreamingResponse",
- "AsyncSizesResourceWithStreamingResponse",
- "SnapshotsResource",
- "AsyncSnapshotsResource",
- "SnapshotsResourceWithRawResponse",
- "AsyncSnapshotsResourceWithRawResponse",
- "SnapshotsResourceWithStreamingResponse",
- "AsyncSnapshotsResourceWithStreamingResponse",
- "VolumesResource",
- "AsyncVolumesResource",
- "VolumesResourceWithRawResponse",
- "AsyncVolumesResourceWithRawResponse",
- "VolumesResourceWithStreamingResponse",
- "AsyncVolumesResourceWithStreamingResponse",
- "AccountResource",
- "AsyncAccountResource",
- "AccountResourceWithRawResponse",
- "AsyncAccountResourceWithRawResponse",
- "AccountResourceWithStreamingResponse",
- "AsyncAccountResourceWithStreamingResponse",
+ "GPUDropletsResource",
+ "AsyncGPUDropletsResource",
+ "GPUDropletsResourceWithRawResponse",
+ "AsyncGPUDropletsResourceWithRawResponse",
+ "GPUDropletsResourceWithStreamingResponse",
+ "AsyncGPUDropletsResourceWithStreamingResponse",
]
diff --git a/src/do_gradientai/resources/agents/chat/completions.py b/src/do_gradientai/resources/agents/chat/completions.py
index 96a6d843..23b17011 100644
--- a/src/do_gradientai/resources/agents/chat/completions.py
+++ b/src/do_gradientai/resources/agents/chat/completions.py
@@ -62,9 +62,7 @@ def create(
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
- stream_options: (
- Optional[completion_create_params.StreamOptions] | NotGiven
- ) = NOT_GIVEN,
+ stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
@@ -192,9 +190,7 @@ def create(
n: Optional[int] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
- stream_options: (
- Optional[completion_create_params.StreamOptions] | NotGiven
- ) = NOT_GIVEN,
+ stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
@@ -322,9 +318,7 @@ def create(
n: Optional[int] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
- stream_options: (
- Optional[completion_create_params.StreamOptions] | NotGiven
- ) = NOT_GIVEN,
+ stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
@@ -455,9 +449,7 @@ def create(
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
- stream_options: (
- Optional[completion_create_params.StreamOptions] | NotGiven
- ) = NOT_GIVEN,
+ stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
@@ -561,9 +553,7 @@ async def create(
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN,
- stream_options: (
- Optional[completion_create_params.StreamOptions] | NotGiven
- ) = NOT_GIVEN,
+ stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
@@ -691,9 +681,7 @@ async def create(
n: Optional[int] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
- stream_options: (
- Optional[completion_create_params.StreamOptions] | NotGiven
- ) = NOT_GIVEN,
+ stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
@@ -821,9 +809,7 @@ async def create(
n: Optional[int] | NotGiven = NOT_GIVEN,
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
- stream_options: (
- Optional[completion_create_params.StreamOptions] | NotGiven
- ) = NOT_GIVEN,
+ stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
@@ -951,9 +937,7 @@ async def create(
presence_penalty: Optional[float] | NotGiven = NOT_GIVEN,
stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN,
stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN,
- stream_options: (
- Optional[completion_create_params.StreamOptions] | NotGiven
- ) = NOT_GIVEN,
+ stream_options: (Optional[completion_create_params.StreamOptions] | NotGiven) = NOT_GIVEN,
temperature: Optional[float] | NotGiven = NOT_GIVEN,
tool_choice: completion_create_params.ToolChoice | NotGiven = NOT_GIVEN,
tools: Iterable[completion_create_params.Tool] | NotGiven = NOT_GIVEN,
diff --git a/src/do_gradientai/resources/droplets/__init__.py b/src/do_gradientai/resources/droplets/__init__.py
deleted file mode 100644
index 284925dc..00000000
--- a/src/do_gradientai/resources/droplets/__init__.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from .actions import (
- ActionsResource,
- AsyncActionsResource,
- ActionsResourceWithRawResponse,
- AsyncActionsResourceWithRawResponse,
- ActionsResourceWithStreamingResponse,
- AsyncActionsResourceWithStreamingResponse,
-)
-from .backups import (
- BackupsResource,
- AsyncBackupsResource,
- BackupsResourceWithRawResponse,
- AsyncBackupsResourceWithRawResponse,
- BackupsResourceWithStreamingResponse,
- AsyncBackupsResourceWithStreamingResponse,
-)
-from .droplets import (
- DropletsResource,
- AsyncDropletsResource,
- DropletsResourceWithRawResponse,
- AsyncDropletsResourceWithRawResponse,
- DropletsResourceWithStreamingResponse,
- AsyncDropletsResourceWithStreamingResponse,
-)
-from .autoscale import (
- AutoscaleResource,
- AsyncAutoscaleResource,
- AutoscaleResourceWithRawResponse,
- AsyncAutoscaleResourceWithRawResponse,
- AutoscaleResourceWithStreamingResponse,
- AsyncAutoscaleResourceWithStreamingResponse,
-)
-from .destroy_with_associated_resources import (
- DestroyWithAssociatedResourcesResource,
- AsyncDestroyWithAssociatedResourcesResource,
- DestroyWithAssociatedResourcesResourceWithRawResponse,
- AsyncDestroyWithAssociatedResourcesResourceWithRawResponse,
- DestroyWithAssociatedResourcesResourceWithStreamingResponse,
- AsyncDestroyWithAssociatedResourcesResourceWithStreamingResponse,
-)
-
-__all__ = [
- "BackupsResource",
- "AsyncBackupsResource",
- "BackupsResourceWithRawResponse",
- "AsyncBackupsResourceWithRawResponse",
- "BackupsResourceWithStreamingResponse",
- "AsyncBackupsResourceWithStreamingResponse",
- "ActionsResource",
- "AsyncActionsResource",
- "ActionsResourceWithRawResponse",
- "AsyncActionsResourceWithRawResponse",
- "ActionsResourceWithStreamingResponse",
- "AsyncActionsResourceWithStreamingResponse",
- "DestroyWithAssociatedResourcesResource",
- "AsyncDestroyWithAssociatedResourcesResource",
- "DestroyWithAssociatedResourcesResourceWithRawResponse",
- "AsyncDestroyWithAssociatedResourcesResourceWithRawResponse",
- "DestroyWithAssociatedResourcesResourceWithStreamingResponse",
- "AsyncDestroyWithAssociatedResourcesResourceWithStreamingResponse",
- "AutoscaleResource",
- "AsyncAutoscaleResource",
- "AutoscaleResourceWithRawResponse",
- "AsyncAutoscaleResourceWithRawResponse",
- "AutoscaleResourceWithStreamingResponse",
- "AsyncAutoscaleResourceWithStreamingResponse",
- "DropletsResource",
- "AsyncDropletsResource",
- "DropletsResourceWithRawResponse",
- "AsyncDropletsResourceWithRawResponse",
- "DropletsResourceWithStreamingResponse",
- "AsyncDropletsResourceWithStreamingResponse",
-]
diff --git a/src/do_gradientai/resources/gpu_droplets/__init__.py b/src/do_gradientai/resources/gpu_droplets/__init__.py
new file mode 100644
index 00000000..064a36ce
--- /dev/null
+++ b/src/do_gradientai/resources/gpu_droplets/__init__.py
@@ -0,0 +1,187 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from .sizes import (
+ SizesResource,
+ AsyncSizesResource,
+ SizesResourceWithRawResponse,
+ AsyncSizesResourceWithRawResponse,
+ SizesResourceWithStreamingResponse,
+ AsyncSizesResourceWithStreamingResponse,
+)
+from .images import (
+ ImagesResource,
+ AsyncImagesResource,
+ ImagesResourceWithRawResponse,
+ AsyncImagesResourceWithRawResponse,
+ ImagesResourceWithStreamingResponse,
+ AsyncImagesResourceWithStreamingResponse,
+)
+from .account import (
+ AccountResource,
+ AsyncAccountResource,
+ AccountResourceWithRawResponse,
+ AsyncAccountResourceWithRawResponse,
+ AccountResourceWithStreamingResponse,
+ AsyncAccountResourceWithStreamingResponse,
+)
+from .actions import (
+ ActionsResource,
+ AsyncActionsResource,
+ ActionsResourceWithRawResponse,
+ AsyncActionsResourceWithRawResponse,
+ ActionsResourceWithStreamingResponse,
+ AsyncActionsResourceWithStreamingResponse,
+)
+from .backups import (
+ BackupsResource,
+ AsyncBackupsResource,
+ BackupsResourceWithRawResponse,
+ AsyncBackupsResourceWithRawResponse,
+ BackupsResourceWithStreamingResponse,
+ AsyncBackupsResourceWithStreamingResponse,
+)
+from .volumes import (
+ VolumesResource,
+ AsyncVolumesResource,
+ VolumesResourceWithRawResponse,
+ AsyncVolumesResourceWithRawResponse,
+ VolumesResourceWithStreamingResponse,
+ AsyncVolumesResourceWithStreamingResponse,
+)
+from .autoscale import (
+ AutoscaleResource,
+ AsyncAutoscaleResource,
+ AutoscaleResourceWithRawResponse,
+ AsyncAutoscaleResourceWithRawResponse,
+ AutoscaleResourceWithStreamingResponse,
+ AsyncAutoscaleResourceWithStreamingResponse,
+)
+from .firewalls import (
+ FirewallsResource,
+ AsyncFirewallsResource,
+ FirewallsResourceWithRawResponse,
+ AsyncFirewallsResourceWithRawResponse,
+ FirewallsResourceWithStreamingResponse,
+ AsyncFirewallsResourceWithStreamingResponse,
+)
+from .snapshots import (
+ SnapshotsResource,
+ AsyncSnapshotsResource,
+ SnapshotsResourceWithRawResponse,
+ AsyncSnapshotsResourceWithRawResponse,
+ SnapshotsResourceWithStreamingResponse,
+ AsyncSnapshotsResourceWithStreamingResponse,
+)
+from .floating_ips import (
+ FloatingIPsResource,
+ AsyncFloatingIPsResource,
+ FloatingIPsResourceWithRawResponse,
+ AsyncFloatingIPsResourceWithRawResponse,
+ FloatingIPsResourceWithStreamingResponse,
+ AsyncFloatingIPsResourceWithStreamingResponse,
+)
+from .gpu_droplets import (
+ GPUDropletsResource,
+ AsyncGPUDropletsResource,
+ GPUDropletsResourceWithRawResponse,
+ AsyncGPUDropletsResourceWithRawResponse,
+ GPUDropletsResourceWithStreamingResponse,
+ AsyncGPUDropletsResourceWithStreamingResponse,
+)
+from .load_balancers import (
+ LoadBalancersResource,
+ AsyncLoadBalancersResource,
+ LoadBalancersResourceWithRawResponse,
+ AsyncLoadBalancersResourceWithRawResponse,
+ LoadBalancersResourceWithStreamingResponse,
+ AsyncLoadBalancersResourceWithStreamingResponse,
+)
+from .destroy_with_associated_resources import (
+ DestroyWithAssociatedResourcesResource,
+ AsyncDestroyWithAssociatedResourcesResource,
+ DestroyWithAssociatedResourcesResourceWithRawResponse,
+ AsyncDestroyWithAssociatedResourcesResourceWithRawResponse,
+ DestroyWithAssociatedResourcesResourceWithStreamingResponse,
+ AsyncDestroyWithAssociatedResourcesResourceWithStreamingResponse,
+)
+
+__all__ = [
+ "BackupsResource",
+ "AsyncBackupsResource",
+ "BackupsResourceWithRawResponse",
+ "AsyncBackupsResourceWithRawResponse",
+ "BackupsResourceWithStreamingResponse",
+ "AsyncBackupsResourceWithStreamingResponse",
+ "ActionsResource",
+ "AsyncActionsResource",
+ "ActionsResourceWithRawResponse",
+ "AsyncActionsResourceWithRawResponse",
+ "ActionsResourceWithStreamingResponse",
+ "AsyncActionsResourceWithStreamingResponse",
+ "DestroyWithAssociatedResourcesResource",
+ "AsyncDestroyWithAssociatedResourcesResource",
+ "DestroyWithAssociatedResourcesResourceWithRawResponse",
+ "AsyncDestroyWithAssociatedResourcesResourceWithRawResponse",
+ "DestroyWithAssociatedResourcesResourceWithStreamingResponse",
+ "AsyncDestroyWithAssociatedResourcesResourceWithStreamingResponse",
+ "AutoscaleResource",
+ "AsyncAutoscaleResource",
+ "AutoscaleResourceWithRawResponse",
+ "AsyncAutoscaleResourceWithRawResponse",
+ "AutoscaleResourceWithStreamingResponse",
+ "AsyncAutoscaleResourceWithStreamingResponse",
+ "FirewallsResource",
+ "AsyncFirewallsResource",
+ "FirewallsResourceWithRawResponse",
+ "AsyncFirewallsResourceWithRawResponse",
+ "FirewallsResourceWithStreamingResponse",
+ "AsyncFirewallsResourceWithStreamingResponse",
+ "FloatingIPsResource",
+ "AsyncFloatingIPsResource",
+ "FloatingIPsResourceWithRawResponse",
+ "AsyncFloatingIPsResourceWithRawResponse",
+ "FloatingIPsResourceWithStreamingResponse",
+ "AsyncFloatingIPsResourceWithStreamingResponse",
+ "ImagesResource",
+ "AsyncImagesResource",
+ "ImagesResourceWithRawResponse",
+ "AsyncImagesResourceWithRawResponse",
+ "ImagesResourceWithStreamingResponse",
+ "AsyncImagesResourceWithStreamingResponse",
+ "LoadBalancersResource",
+ "AsyncLoadBalancersResource",
+ "LoadBalancersResourceWithRawResponse",
+ "AsyncLoadBalancersResourceWithRawResponse",
+ "LoadBalancersResourceWithStreamingResponse",
+ "AsyncLoadBalancersResourceWithStreamingResponse",
+ "SizesResource",
+ "AsyncSizesResource",
+ "SizesResourceWithRawResponse",
+ "AsyncSizesResourceWithRawResponse",
+ "SizesResourceWithStreamingResponse",
+ "AsyncSizesResourceWithStreamingResponse",
+ "SnapshotsResource",
+ "AsyncSnapshotsResource",
+ "SnapshotsResourceWithRawResponse",
+ "AsyncSnapshotsResourceWithRawResponse",
+ "SnapshotsResourceWithStreamingResponse",
+ "AsyncSnapshotsResourceWithStreamingResponse",
+ "VolumesResource",
+ "AsyncVolumesResource",
+ "VolumesResourceWithRawResponse",
+ "AsyncVolumesResourceWithRawResponse",
+ "VolumesResourceWithStreamingResponse",
+ "AsyncVolumesResourceWithStreamingResponse",
+ "AccountResource",
+ "AsyncAccountResource",
+ "AccountResourceWithRawResponse",
+ "AsyncAccountResourceWithRawResponse",
+ "AccountResourceWithStreamingResponse",
+ "AsyncAccountResourceWithStreamingResponse",
+ "GPUDropletsResource",
+ "AsyncGPUDropletsResource",
+ "GPUDropletsResourceWithRawResponse",
+ "AsyncGPUDropletsResourceWithRawResponse",
+ "GPUDropletsResourceWithStreamingResponse",
+ "AsyncGPUDropletsResourceWithStreamingResponse",
+]
diff --git a/src/do_gradientai/resources/account/__init__.py b/src/do_gradientai/resources/gpu_droplets/account/__init__.py
similarity index 100%
rename from src/do_gradientai/resources/account/__init__.py
rename to src/do_gradientai/resources/gpu_droplets/account/__init__.py
diff --git a/src/do_gradientai/resources/account/account.py b/src/do_gradientai/resources/gpu_droplets/account/account.py
similarity index 55%
rename from src/do_gradientai/resources/account/account.py
rename to src/do_gradientai/resources/gpu_droplets/account/account.py
index 7af8d0e1..d61fb68b 100644
--- a/src/do_gradientai/resources/account/account.py
+++ b/src/do_gradientai/resources/gpu_droplets/account/account.py
@@ -2,8 +2,6 @@
from __future__ import annotations
-import httpx
-
from .keys import (
KeysResource,
AsyncKeysResource,
@@ -12,17 +10,8 @@
KeysResourceWithStreamingResponse,
AsyncKeysResourceWithStreamingResponse,
)
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
- to_raw_response_wrapper,
- to_streamed_response_wrapper,
- async_to_raw_response_wrapper,
- async_to_streamed_response_wrapper,
-)
-from ..._base_client import make_request_options
-from ...types.account_retrieve_response import AccountRetrieveResponse
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
__all__ = ["AccountResource", "AsyncAccountResource"]
@@ -51,28 +40,6 @@ def with_streaming_response(self) -> AccountResourceWithStreamingResponse:
"""
return AccountResourceWithStreamingResponse(self)
- def retrieve(
- self,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AccountRetrieveResponse:
- """
- To show information about the current user account, send a GET request to
- `/v2/account`.
- """
- return self._get(
- "/v2/account" if self._client._base_url_overridden else "https://api.digitalocean.com/v2/account",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=AccountRetrieveResponse,
- )
-
class AsyncAccountResource(AsyncAPIResource):
@cached_property
@@ -98,37 +65,11 @@ def with_streaming_response(self) -> AsyncAccountResourceWithStreamingResponse:
"""
return AsyncAccountResourceWithStreamingResponse(self)
- async def retrieve(
- self,
- *,
- # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
- # The extra values given here take precedence over values defined on the client or passed to this method.
- extra_headers: Headers | None = None,
- extra_query: Query | None = None,
- extra_body: Body | None = None,
- timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AccountRetrieveResponse:
- """
- To show information about the current user account, send a GET request to
- `/v2/account`.
- """
- return await self._get(
- "/v2/account" if self._client._base_url_overridden else "https://api.digitalocean.com/v2/account",
- options=make_request_options(
- extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
- ),
- cast_to=AccountRetrieveResponse,
- )
-
class AccountResourceWithRawResponse:
def __init__(self, account: AccountResource) -> None:
self._account = account
- self.retrieve = to_raw_response_wrapper(
- account.retrieve,
- )
-
@cached_property
def keys(self) -> KeysResourceWithRawResponse:
return KeysResourceWithRawResponse(self._account.keys)
@@ -138,10 +79,6 @@ class AsyncAccountResourceWithRawResponse:
def __init__(self, account: AsyncAccountResource) -> None:
self._account = account
- self.retrieve = async_to_raw_response_wrapper(
- account.retrieve,
- )
-
@cached_property
def keys(self) -> AsyncKeysResourceWithRawResponse:
return AsyncKeysResourceWithRawResponse(self._account.keys)
@@ -151,10 +88,6 @@ class AccountResourceWithStreamingResponse:
def __init__(self, account: AccountResource) -> None:
self._account = account
- self.retrieve = to_streamed_response_wrapper(
- account.retrieve,
- )
-
@cached_property
def keys(self) -> KeysResourceWithStreamingResponse:
return KeysResourceWithStreamingResponse(self._account.keys)
@@ -164,10 +97,6 @@ class AsyncAccountResourceWithStreamingResponse:
def __init__(self, account: AsyncAccountResource) -> None:
self._account = account
- self.retrieve = async_to_streamed_response_wrapper(
- account.retrieve,
- )
-
@cached_property
def keys(self) -> AsyncKeysResourceWithStreamingResponse:
return AsyncKeysResourceWithStreamingResponse(self._account.keys)
diff --git a/src/do_gradientai/resources/account/keys.py b/src/do_gradientai/resources/gpu_droplets/account/keys.py
similarity index 96%
rename from src/do_gradientai/resources/account/keys.py
rename to src/do_gradientai/resources/gpu_droplets/account/keys.py
index 2cfd5c6a..66d3bd55 100644
--- a/src/do_gradientai/resources/account/keys.py
+++ b/src/do_gradientai/resources/gpu_droplets/account/keys.py
@@ -6,22 +6,22 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
-from ..._utils import maybe_transform, async_maybe_transform
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ...._utils import maybe_transform, async_maybe_transform
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
-from ...types.account import key_list_params, key_create_params, key_update_params
-from ...types.account.key_list_response import KeyListResponse
-from ...types.account.key_create_response import KeyCreateResponse
-from ...types.account.key_update_response import KeyUpdateResponse
-from ...types.account.key_retrieve_response import KeyRetrieveResponse
+from ...._base_client import make_request_options
+from ....types.gpu_droplets.account import key_list_params, key_create_params, key_update_params
+from ....types.gpu_droplets.account.key_list_response import KeyListResponse
+from ....types.gpu_droplets.account.key_create_response import KeyCreateResponse
+from ....types.gpu_droplets.account.key_update_response import KeyUpdateResponse
+from ....types.gpu_droplets.account.key_retrieve_response import KeyRetrieveResponse
__all__ = ["KeysResource", "AsyncKeysResource"]
diff --git a/src/do_gradientai/resources/droplets/actions.py b/src/do_gradientai/resources/gpu_droplets/actions.py
similarity index 99%
rename from src/do_gradientai/resources/droplets/actions.py
rename to src/do_gradientai/resources/gpu_droplets/actions.py
index 93d03c2d..197b2ce7 100644
--- a/src/do_gradientai/resources/droplets/actions.py
+++ b/src/do_gradientai/resources/gpu_droplets/actions.py
@@ -18,12 +18,12 @@
async_to_streamed_response_wrapper,
)
from ..._base_client import make_request_options
-from ...types.droplets import action_list_params, action_initiate_params, action_bulk_initiate_params
+from ...types.gpu_droplets import action_list_params, action_initiate_params, action_bulk_initiate_params
from ...types.droplet_backup_policy_param import DropletBackupPolicyParam
-from ...types.droplets.action_list_response import ActionListResponse
-from ...types.droplets.action_initiate_response import ActionInitiateResponse
-from ...types.droplets.action_retrieve_response import ActionRetrieveResponse
-from ...types.droplets.action_bulk_initiate_response import ActionBulkInitiateResponse
+from ...types.gpu_droplets.action_list_response import ActionListResponse
+from ...types.gpu_droplets.action_initiate_response import ActionInitiateResponse
+from ...types.gpu_droplets.action_retrieve_response import ActionRetrieveResponse
+from ...types.gpu_droplets.action_bulk_initiate_response import ActionBulkInitiateResponse
__all__ = ["ActionsResource", "AsyncActionsResource"]
diff --git a/src/do_gradientai/resources/droplets/autoscale.py b/src/do_gradientai/resources/gpu_droplets/autoscale.py
similarity index 98%
rename from src/do_gradientai/resources/droplets/autoscale.py
rename to src/do_gradientai/resources/gpu_droplets/autoscale.py
index 7522385f..a1a72430 100644
--- a/src/do_gradientai/resources/droplets/autoscale.py
+++ b/src/do_gradientai/resources/gpu_droplets/autoscale.py
@@ -15,20 +15,20 @@
async_to_streamed_response_wrapper,
)
from ..._base_client import make_request_options
-from ...types.droplets import (
+from ...types.gpu_droplets import (
autoscale_list_params,
autoscale_create_params,
autoscale_update_params,
autoscale_list_history_params,
autoscale_list_members_params,
)
-from ...types.droplets.autoscale_list_response import AutoscaleListResponse
-from ...types.droplets.autoscale_create_response import AutoscaleCreateResponse
-from ...types.droplets.autoscale_update_response import AutoscaleUpdateResponse
-from ...types.droplets.autoscale_retrieve_response import AutoscaleRetrieveResponse
-from ...types.droplets.autoscale_list_history_response import AutoscaleListHistoryResponse
-from ...types.droplets.autoscale_list_members_response import AutoscaleListMembersResponse
-from ...types.droplets.autoscale_pool_droplet_template_param import AutoscalePoolDropletTemplateParam
+from ...types.gpu_droplets.autoscale_list_response import AutoscaleListResponse
+from ...types.gpu_droplets.autoscale_create_response import AutoscaleCreateResponse
+from ...types.gpu_droplets.autoscale_update_response import AutoscaleUpdateResponse
+from ...types.gpu_droplets.autoscale_retrieve_response import AutoscaleRetrieveResponse
+from ...types.gpu_droplets.autoscale_list_history_response import AutoscaleListHistoryResponse
+from ...types.gpu_droplets.autoscale_list_members_response import AutoscaleListMembersResponse
+from ...types.gpu_droplets.autoscale_pool_droplet_template_param import AutoscalePoolDropletTemplateParam
__all__ = ["AutoscaleResource", "AsyncAutoscaleResource"]
diff --git a/src/do_gradientai/resources/droplets/backups.py b/src/do_gradientai/resources/gpu_droplets/backups.py
similarity index 97%
rename from src/do_gradientai/resources/droplets/backups.py
rename to src/do_gradientai/resources/gpu_droplets/backups.py
index d8635c46..06fca19e 100644
--- a/src/do_gradientai/resources/droplets/backups.py
+++ b/src/do_gradientai/resources/gpu_droplets/backups.py
@@ -15,11 +15,11 @@
async_to_streamed_response_wrapper,
)
from ..._base_client import make_request_options
-from ...types.droplets import backup_list_params, backup_list_policies_params
-from ...types.droplets.backup_list_response import BackupListResponse
-from ...types.droplets.backup_list_policies_response import BackupListPoliciesResponse
-from ...types.droplets.backup_retrieve_policy_response import BackupRetrievePolicyResponse
-from ...types.droplets.backup_list_supported_policies_response import BackupListSupportedPoliciesResponse
+from ...types.gpu_droplets import backup_list_params, backup_list_policies_params
+from ...types.gpu_droplets.backup_list_response import BackupListResponse
+from ...types.gpu_droplets.backup_list_policies_response import BackupListPoliciesResponse
+from ...types.gpu_droplets.backup_retrieve_policy_response import BackupRetrievePolicyResponse
+from ...types.gpu_droplets.backup_list_supported_policies_response import BackupListSupportedPoliciesResponse
__all__ = ["BackupsResource", "AsyncBackupsResource"]
diff --git a/src/do_gradientai/resources/droplets/destroy_with_associated_resources.py b/src/do_gradientai/resources/gpu_droplets/destroy_with_associated_resources.py
similarity index 98%
rename from src/do_gradientai/resources/droplets/destroy_with_associated_resources.py
rename to src/do_gradientai/resources/gpu_droplets/destroy_with_associated_resources.py
index 96cc0615..46db6563 100644
--- a/src/do_gradientai/resources/droplets/destroy_with_associated_resources.py
+++ b/src/do_gradientai/resources/gpu_droplets/destroy_with_associated_resources.py
@@ -17,9 +17,11 @@
async_to_streamed_response_wrapper,
)
from ..._base_client import make_request_options
-from ...types.droplets import destroy_with_associated_resource_delete_selective_params
-from ...types.droplets.destroy_with_associated_resource_list_response import DestroyWithAssociatedResourceListResponse
-from ...types.droplets.destroy_with_associated_resource_check_status_response import (
+from ...types.gpu_droplets import destroy_with_associated_resource_delete_selective_params
+from ...types.gpu_droplets.destroy_with_associated_resource_list_response import (
+ DestroyWithAssociatedResourceListResponse,
+)
+from ...types.gpu_droplets.destroy_with_associated_resource_check_status_response import (
DestroyWithAssociatedResourceCheckStatusResponse,
)
diff --git a/src/do_gradientai/resources/firewalls/__init__.py b/src/do_gradientai/resources/gpu_droplets/firewalls/__init__.py
similarity index 100%
rename from src/do_gradientai/resources/firewalls/__init__.py
rename to src/do_gradientai/resources/gpu_droplets/firewalls/__init__.py
diff --git a/src/do_gradientai/resources/firewalls/droplets.py b/src/do_gradientai/resources/gpu_droplets/firewalls/droplets.py
similarity index 96%
rename from src/do_gradientai/resources/firewalls/droplets.py
rename to src/do_gradientai/resources/gpu_droplets/firewalls/droplets.py
index 435b28e1..025d1ba4 100644
--- a/src/do_gradientai/resources/firewalls/droplets.py
+++ b/src/do_gradientai/resources/gpu_droplets/firewalls/droplets.py
@@ -6,18 +6,18 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
-from ..._utils import maybe_transform, async_maybe_transform
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ...._utils import maybe_transform, async_maybe_transform
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
-from ...types.firewalls import droplet_add_params, droplet_remove_params
+from ...._base_client import make_request_options
+from ....types.gpu_droplets.firewalls import droplet_add_params, droplet_remove_params
__all__ = ["DropletsResource", "AsyncDropletsResource"]
diff --git a/src/do_gradientai/resources/firewalls/firewalls.py b/src/do_gradientai/resources/gpu_droplets/firewalls/firewalls.py
similarity index 96%
rename from src/do_gradientai/resources/firewalls/firewalls.py
rename to src/do_gradientai/resources/gpu_droplets/firewalls/firewalls.py
index f59cd64d..a6c21928 100644
--- a/src/do_gradientai/resources/firewalls/firewalls.py
+++ b/src/do_gradientai/resources/gpu_droplets/firewalls/firewalls.py
@@ -20,9 +20,6 @@
RulesResourceWithStreamingResponse,
AsyncRulesResourceWithStreamingResponse,
)
-from ...types import firewall_list_params, firewall_create_params, firewall_update_params
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
-from ..._utils import maybe_transform, async_maybe_transform
from .droplets import (
DropletsResource,
AsyncDropletsResource,
@@ -31,20 +28,23 @@
DropletsResourceWithStreamingResponse,
AsyncDropletsResourceWithStreamingResponse,
)
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ...._utils import maybe_transform, async_maybe_transform
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
-from ...types.firewall_param import FirewallParam
-from ...types.firewall_list_response import FirewallListResponse
-from ...types.firewall_create_response import FirewallCreateResponse
-from ...types.firewall_update_response import FirewallUpdateResponse
-from ...types.firewall_retrieve_response import FirewallRetrieveResponse
+from ...._base_client import make_request_options
+from ....types.gpu_droplets import firewall_list_params, firewall_create_params, firewall_update_params
+from ....types.gpu_droplets.firewall_param import FirewallParam
+from ....types.gpu_droplets.firewall_list_response import FirewallListResponse
+from ....types.gpu_droplets.firewall_create_response import FirewallCreateResponse
+from ....types.gpu_droplets.firewall_update_response import FirewallUpdateResponse
+from ....types.gpu_droplets.firewall_retrieve_response import FirewallRetrieveResponse
__all__ = ["FirewallsResource", "AsyncFirewallsResource"]
diff --git a/src/do_gradientai/resources/firewalls/rules.py b/src/do_gradientai/resources/gpu_droplets/firewalls/rules.py
similarity index 97%
rename from src/do_gradientai/resources/firewalls/rules.py
rename to src/do_gradientai/resources/gpu_droplets/firewalls/rules.py
index 756cd0bd..61026779 100644
--- a/src/do_gradientai/resources/firewalls/rules.py
+++ b/src/do_gradientai/resources/gpu_droplets/firewalls/rules.py
@@ -6,18 +6,18 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
-from ..._utils import maybe_transform, async_maybe_transform
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ...._utils import maybe_transform, async_maybe_transform
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
-from ...types.firewalls import rule_add_params, rule_remove_params
+from ...._base_client import make_request_options
+from ....types.gpu_droplets.firewalls import rule_add_params, rule_remove_params
__all__ = ["RulesResource", "AsyncRulesResource"]
diff --git a/src/do_gradientai/resources/firewalls/tags.py b/src/do_gradientai/resources/gpu_droplets/firewalls/tags.py
similarity index 96%
rename from src/do_gradientai/resources/firewalls/tags.py
rename to src/do_gradientai/resources/gpu_droplets/firewalls/tags.py
index 966015ea..725bc014 100644
--- a/src/do_gradientai/resources/firewalls/tags.py
+++ b/src/do_gradientai/resources/gpu_droplets/firewalls/tags.py
@@ -6,18 +6,18 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
-from ..._utils import maybe_transform, async_maybe_transform
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ...._utils import maybe_transform, async_maybe_transform
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
-from ...types.firewalls import tag_add_params, tag_remove_params
+from ...._base_client import make_request_options
+from ....types.gpu_droplets.firewalls import tag_add_params, tag_remove_params
__all__ = ["TagsResource", "AsyncTagsResource"]
diff --git a/src/do_gradientai/resources/floating_ips/__init__.py b/src/do_gradientai/resources/gpu_droplets/floating_ips/__init__.py
similarity index 100%
rename from src/do_gradientai/resources/floating_ips/__init__.py
rename to src/do_gradientai/resources/gpu_droplets/floating_ips/__init__.py
diff --git a/src/do_gradientai/resources/floating_ips/actions.py b/src/do_gradientai/resources/gpu_droplets/floating_ips/actions.py
similarity index 96%
rename from src/do_gradientai/resources/floating_ips/actions.py
rename to src/do_gradientai/resources/gpu_droplets/floating_ips/actions.py
index 69b9b67e..7ba3899d 100644
--- a/src/do_gradientai/resources/floating_ips/actions.py
+++ b/src/do_gradientai/resources/gpu_droplets/floating_ips/actions.py
@@ -6,21 +6,21 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
-from ..._utils import required_args, maybe_transform, async_maybe_transform
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ...._utils import required_args, maybe_transform, async_maybe_transform
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
-from ...types.floating_ips import action_create_params
-from ...types.floating_ips.action_list_response import ActionListResponse
-from ...types.floating_ips.action_create_response import ActionCreateResponse
-from ...types.floating_ips.action_retrieve_response import ActionRetrieveResponse
+from ...._base_client import make_request_options
+from ....types.gpu_droplets.floating_ips import action_create_params
+from ....types.gpu_droplets.floating_ips.action_list_response import ActionListResponse
+from ....types.gpu_droplets.floating_ips.action_create_response import ActionCreateResponse
+from ....types.gpu_droplets.floating_ips.action_retrieve_response import ActionRetrieveResponse
__all__ = ["ActionsResource", "AsyncActionsResource"]
diff --git a/src/do_gradientai/resources/floating_ips/floating_ips.py b/src/do_gradientai/resources/gpu_droplets/floating_ips/floating_ips.py
similarity index 97%
rename from src/do_gradientai/resources/floating_ips/floating_ips.py
rename to src/do_gradientai/resources/gpu_droplets/floating_ips/floating_ips.py
index 0cc083b6..cabe012e 100644
--- a/src/do_gradientai/resources/floating_ips/floating_ips.py
+++ b/src/do_gradientai/resources/gpu_droplets/floating_ips/floating_ips.py
@@ -6,7 +6,6 @@
import httpx
-from ...types import floating_ip_list_params, floating_ip_create_params
from .actions import (
ActionsResource,
AsyncActionsResource,
@@ -15,20 +14,21 @@
ActionsResourceWithStreamingResponse,
AsyncActionsResourceWithStreamingResponse,
)
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
-from ..._utils import required_args, maybe_transform, async_maybe_transform
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ...._utils import required_args, maybe_transform, async_maybe_transform
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
-from ...types.floating_ip_list_response import FloatingIPListResponse
-from ...types.floating_ip_create_response import FloatingIPCreateResponse
-from ...types.floating_ip_retrieve_response import FloatingIPRetrieveResponse
+from ...._base_client import make_request_options
+from ....types.gpu_droplets import floating_ip_list_params, floating_ip_create_params
+from ....types.gpu_droplets.floating_ip_list_response import FloatingIPListResponse
+from ....types.gpu_droplets.floating_ip_create_response import FloatingIPCreateResponse
+from ....types.gpu_droplets.floating_ip_retrieve_response import FloatingIPRetrieveResponse
__all__ = ["FloatingIPsResource", "AsyncFloatingIPsResource"]
diff --git a/src/do_gradientai/resources/droplets/droplets.py b/src/do_gradientai/resources/gpu_droplets/gpu_droplets.py
similarity index 81%
rename from src/do_gradientai/resources/droplets/droplets.py
rename to src/do_gradientai/resources/gpu_droplets/gpu_droplets.py
index fbe2aba5..cbb07830 100644
--- a/src/do_gradientai/resources/droplets/droplets.py
+++ b/src/do_gradientai/resources/gpu_droplets/gpu_droplets.py
@@ -7,13 +7,21 @@
import httpx
+from .sizes import (
+ SizesResource,
+ AsyncSizesResource,
+ SizesResourceWithRawResponse,
+ AsyncSizesResourceWithRawResponse,
+ SizesResourceWithStreamingResponse,
+ AsyncSizesResourceWithStreamingResponse,
+)
from ...types import (
- droplet_list_params,
- droplet_create_params,
- droplet_list_kernels_params,
- droplet_delete_by_tag_params,
- droplet_list_firewalls_params,
- droplet_list_snapshots_params,
+ gpu_droplet_list_params,
+ gpu_droplet_create_params,
+ gpu_droplet_list_kernels_params,
+ gpu_droplet_delete_by_tag_params,
+ gpu_droplet_list_firewalls_params,
+ gpu_droplet_list_snapshots_params,
)
from .actions import (
ActionsResource,
@@ -42,6 +50,14 @@
AutoscaleResourceWithStreamingResponse,
AsyncAutoscaleResourceWithStreamingResponse,
)
+from .snapshots import (
+ SnapshotsResource,
+ AsyncSnapshotsResource,
+ SnapshotsResourceWithRawResponse,
+ AsyncSnapshotsResourceWithRawResponse,
+ SnapshotsResourceWithStreamingResponse,
+ AsyncSnapshotsResourceWithStreamingResponse,
+)
from ..._resource import SyncAPIResource, AsyncAPIResource
from ..._response import (
to_raw_response_wrapper,
@@ -49,10 +65,56 @@
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
+from .images.images import (
+ ImagesResource,
+ AsyncImagesResource,
+ ImagesResourceWithRawResponse,
+ AsyncImagesResourceWithRawResponse,
+ ImagesResourceWithStreamingResponse,
+ AsyncImagesResourceWithStreamingResponse,
+)
from ..._base_client import make_request_options
-from ...types.droplet_list_response import DropletListResponse
-from ...types.droplet_create_response import DropletCreateResponse
-from ...types.droplet_retrieve_response import DropletRetrieveResponse
+from .account.account import (
+ AccountResource,
+ AsyncAccountResource,
+ AccountResourceWithRawResponse,
+ AsyncAccountResourceWithRawResponse,
+ AccountResourceWithStreamingResponse,
+ AsyncAccountResourceWithStreamingResponse,
+)
+from .volumes.volumes import (
+ VolumesResource,
+ AsyncVolumesResource,
+ VolumesResourceWithRawResponse,
+ AsyncVolumesResourceWithRawResponse,
+ VolumesResourceWithStreamingResponse,
+ AsyncVolumesResourceWithStreamingResponse,
+)
+from .firewalls.firewalls import (
+ FirewallsResource,
+ AsyncFirewallsResource,
+ FirewallsResourceWithRawResponse,
+ AsyncFirewallsResourceWithRawResponse,
+ FirewallsResourceWithStreamingResponse,
+ AsyncFirewallsResourceWithStreamingResponse,
+)
+from .floating_ips.floating_ips import (
+ FloatingIPsResource,
+ AsyncFloatingIPsResource,
+ FloatingIPsResourceWithRawResponse,
+ AsyncFloatingIPsResourceWithRawResponse,
+ FloatingIPsResourceWithStreamingResponse,
+ AsyncFloatingIPsResourceWithStreamingResponse,
+)
+from .load_balancers.load_balancers import (
+ LoadBalancersResource,
+ AsyncLoadBalancersResource,
+ LoadBalancersResourceWithRawResponse,
+ AsyncLoadBalancersResourceWithRawResponse,
+ LoadBalancersResourceWithStreamingResponse,
+ AsyncLoadBalancersResourceWithStreamingResponse,
+)
+from ...types.gpu_droplet_list_response import GPUDropletListResponse
from .destroy_with_associated_resources import (
DestroyWithAssociatedResourcesResource,
AsyncDestroyWithAssociatedResourcesResource,
@@ -62,15 +124,17 @@
AsyncDestroyWithAssociatedResourcesResourceWithStreamingResponse,
)
from ...types.droplet_backup_policy_param import DropletBackupPolicyParam
-from ...types.droplet_list_kernels_response import DropletListKernelsResponse
-from ...types.droplet_list_firewalls_response import DropletListFirewallsResponse
-from ...types.droplet_list_neighbors_response import DropletListNeighborsResponse
-from ...types.droplet_list_snapshots_response import DropletListSnapshotsResponse
+from ...types.gpu_droplet_create_response import GPUDropletCreateResponse
+from ...types.gpu_droplet_retrieve_response import GPUDropletRetrieveResponse
+from ...types.gpu_droplet_list_kernels_response import GPUDropletListKernelsResponse
+from ...types.gpu_droplet_list_firewalls_response import GPUDropletListFirewallsResponse
+from ...types.gpu_droplet_list_neighbors_response import GPUDropletListNeighborsResponse
+from ...types.gpu_droplet_list_snapshots_response import GPUDropletListSnapshotsResponse
-__all__ = ["DropletsResource", "AsyncDropletsResource"]
+__all__ = ["GPUDropletsResource", "AsyncGPUDropletsResource"]
-class DropletsResource(SyncAPIResource):
+class GPUDropletsResource(SyncAPIResource):
@cached_property
def backups(self) -> BackupsResource:
return BackupsResource(self._client)
@@ -88,23 +152,55 @@ def autoscale(self) -> AutoscaleResource:
return AutoscaleResource(self._client)
@cached_property
- def with_raw_response(self) -> DropletsResourceWithRawResponse:
+ def firewalls(self) -> FirewallsResource:
+ return FirewallsResource(self._client)
+
+ @cached_property
+ def floating_ips(self) -> FloatingIPsResource:
+ return FloatingIPsResource(self._client)
+
+ @cached_property
+ def images(self) -> ImagesResource:
+ return ImagesResource(self._client)
+
+ @cached_property
+ def load_balancers(self) -> LoadBalancersResource:
+ return LoadBalancersResource(self._client)
+
+ @cached_property
+ def sizes(self) -> SizesResource:
+ return SizesResource(self._client)
+
+ @cached_property
+ def snapshots(self) -> SnapshotsResource:
+ return SnapshotsResource(self._client)
+
+ @cached_property
+ def volumes(self) -> VolumesResource:
+ return VolumesResource(self._client)
+
+ @cached_property
+ def account(self) -> AccountResource:
+ return AccountResource(self._client)
+
+ @cached_property
+ def with_raw_response(self) -> GPUDropletsResourceWithRawResponse:
"""
This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers
"""
- return DropletsResourceWithRawResponse(self)
+ return GPUDropletsResourceWithRawResponse(self)
@cached_property
- def with_streaming_response(self) -> DropletsResourceWithStreamingResponse:
+ def with_streaming_response(self) -> GPUDropletsResourceWithStreamingResponse:
"""
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response
"""
- return DropletsResourceWithStreamingResponse(self)
+ return GPUDropletsResourceWithStreamingResponse(self)
@overload
def create(
@@ -131,7 +227,7 @@ def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletCreateResponse:
+ ) -> GPUDropletCreateResponse:
"""
To create a new Droplet, send a POST request to `/v2/droplets` setting the
required attributes.
@@ -252,7 +348,7 @@ def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletCreateResponse:
+ ) -> GPUDropletCreateResponse:
"""
To create a new Droplet, send a POST request to `/v2/droplets` setting the
required attributes.
@@ -375,9 +471,9 @@ def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletCreateResponse:
+ ) -> GPUDropletCreateResponse:
return cast(
- DropletCreateResponse,
+ GPUDropletCreateResponse,
self._post(
"/v2/droplets" if self._client._base_url_overridden else "https://api.digitalocean.com/v2/droplets",
body=maybe_transform(
@@ -399,13 +495,13 @@ def create(
"with_droplet_agent": with_droplet_agent,
"names": names,
},
- droplet_create_params.DropletCreateParams,
+ gpu_droplet_create_params.GPUDropletCreateParams,
),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
cast_to=cast(
- Any, DropletCreateResponse
+ Any, GPUDropletCreateResponse
), # Union types cannot be passed in as arguments in the type system
),
)
@@ -420,7 +516,7 @@ def retrieve(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletRetrieveResponse:
+ ) -> GPUDropletRetrieveResponse:
"""
To show information about an individual Droplet, send a GET request to
`/v2/droplets/$DROPLET_ID`.
@@ -441,7 +537,7 @@ def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=DropletRetrieveResponse,
+ cast_to=GPUDropletRetrieveResponse,
)
def list(
@@ -458,7 +554,7 @@ def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletListResponse:
+ ) -> GPUDropletListResponse:
"""
To list all Droplets in your account, send a GET request to `/v2/droplets`.
@@ -515,10 +611,10 @@ def list(
"tag_name": tag_name,
"type": type,
},
- droplet_list_params.DropletListParams,
+ gpu_droplet_list_params.GPUDropletListParams,
),
),
- cast_to=DropletListResponse,
+ cast_to=GPUDropletListResponse,
)
def delete(
@@ -598,7 +694,9 @@ def delete_by_tag(
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
- query=maybe_transform({"tag_name": tag_name}, droplet_delete_by_tag_params.DropletDeleteByTagParams),
+ query=maybe_transform(
+ {"tag_name": tag_name}, gpu_droplet_delete_by_tag_params.GPUDropletDeleteByTagParams
+ ),
),
cast_to=NoneType,
)
@@ -615,7 +713,7 @@ def list_firewalls(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletListFirewallsResponse:
+ ) -> GPUDropletListFirewallsResponse:
"""
To retrieve a list of all firewalls available to a Droplet, send a GET request
to `/v2/droplets/$DROPLET_ID/firewalls`
@@ -651,10 +749,10 @@ def list_firewalls(
"page": page,
"per_page": per_page,
},
- droplet_list_firewalls_params.DropletListFirewallsParams,
+ gpu_droplet_list_firewalls_params.GPUDropletListFirewallsParams,
),
),
- cast_to=DropletListFirewallsResponse,
+ cast_to=GPUDropletListFirewallsResponse,
)
def list_kernels(
@@ -669,7 +767,7 @@ def list_kernels(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletListKernelsResponse:
+ ) -> GPUDropletListKernelsResponse:
"""
To retrieve a list of all kernels available to a Droplet, send a GET request to
`/v2/droplets/$DROPLET_ID/kernels`
@@ -705,10 +803,10 @@ def list_kernels(
"page": page,
"per_page": per_page,
},
- droplet_list_kernels_params.DropletListKernelsParams,
+ gpu_droplet_list_kernels_params.GPUDropletListKernelsParams,
),
),
- cast_to=DropletListKernelsResponse,
+ cast_to=GPUDropletListKernelsResponse,
)
def list_neighbors(
@@ -721,7 +819,7 @@ def list_neighbors(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletListNeighborsResponse:
+ ) -> GPUDropletListNeighborsResponse:
"""To retrieve a list of any "neighbors" (i.e.
Droplets that are co-located on the
@@ -749,7 +847,7 @@ def list_neighbors(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=DropletListNeighborsResponse,
+ cast_to=GPUDropletListNeighborsResponse,
)
def list_snapshots(
@@ -764,7 +862,7 @@ def list_snapshots(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletListSnapshotsResponse:
+ ) -> GPUDropletListSnapshotsResponse:
"""
To retrieve the snapshots that have been created from a Droplet, send a GET
request to `/v2/droplets/$DROPLET_ID/snapshots`.
@@ -800,14 +898,14 @@ def list_snapshots(
"page": page,
"per_page": per_page,
},
- droplet_list_snapshots_params.DropletListSnapshotsParams,
+ gpu_droplet_list_snapshots_params.GPUDropletListSnapshotsParams,
),
),
- cast_to=DropletListSnapshotsResponse,
+ cast_to=GPUDropletListSnapshotsResponse,
)
-class AsyncDropletsResource(AsyncAPIResource):
+class AsyncGPUDropletsResource(AsyncAPIResource):
@cached_property
def backups(self) -> AsyncBackupsResource:
return AsyncBackupsResource(self._client)
@@ -825,23 +923,55 @@ def autoscale(self) -> AsyncAutoscaleResource:
return AsyncAutoscaleResource(self._client)
@cached_property
- def with_raw_response(self) -> AsyncDropletsResourceWithRawResponse:
+ def firewalls(self) -> AsyncFirewallsResource:
+ return AsyncFirewallsResource(self._client)
+
+ @cached_property
+ def floating_ips(self) -> AsyncFloatingIPsResource:
+ return AsyncFloatingIPsResource(self._client)
+
+ @cached_property
+ def images(self) -> AsyncImagesResource:
+ return AsyncImagesResource(self._client)
+
+ @cached_property
+ def load_balancers(self) -> AsyncLoadBalancersResource:
+ return AsyncLoadBalancersResource(self._client)
+
+ @cached_property
+ def sizes(self) -> AsyncSizesResource:
+ return AsyncSizesResource(self._client)
+
+ @cached_property
+ def snapshots(self) -> AsyncSnapshotsResource:
+ return AsyncSnapshotsResource(self._client)
+
+ @cached_property
+ def volumes(self) -> AsyncVolumesResource:
+ return AsyncVolumesResource(self._client)
+
+ @cached_property
+ def account(self) -> AsyncAccountResource:
+ return AsyncAccountResource(self._client)
+
+ @cached_property
+ def with_raw_response(self) -> AsyncGPUDropletsResourceWithRawResponse:
"""
This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/digitalocean/gradientai-python#accessing-raw-response-data-eg-headers
"""
- return AsyncDropletsResourceWithRawResponse(self)
+ return AsyncGPUDropletsResourceWithRawResponse(self)
@cached_property
- def with_streaming_response(self) -> AsyncDropletsResourceWithStreamingResponse:
+ def with_streaming_response(self) -> AsyncGPUDropletsResourceWithStreamingResponse:
"""
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
For more information, see https://www.github.com/digitalocean/gradientai-python#with_streaming_response
"""
- return AsyncDropletsResourceWithStreamingResponse(self)
+ return AsyncGPUDropletsResourceWithStreamingResponse(self)
@overload
async def create(
@@ -868,7 +998,7 @@ async def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletCreateResponse:
+ ) -> GPUDropletCreateResponse:
"""
To create a new Droplet, send a POST request to `/v2/droplets` setting the
required attributes.
@@ -989,7 +1119,7 @@ async def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletCreateResponse:
+ ) -> GPUDropletCreateResponse:
"""
To create a new Droplet, send a POST request to `/v2/droplets` setting the
required attributes.
@@ -1112,9 +1242,9 @@ async def create(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletCreateResponse:
+ ) -> GPUDropletCreateResponse:
return cast(
- DropletCreateResponse,
+ GPUDropletCreateResponse,
await self._post(
"/v2/droplets" if self._client._base_url_overridden else "https://api.digitalocean.com/v2/droplets",
body=await async_maybe_transform(
@@ -1136,13 +1266,13 @@ async def create(
"with_droplet_agent": with_droplet_agent,
"names": names,
},
- droplet_create_params.DropletCreateParams,
+ gpu_droplet_create_params.GPUDropletCreateParams,
),
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
cast_to=cast(
- Any, DropletCreateResponse
+ Any, GPUDropletCreateResponse
), # Union types cannot be passed in as arguments in the type system
),
)
@@ -1157,7 +1287,7 @@ async def retrieve(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletRetrieveResponse:
+ ) -> GPUDropletRetrieveResponse:
"""
To show information about an individual Droplet, send a GET request to
`/v2/droplets/$DROPLET_ID`.
@@ -1178,7 +1308,7 @@ async def retrieve(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=DropletRetrieveResponse,
+ cast_to=GPUDropletRetrieveResponse,
)
async def list(
@@ -1195,7 +1325,7 @@ async def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletListResponse:
+ ) -> GPUDropletListResponse:
"""
To list all Droplets in your account, send a GET request to `/v2/droplets`.
@@ -1252,10 +1382,10 @@ async def list(
"tag_name": tag_name,
"type": type,
},
- droplet_list_params.DropletListParams,
+ gpu_droplet_list_params.GPUDropletListParams,
),
),
- cast_to=DropletListResponse,
+ cast_to=GPUDropletListResponse,
)
async def delete(
@@ -1336,7 +1466,7 @@ async def delete_by_tag(
extra_body=extra_body,
timeout=timeout,
query=await async_maybe_transform(
- {"tag_name": tag_name}, droplet_delete_by_tag_params.DropletDeleteByTagParams
+ {"tag_name": tag_name}, gpu_droplet_delete_by_tag_params.GPUDropletDeleteByTagParams
),
),
cast_to=NoneType,
@@ -1354,7 +1484,7 @@ async def list_firewalls(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletListFirewallsResponse:
+ ) -> GPUDropletListFirewallsResponse:
"""
To retrieve a list of all firewalls available to a Droplet, send a GET request
to `/v2/droplets/$DROPLET_ID/firewalls`
@@ -1390,10 +1520,10 @@ async def list_firewalls(
"page": page,
"per_page": per_page,
},
- droplet_list_firewalls_params.DropletListFirewallsParams,
+ gpu_droplet_list_firewalls_params.GPUDropletListFirewallsParams,
),
),
- cast_to=DropletListFirewallsResponse,
+ cast_to=GPUDropletListFirewallsResponse,
)
async def list_kernels(
@@ -1408,7 +1538,7 @@ async def list_kernels(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletListKernelsResponse:
+ ) -> GPUDropletListKernelsResponse:
"""
To retrieve a list of all kernels available to a Droplet, send a GET request to
`/v2/droplets/$DROPLET_ID/kernels`
@@ -1444,10 +1574,10 @@ async def list_kernels(
"page": page,
"per_page": per_page,
},
- droplet_list_kernels_params.DropletListKernelsParams,
+ gpu_droplet_list_kernels_params.GPUDropletListKernelsParams,
),
),
- cast_to=DropletListKernelsResponse,
+ cast_to=GPUDropletListKernelsResponse,
)
async def list_neighbors(
@@ -1460,7 +1590,7 @@ async def list_neighbors(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletListNeighborsResponse:
+ ) -> GPUDropletListNeighborsResponse:
"""To retrieve a list of any "neighbors" (i.e.
Droplets that are co-located on the
@@ -1488,7 +1618,7 @@ async def list_neighbors(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=DropletListNeighborsResponse,
+ cast_to=GPUDropletListNeighborsResponse,
)
async def list_snapshots(
@@ -1503,7 +1633,7 @@ async def list_snapshots(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> DropletListSnapshotsResponse:
+ ) -> GPUDropletListSnapshotsResponse:
"""
To retrieve the snapshots that have been created from a Droplet, send a GET
request to `/v2/droplets/$DROPLET_ID/snapshots`.
@@ -1539,210 +1669,340 @@ async def list_snapshots(
"page": page,
"per_page": per_page,
},
- droplet_list_snapshots_params.DropletListSnapshotsParams,
+ gpu_droplet_list_snapshots_params.GPUDropletListSnapshotsParams,
),
),
- cast_to=DropletListSnapshotsResponse,
+ cast_to=GPUDropletListSnapshotsResponse,
)
-class DropletsResourceWithRawResponse:
- def __init__(self, droplets: DropletsResource) -> None:
- self._droplets = droplets
+class GPUDropletsResourceWithRawResponse:
+ def __init__(self, gpu_droplets: GPUDropletsResource) -> None:
+ self._gpu_droplets = gpu_droplets
self.create = to_raw_response_wrapper(
- droplets.create,
+ gpu_droplets.create,
)
self.retrieve = to_raw_response_wrapper(
- droplets.retrieve,
+ gpu_droplets.retrieve,
)
self.list = to_raw_response_wrapper(
- droplets.list,
+ gpu_droplets.list,
)
self.delete = to_raw_response_wrapper(
- droplets.delete,
+ gpu_droplets.delete,
)
self.delete_by_tag = to_raw_response_wrapper(
- droplets.delete_by_tag,
+ gpu_droplets.delete_by_tag,
)
self.list_firewalls = to_raw_response_wrapper(
- droplets.list_firewalls,
+ gpu_droplets.list_firewalls,
)
self.list_kernels = to_raw_response_wrapper(
- droplets.list_kernels,
+ gpu_droplets.list_kernels,
)
self.list_neighbors = to_raw_response_wrapper(
- droplets.list_neighbors,
+ gpu_droplets.list_neighbors,
)
self.list_snapshots = to_raw_response_wrapper(
- droplets.list_snapshots,
+ gpu_droplets.list_snapshots,
)
@cached_property
def backups(self) -> BackupsResourceWithRawResponse:
- return BackupsResourceWithRawResponse(self._droplets.backups)
+ return BackupsResourceWithRawResponse(self._gpu_droplets.backups)
@cached_property
def actions(self) -> ActionsResourceWithRawResponse:
- return ActionsResourceWithRawResponse(self._droplets.actions)
+ return ActionsResourceWithRawResponse(self._gpu_droplets.actions)
@cached_property
def destroy_with_associated_resources(self) -> DestroyWithAssociatedResourcesResourceWithRawResponse:
- return DestroyWithAssociatedResourcesResourceWithRawResponse(self._droplets.destroy_with_associated_resources)
+ return DestroyWithAssociatedResourcesResourceWithRawResponse(
+ self._gpu_droplets.destroy_with_associated_resources
+ )
@cached_property
def autoscale(self) -> AutoscaleResourceWithRawResponse:
- return AutoscaleResourceWithRawResponse(self._droplets.autoscale)
+ return AutoscaleResourceWithRawResponse(self._gpu_droplets.autoscale)
+ @cached_property
+ def firewalls(self) -> FirewallsResourceWithRawResponse:
+ return FirewallsResourceWithRawResponse(self._gpu_droplets.firewalls)
-class AsyncDropletsResourceWithRawResponse:
- def __init__(self, droplets: AsyncDropletsResource) -> None:
- self._droplets = droplets
+ @cached_property
+ def floating_ips(self) -> FloatingIPsResourceWithRawResponse:
+ return FloatingIPsResourceWithRawResponse(self._gpu_droplets.floating_ips)
+
+ @cached_property
+ def images(self) -> ImagesResourceWithRawResponse:
+ return ImagesResourceWithRawResponse(self._gpu_droplets.images)
+
+ @cached_property
+ def load_balancers(self) -> LoadBalancersResourceWithRawResponse:
+ return LoadBalancersResourceWithRawResponse(self._gpu_droplets.load_balancers)
+
+ @cached_property
+ def sizes(self) -> SizesResourceWithRawResponse:
+ return SizesResourceWithRawResponse(self._gpu_droplets.sizes)
+
+ @cached_property
+ def snapshots(self) -> SnapshotsResourceWithRawResponse:
+ return SnapshotsResourceWithRawResponse(self._gpu_droplets.snapshots)
+
+ @cached_property
+ def volumes(self) -> VolumesResourceWithRawResponse:
+ return VolumesResourceWithRawResponse(self._gpu_droplets.volumes)
+
+ @cached_property
+ def account(self) -> AccountResourceWithRawResponse:
+ return AccountResourceWithRawResponse(self._gpu_droplets.account)
+
+
+class AsyncGPUDropletsResourceWithRawResponse:
+ def __init__(self, gpu_droplets: AsyncGPUDropletsResource) -> None:
+ self._gpu_droplets = gpu_droplets
self.create = async_to_raw_response_wrapper(
- droplets.create,
+ gpu_droplets.create,
)
self.retrieve = async_to_raw_response_wrapper(
- droplets.retrieve,
+ gpu_droplets.retrieve,
)
self.list = async_to_raw_response_wrapper(
- droplets.list,
+ gpu_droplets.list,
)
self.delete = async_to_raw_response_wrapper(
- droplets.delete,
+ gpu_droplets.delete,
)
self.delete_by_tag = async_to_raw_response_wrapper(
- droplets.delete_by_tag,
+ gpu_droplets.delete_by_tag,
)
self.list_firewalls = async_to_raw_response_wrapper(
- droplets.list_firewalls,
+ gpu_droplets.list_firewalls,
)
self.list_kernels = async_to_raw_response_wrapper(
- droplets.list_kernels,
+ gpu_droplets.list_kernels,
)
self.list_neighbors = async_to_raw_response_wrapper(
- droplets.list_neighbors,
+ gpu_droplets.list_neighbors,
)
self.list_snapshots = async_to_raw_response_wrapper(
- droplets.list_snapshots,
+ gpu_droplets.list_snapshots,
)
@cached_property
def backups(self) -> AsyncBackupsResourceWithRawResponse:
- return AsyncBackupsResourceWithRawResponse(self._droplets.backups)
+ return AsyncBackupsResourceWithRawResponse(self._gpu_droplets.backups)
@cached_property
def actions(self) -> AsyncActionsResourceWithRawResponse:
- return AsyncActionsResourceWithRawResponse(self._droplets.actions)
+ return AsyncActionsResourceWithRawResponse(self._gpu_droplets.actions)
@cached_property
def destroy_with_associated_resources(self) -> AsyncDestroyWithAssociatedResourcesResourceWithRawResponse:
return AsyncDestroyWithAssociatedResourcesResourceWithRawResponse(
- self._droplets.destroy_with_associated_resources
+ self._gpu_droplets.destroy_with_associated_resources
)
@cached_property
def autoscale(self) -> AsyncAutoscaleResourceWithRawResponse:
- return AsyncAutoscaleResourceWithRawResponse(self._droplets.autoscale)
+ return AsyncAutoscaleResourceWithRawResponse(self._gpu_droplets.autoscale)
+
+ @cached_property
+ def firewalls(self) -> AsyncFirewallsResourceWithRawResponse:
+ return AsyncFirewallsResourceWithRawResponse(self._gpu_droplets.firewalls)
+ @cached_property
+ def floating_ips(self) -> AsyncFloatingIPsResourceWithRawResponse:
+ return AsyncFloatingIPsResourceWithRawResponse(self._gpu_droplets.floating_ips)
+
+ @cached_property
+ def images(self) -> AsyncImagesResourceWithRawResponse:
+ return AsyncImagesResourceWithRawResponse(self._gpu_droplets.images)
-class DropletsResourceWithStreamingResponse:
- def __init__(self, droplets: DropletsResource) -> None:
- self._droplets = droplets
+ @cached_property
+ def load_balancers(self) -> AsyncLoadBalancersResourceWithRawResponse:
+ return AsyncLoadBalancersResourceWithRawResponse(self._gpu_droplets.load_balancers)
+
+ @cached_property
+ def sizes(self) -> AsyncSizesResourceWithRawResponse:
+ return AsyncSizesResourceWithRawResponse(self._gpu_droplets.sizes)
+
+ @cached_property
+ def snapshots(self) -> AsyncSnapshotsResourceWithRawResponse:
+ return AsyncSnapshotsResourceWithRawResponse(self._gpu_droplets.snapshots)
+
+ @cached_property
+ def volumes(self) -> AsyncVolumesResourceWithRawResponse:
+ return AsyncVolumesResourceWithRawResponse(self._gpu_droplets.volumes)
+
+ @cached_property
+ def account(self) -> AsyncAccountResourceWithRawResponse:
+ return AsyncAccountResourceWithRawResponse(self._gpu_droplets.account)
+
+
+class GPUDropletsResourceWithStreamingResponse:
+ def __init__(self, gpu_droplets: GPUDropletsResource) -> None:
+ self._gpu_droplets = gpu_droplets
self.create = to_streamed_response_wrapper(
- droplets.create,
+ gpu_droplets.create,
)
self.retrieve = to_streamed_response_wrapper(
- droplets.retrieve,
+ gpu_droplets.retrieve,
)
self.list = to_streamed_response_wrapper(
- droplets.list,
+ gpu_droplets.list,
)
self.delete = to_streamed_response_wrapper(
- droplets.delete,
+ gpu_droplets.delete,
)
self.delete_by_tag = to_streamed_response_wrapper(
- droplets.delete_by_tag,
+ gpu_droplets.delete_by_tag,
)
self.list_firewalls = to_streamed_response_wrapper(
- droplets.list_firewalls,
+ gpu_droplets.list_firewalls,
)
self.list_kernels = to_streamed_response_wrapper(
- droplets.list_kernels,
+ gpu_droplets.list_kernels,
)
self.list_neighbors = to_streamed_response_wrapper(
- droplets.list_neighbors,
+ gpu_droplets.list_neighbors,
)
self.list_snapshots = to_streamed_response_wrapper(
- droplets.list_snapshots,
+ gpu_droplets.list_snapshots,
)
@cached_property
def backups(self) -> BackupsResourceWithStreamingResponse:
- return BackupsResourceWithStreamingResponse(self._droplets.backups)
+ return BackupsResourceWithStreamingResponse(self._gpu_droplets.backups)
@cached_property
def actions(self) -> ActionsResourceWithStreamingResponse:
- return ActionsResourceWithStreamingResponse(self._droplets.actions)
+ return ActionsResourceWithStreamingResponse(self._gpu_droplets.actions)
@cached_property
def destroy_with_associated_resources(self) -> DestroyWithAssociatedResourcesResourceWithStreamingResponse:
return DestroyWithAssociatedResourcesResourceWithStreamingResponse(
- self._droplets.destroy_with_associated_resources
+ self._gpu_droplets.destroy_with_associated_resources
)
@cached_property
def autoscale(self) -> AutoscaleResourceWithStreamingResponse:
- return AutoscaleResourceWithStreamingResponse(self._droplets.autoscale)
+ return AutoscaleResourceWithStreamingResponse(self._gpu_droplets.autoscale)
+ @cached_property
+ def firewalls(self) -> FirewallsResourceWithStreamingResponse:
+ return FirewallsResourceWithStreamingResponse(self._gpu_droplets.firewalls)
+
+ @cached_property
+ def floating_ips(self) -> FloatingIPsResourceWithStreamingResponse:
+ return FloatingIPsResourceWithStreamingResponse(self._gpu_droplets.floating_ips)
-class AsyncDropletsResourceWithStreamingResponse:
- def __init__(self, droplets: AsyncDropletsResource) -> None:
- self._droplets = droplets
+ @cached_property
+ def images(self) -> ImagesResourceWithStreamingResponse:
+ return ImagesResourceWithStreamingResponse(self._gpu_droplets.images)
+
+ @cached_property
+ def load_balancers(self) -> LoadBalancersResourceWithStreamingResponse:
+ return LoadBalancersResourceWithStreamingResponse(self._gpu_droplets.load_balancers)
+
+ @cached_property
+ def sizes(self) -> SizesResourceWithStreamingResponse:
+ return SizesResourceWithStreamingResponse(self._gpu_droplets.sizes)
+
+ @cached_property
+ def snapshots(self) -> SnapshotsResourceWithStreamingResponse:
+ return SnapshotsResourceWithStreamingResponse(self._gpu_droplets.snapshots)
+
+ @cached_property
+ def volumes(self) -> VolumesResourceWithStreamingResponse:
+ return VolumesResourceWithStreamingResponse(self._gpu_droplets.volumes)
+
+ @cached_property
+ def account(self) -> AccountResourceWithStreamingResponse:
+ return AccountResourceWithStreamingResponse(self._gpu_droplets.account)
+
+
+class AsyncGPUDropletsResourceWithStreamingResponse:
+ def __init__(self, gpu_droplets: AsyncGPUDropletsResource) -> None:
+ self._gpu_droplets = gpu_droplets
self.create = async_to_streamed_response_wrapper(
- droplets.create,
+ gpu_droplets.create,
)
self.retrieve = async_to_streamed_response_wrapper(
- droplets.retrieve,
+ gpu_droplets.retrieve,
)
self.list = async_to_streamed_response_wrapper(
- droplets.list,
+ gpu_droplets.list,
)
self.delete = async_to_streamed_response_wrapper(
- droplets.delete,
+ gpu_droplets.delete,
)
self.delete_by_tag = async_to_streamed_response_wrapper(
- droplets.delete_by_tag,
+ gpu_droplets.delete_by_tag,
)
self.list_firewalls = async_to_streamed_response_wrapper(
- droplets.list_firewalls,
+ gpu_droplets.list_firewalls,
)
self.list_kernels = async_to_streamed_response_wrapper(
- droplets.list_kernels,
+ gpu_droplets.list_kernels,
)
self.list_neighbors = async_to_streamed_response_wrapper(
- droplets.list_neighbors,
+ gpu_droplets.list_neighbors,
)
self.list_snapshots = async_to_streamed_response_wrapper(
- droplets.list_snapshots,
+ gpu_droplets.list_snapshots,
)
@cached_property
def backups(self) -> AsyncBackupsResourceWithStreamingResponse:
- return AsyncBackupsResourceWithStreamingResponse(self._droplets.backups)
+ return AsyncBackupsResourceWithStreamingResponse(self._gpu_droplets.backups)
@cached_property
def actions(self) -> AsyncActionsResourceWithStreamingResponse:
- return AsyncActionsResourceWithStreamingResponse(self._droplets.actions)
+ return AsyncActionsResourceWithStreamingResponse(self._gpu_droplets.actions)
@cached_property
def destroy_with_associated_resources(self) -> AsyncDestroyWithAssociatedResourcesResourceWithStreamingResponse:
return AsyncDestroyWithAssociatedResourcesResourceWithStreamingResponse(
- self._droplets.destroy_with_associated_resources
+ self._gpu_droplets.destroy_with_associated_resources
)
@cached_property
def autoscale(self) -> AsyncAutoscaleResourceWithStreamingResponse:
- return AsyncAutoscaleResourceWithStreamingResponse(self._droplets.autoscale)
+ return AsyncAutoscaleResourceWithStreamingResponse(self._gpu_droplets.autoscale)
+
+ @cached_property
+ def firewalls(self) -> AsyncFirewallsResourceWithStreamingResponse:
+ return AsyncFirewallsResourceWithStreamingResponse(self._gpu_droplets.firewalls)
+
+ @cached_property
+ def floating_ips(self) -> AsyncFloatingIPsResourceWithStreamingResponse:
+ return AsyncFloatingIPsResourceWithStreamingResponse(self._gpu_droplets.floating_ips)
+
+ @cached_property
+ def images(self) -> AsyncImagesResourceWithStreamingResponse:
+ return AsyncImagesResourceWithStreamingResponse(self._gpu_droplets.images)
+
+ @cached_property
+ def load_balancers(self) -> AsyncLoadBalancersResourceWithStreamingResponse:
+ return AsyncLoadBalancersResourceWithStreamingResponse(self._gpu_droplets.load_balancers)
+
+ @cached_property
+ def sizes(self) -> AsyncSizesResourceWithStreamingResponse:
+ return AsyncSizesResourceWithStreamingResponse(self._gpu_droplets.sizes)
+
+ @cached_property
+ def snapshots(self) -> AsyncSnapshotsResourceWithStreamingResponse:
+ return AsyncSnapshotsResourceWithStreamingResponse(self._gpu_droplets.snapshots)
+
+ @cached_property
+ def volumes(self) -> AsyncVolumesResourceWithStreamingResponse:
+ return AsyncVolumesResourceWithStreamingResponse(self._gpu_droplets.volumes)
+
+ @cached_property
+ def account(self) -> AsyncAccountResourceWithStreamingResponse:
+ return AsyncAccountResourceWithStreamingResponse(self._gpu_droplets.account)
diff --git a/src/do_gradientai/resources/images/__init__.py b/src/do_gradientai/resources/gpu_droplets/images/__init__.py
similarity index 100%
rename from src/do_gradientai/resources/images/__init__.py
rename to src/do_gradientai/resources/gpu_droplets/images/__init__.py
diff --git a/src/do_gradientai/resources/images/actions.py b/src/do_gradientai/resources/gpu_droplets/images/actions.py
similarity index 97%
rename from src/do_gradientai/resources/images/actions.py
rename to src/do_gradientai/resources/gpu_droplets/images/actions.py
index 3f4b4384..9428418b 100644
--- a/src/do_gradientai/resources/images/actions.py
+++ b/src/do_gradientai/resources/gpu_droplets/images/actions.py
@@ -6,20 +6,20 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
-from ..._utils import required_args, maybe_transform, async_maybe_transform
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ...._utils import required_args, maybe_transform, async_maybe_transform
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
-from ...types.images import action_create_params
-from ...types.shared.action import Action
-from ...types.images.action_list_response import ActionListResponse
+from ...._base_client import make_request_options
+from ....types.shared.action import Action
+from ....types.gpu_droplets.images import action_create_params
+from ....types.gpu_droplets.images.action_list_response import ActionListResponse
__all__ = ["ActionsResource", "AsyncActionsResource"]
diff --git a/src/do_gradientai/resources/images/images.py b/src/do_gradientai/resources/gpu_droplets/images/images.py
similarity index 97%
rename from src/do_gradientai/resources/images/images.py
rename to src/do_gradientai/resources/gpu_droplets/images/images.py
index f75a1e73..2c70e793 100644
--- a/src/do_gradientai/resources/images/images.py
+++ b/src/do_gradientai/resources/gpu_droplets/images/images.py
@@ -7,7 +7,6 @@
import httpx
-from ...types import image_list_params, image_create_params, image_update_params
from .actions import (
ActionsResource,
AsyncActionsResource,
@@ -16,21 +15,22 @@
ActionsResourceWithStreamingResponse,
AsyncActionsResourceWithStreamingResponse,
)
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
-from ..._utils import maybe_transform, async_maybe_transform
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ...._utils import maybe_transform, async_maybe_transform
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
-from ...types.image_list_response import ImageListResponse
-from ...types.image_create_response import ImageCreateResponse
-from ...types.image_update_response import ImageUpdateResponse
-from ...types.image_retrieve_response import ImageRetrieveResponse
+from ...._base_client import make_request_options
+from ....types.gpu_droplets import image_list_params, image_create_params, image_update_params
+from ....types.gpu_droplets.image_list_response import ImageListResponse
+from ....types.gpu_droplets.image_create_response import ImageCreateResponse
+from ....types.gpu_droplets.image_update_response import ImageUpdateResponse
+from ....types.gpu_droplets.image_retrieve_response import ImageRetrieveResponse
__all__ = ["ImagesResource", "AsyncImagesResource"]
diff --git a/src/do_gradientai/resources/load_balancers/__init__.py b/src/do_gradientai/resources/gpu_droplets/load_balancers/__init__.py
similarity index 100%
rename from src/do_gradientai/resources/load_balancers/__init__.py
rename to src/do_gradientai/resources/gpu_droplets/load_balancers/__init__.py
diff --git a/src/do_gradientai/resources/load_balancers/droplets.py b/src/do_gradientai/resources/gpu_droplets/load_balancers/droplets.py
similarity index 96%
rename from src/do_gradientai/resources/load_balancers/droplets.py
rename to src/do_gradientai/resources/gpu_droplets/load_balancers/droplets.py
index 4eb0ed60..2553a729 100644
--- a/src/do_gradientai/resources/load_balancers/droplets.py
+++ b/src/do_gradientai/resources/gpu_droplets/load_balancers/droplets.py
@@ -6,18 +6,18 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
-from ..._utils import maybe_transform, async_maybe_transform
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ...._utils import maybe_transform, async_maybe_transform
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
-from ...types.load_balancers import droplet_add_params, droplet_remove_params
+from ...._base_client import make_request_options
+from ....types.gpu_droplets.load_balancers import droplet_add_params, droplet_remove_params
__all__ = ["DropletsResource", "AsyncDropletsResource"]
diff --git a/src/do_gradientai/resources/load_balancers/forwarding_rules.py b/src/do_gradientai/resources/gpu_droplets/load_balancers/forwarding_rules.py
similarity index 96%
rename from src/do_gradientai/resources/load_balancers/forwarding_rules.py
rename to src/do_gradientai/resources/gpu_droplets/load_balancers/forwarding_rules.py
index 6e9757c5..2ba20f88 100644
--- a/src/do_gradientai/resources/load_balancers/forwarding_rules.py
+++ b/src/do_gradientai/resources/gpu_droplets/load_balancers/forwarding_rules.py
@@ -6,19 +6,19 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
-from ..._utils import maybe_transform, async_maybe_transform
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ...._utils import maybe_transform, async_maybe_transform
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
-from ...types.load_balancers import forwarding_rule_add_params, forwarding_rule_remove_params
-from ...types.forwarding_rule_param import ForwardingRuleParam
+from ...._base_client import make_request_options
+from ....types.gpu_droplets.load_balancers import forwarding_rule_add_params, forwarding_rule_remove_params
+from ....types.gpu_droplets.forwarding_rule_param import ForwardingRuleParam
__all__ = ["ForwardingRulesResource", "AsyncForwardingRulesResource"]
diff --git a/src/do_gradientai/resources/load_balancers/load_balancers.py b/src/do_gradientai/resources/gpu_droplets/load_balancers/load_balancers.py
similarity index 98%
rename from src/do_gradientai/resources/load_balancers/load_balancers.py
rename to src/do_gradientai/resources/gpu_droplets/load_balancers/load_balancers.py
index 12b9097c..c724b6d9 100644
--- a/src/do_gradientai/resources/load_balancers/load_balancers.py
+++ b/src/do_gradientai/resources/gpu_droplets/load_balancers/load_balancers.py
@@ -7,13 +7,6 @@
import httpx
-from ...types import (
- load_balancer_list_params,
- load_balancer_create_params,
- load_balancer_update_params,
-)
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
-from ..._utils import required_args, maybe_transform, async_maybe_transform
from .droplets import (
DropletsResource,
AsyncDropletsResource,
@@ -22,15 +15,17 @@
DropletsResourceWithStreamingResponse,
AsyncDropletsResourceWithStreamingResponse,
)
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ...._utils import required_args, maybe_transform, async_maybe_transform
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
+from ...._base_client import make_request_options
from .forwarding_rules import (
ForwardingRulesResource,
AsyncForwardingRulesResource,
@@ -39,16 +34,21 @@
ForwardingRulesResourceWithStreamingResponse,
AsyncForwardingRulesResourceWithStreamingResponse,
)
-from ...types.domains_param import DomainsParam
-from ...types.lb_firewall_param import LbFirewallParam
-from ...types.glb_settings_param import GlbSettingsParam
-from ...types.health_check_param import HealthCheckParam
-from ...types.forwarding_rule_param import ForwardingRuleParam
-from ...types.sticky_sessions_param import StickySessionsParam
-from ...types.load_balancer_list_response import LoadBalancerListResponse
-from ...types.load_balancer_create_response import LoadBalancerCreateResponse
-from ...types.load_balancer_update_response import LoadBalancerUpdateResponse
-from ...types.load_balancer_retrieve_response import LoadBalancerRetrieveResponse
+from ....types.gpu_droplets import (
+ load_balancer_list_params,
+ load_balancer_create_params,
+ load_balancer_update_params,
+)
+from ....types.gpu_droplets.domains_param import DomainsParam
+from ....types.gpu_droplets.lb_firewall_param import LbFirewallParam
+from ....types.gpu_droplets.glb_settings_param import GlbSettingsParam
+from ....types.gpu_droplets.health_check_param import HealthCheckParam
+from ....types.gpu_droplets.forwarding_rule_param import ForwardingRuleParam
+from ....types.gpu_droplets.sticky_sessions_param import StickySessionsParam
+from ....types.gpu_droplets.load_balancer_list_response import LoadBalancerListResponse
+from ....types.gpu_droplets.load_balancer_create_response import LoadBalancerCreateResponse
+from ....types.gpu_droplets.load_balancer_update_response import LoadBalancerUpdateResponse
+from ....types.gpu_droplets.load_balancer_retrieve_response import LoadBalancerRetrieveResponse
__all__ = ["LoadBalancersResource", "AsyncLoadBalancersResource"]
diff --git a/src/do_gradientai/resources/sizes.py b/src/do_gradientai/resources/gpu_droplets/sizes.py
similarity index 94%
rename from src/do_gradientai/resources/sizes.py
rename to src/do_gradientai/resources/gpu_droplets/sizes.py
index a432920e..e37116c7 100644
--- a/src/do_gradientai/resources/sizes.py
+++ b/src/do_gradientai/resources/gpu_droplets/sizes.py
@@ -4,19 +4,19 @@
import httpx
-from ..types import size_list_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven
-from .._utils import maybe_transform, async_maybe_transform
-from .._compat import cached_property
-from .._resource import SyncAPIResource, AsyncAPIResource
-from .._response import (
+from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ..._utils import maybe_transform, async_maybe_transform
+from ..._compat import cached_property
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ..._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from .._base_client import make_request_options
-from ..types.size_list_response import SizeListResponse
+from ..._base_client import make_request_options
+from ...types.gpu_droplets import size_list_params
+from ...types.gpu_droplets.size_list_response import SizeListResponse
__all__ = ["SizesResource", "AsyncSizesResource"]
diff --git a/src/do_gradientai/resources/snapshots.py b/src/do_gradientai/resources/gpu_droplets/snapshots.py
similarity index 96%
rename from src/do_gradientai/resources/snapshots.py
rename to src/do_gradientai/resources/gpu_droplets/snapshots.py
index 2c4d0060..081ab5b8 100644
--- a/src/do_gradientai/resources/snapshots.py
+++ b/src/do_gradientai/resources/gpu_droplets/snapshots.py
@@ -7,20 +7,20 @@
import httpx
-from ..types import snapshot_list_params
-from .._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
-from .._utils import maybe_transform, async_maybe_transform
-from .._compat import cached_property
-from .._resource import SyncAPIResource, AsyncAPIResource
-from .._response import (
+from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ..._utils import maybe_transform, async_maybe_transform
+from ..._compat import cached_property
+from ..._resource import SyncAPIResource, AsyncAPIResource
+from ..._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from .._base_client import make_request_options
-from ..types.snapshot_list_response import SnapshotListResponse
-from ..types.snapshot_retrieve_response import SnapshotRetrieveResponse
+from ..._base_client import make_request_options
+from ...types.gpu_droplets import snapshot_list_params
+from ...types.gpu_droplets.snapshot_list_response import SnapshotListResponse
+from ...types.gpu_droplets.snapshot_retrieve_response import SnapshotRetrieveResponse
__all__ = ["SnapshotsResource", "AsyncSnapshotsResource"]
diff --git a/src/do_gradientai/resources/volumes/__init__.py b/src/do_gradientai/resources/gpu_droplets/volumes/__init__.py
similarity index 100%
rename from src/do_gradientai/resources/volumes/__init__.py
rename to src/do_gradientai/resources/gpu_droplets/volumes/__init__.py
diff --git a/src/do_gradientai/resources/volumes/actions.py b/src/do_gradientai/resources/gpu_droplets/volumes/actions.py
similarity index 98%
rename from src/do_gradientai/resources/volumes/actions.py
rename to src/do_gradientai/resources/gpu_droplets/volumes/actions.py
index 08b56e53..9d925397 100644
--- a/src/do_gradientai/resources/volumes/actions.py
+++ b/src/do_gradientai/resources/gpu_droplets/volumes/actions.py
@@ -7,27 +7,27 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
-from ..._utils import required_args, maybe_transform, async_maybe_transform
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._types import NOT_GIVEN, Body, Query, Headers, NotGiven
+from ...._utils import required_args, maybe_transform, async_maybe_transform
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
-from ...types.volumes import (
+from ...._base_client import make_request_options
+from ....types.gpu_droplets.volumes import (
action_list_params,
action_retrieve_params,
action_initiate_by_id_params,
action_initiate_by_name_params,
)
-from ...types.volumes.action_list_response import ActionListResponse
-from ...types.volumes.action_retrieve_response import ActionRetrieveResponse
-from ...types.volumes.action_initiate_by_id_response import ActionInitiateByIDResponse
-from ...types.volumes.action_initiate_by_name_response import ActionInitiateByNameResponse
+from ....types.gpu_droplets.volumes.action_list_response import ActionListResponse
+from ....types.gpu_droplets.volumes.action_retrieve_response import ActionRetrieveResponse
+from ....types.gpu_droplets.volumes.action_initiate_by_id_response import ActionInitiateByIDResponse
+from ....types.gpu_droplets.volumes.action_initiate_by_name_response import ActionInitiateByNameResponse
__all__ = ["ActionsResource", "AsyncActionsResource"]
diff --git a/src/do_gradientai/resources/volumes/snapshots.py b/src/do_gradientai/resources/gpu_droplets/volumes/snapshots.py
similarity index 96%
rename from src/do_gradientai/resources/volumes/snapshots.py
rename to src/do_gradientai/resources/gpu_droplets/volumes/snapshots.py
index 9bb50070..766d9a3a 100644
--- a/src/do_gradientai/resources/volumes/snapshots.py
+++ b/src/do_gradientai/resources/gpu_droplets/volumes/snapshots.py
@@ -6,21 +6,21 @@
import httpx
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
-from ..._utils import maybe_transform, async_maybe_transform
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ...._utils import maybe_transform, async_maybe_transform
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
-from ...types.volumes import snapshot_list_params, snapshot_create_params
-from ...types.volumes.snapshot_list_response import SnapshotListResponse
-from ...types.volumes.snapshot_create_response import SnapshotCreateResponse
-from ...types.volumes.snapshot_retrieve_response import SnapshotRetrieveResponse
+from ...._base_client import make_request_options
+from ....types.gpu_droplets.volumes import snapshot_list_params, snapshot_create_params
+from ....types.gpu_droplets.volumes.snapshot_list_response import SnapshotListResponse
+from ....types.gpu_droplets.volumes.snapshot_create_response import SnapshotCreateResponse
+from ....types.gpu_droplets.volumes.snapshot_retrieve_response import SnapshotRetrieveResponse
__all__ = ["SnapshotsResource", "AsyncSnapshotsResource"]
diff --git a/src/do_gradientai/resources/volumes/volumes.py b/src/do_gradientai/resources/gpu_droplets/volumes/volumes.py
similarity index 98%
rename from src/do_gradientai/resources/volumes/volumes.py
rename to src/do_gradientai/resources/gpu_droplets/volumes/volumes.py
index 04df1bce..efd1d4ae 100644
--- a/src/do_gradientai/resources/volumes/volumes.py
+++ b/src/do_gradientai/resources/gpu_droplets/volumes/volumes.py
@@ -7,7 +7,6 @@
import httpx
-from ...types import volume_list_params, volume_create_params, volume_delete_by_name_params
from .actions import (
ActionsResource,
AsyncActionsResource,
@@ -16,9 +15,8 @@
ActionsResourceWithStreamingResponse,
AsyncActionsResourceWithStreamingResponse,
)
-from ..._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
-from ..._utils import required_args, maybe_transform, async_maybe_transform
-from ..._compat import cached_property
+from ...._types import NOT_GIVEN, Body, Query, Headers, NoneType, NotGiven
+from ...._utils import required_args, maybe_transform, async_maybe_transform
from .snapshots import (
SnapshotsResource,
AsyncSnapshotsResource,
@@ -27,17 +25,19 @@
SnapshotsResourceWithStreamingResponse,
AsyncSnapshotsResourceWithStreamingResponse,
)
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
+from ...._compat import cached_property
+from ...._resource import SyncAPIResource, AsyncAPIResource
+from ...._response import (
to_raw_response_wrapper,
to_streamed_response_wrapper,
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
-from ...types.volume_list_response import VolumeListResponse
-from ...types.volume_create_response import VolumeCreateResponse
-from ...types.volume_retrieve_response import VolumeRetrieveResponse
+from ...._base_client import make_request_options
+from ....types.gpu_droplets import volume_list_params, volume_create_params, volume_delete_by_name_params
+from ....types.gpu_droplets.volume_list_response import VolumeListResponse
+from ....types.gpu_droplets.volume_create_response import VolumeCreateResponse
+from ....types.gpu_droplets.volume_retrieve_response import VolumeRetrieveResponse
__all__ = ["VolumesResource", "AsyncVolumesResource"]
diff --git a/src/do_gradientai/types/__init__.py b/src/do_gradientai/types/__init__.py
index e0c8310d..20747fb3 100644
--- a/src/do_gradientai/types/__init__.py
+++ b/src/do_gradientai/types/__init__.py
@@ -22,118 +22,61 @@
ForwardLinks as ForwardLinks,
Subscription as Subscription,
BackwardLinks as BackwardLinks,
- RepositoryTag as RepositoryTag,
MetaProperties as MetaProperties,
- RepositoryBlob as RepositoryBlob,
CompletionUsage as CompletionUsage,
GarbageCollection as GarbageCollection,
FirewallRuleTarget as FirewallRuleTarget,
- RepositoryManifest as RepositoryManifest,
ChatCompletionChunk as ChatCompletionChunk,
SubscriptionTierBase as SubscriptionTierBase,
DropletNextBackupWindow as DropletNextBackupWindow,
ChatCompletionTokenLogprob as ChatCompletionTokenLogprob,
)
-from .domains import Domains as Domains
-from .firewall import Firewall as Firewall
from .api_agent import APIAgent as APIAgent
from .api_model import APIModel as APIModel
-from .floating_ip import FloatingIP as FloatingIP
-from .lb_firewall import LbFirewall as LbFirewall
-from .glb_settings import GlbSettings as GlbSettings
-from .health_check import HealthCheck as HealthCheck
from .api_agreement import APIAgreement as APIAgreement
from .api_workspace import APIWorkspace as APIWorkspace
-from .domains_param import DomainsParam as DomainsParam
-from .load_balancer import LoadBalancer as LoadBalancer
-from .firewall_param import FirewallParam as FirewallParam
from .api_agent_model import APIAgentModel as APIAgentModel
-from .forwarding_rule import ForwardingRule as ForwardingRule
-from .sticky_sessions import StickySessions as StickySessions
-from .size_list_params import SizeListParams as SizeListParams
from .agent_list_params import AgentListParams as AgentListParams
from .api_model_version import APIModelVersion as APIModelVersion
-from .image_list_params import ImageListParams as ImageListParams
-from .lb_firewall_param import LbFirewallParam as LbFirewallParam
from .api_knowledge_base import APIKnowledgeBase as APIKnowledgeBase
-from .glb_settings_param import GlbSettingsParam as GlbSettingsParam
-from .health_check_param import HealthCheckParam as HealthCheckParam
from .region_list_params import RegionListParams as RegionListParams
-from .size_list_response import SizeListResponse as SizeListResponse
-from .volume_list_params import VolumeListParams as VolumeListParams
from .agent_create_params import AgentCreateParams as AgentCreateParams
from .agent_list_response import AgentListResponse as AgentListResponse
from .agent_update_params import AgentUpdateParams as AgentUpdateParams
-from .droplet_list_params import DropletListParams as DropletListParams
-from .image_create_params import ImageCreateParams as ImageCreateParams
-from .image_list_response import ImageListResponse as ImageListResponse
-from .image_update_params import ImageUpdateParams as ImageUpdateParams
from .model_list_response import ModelListResponse as ModelListResponse
from .api_retrieval_method import APIRetrievalMethod as APIRetrievalMethod
-from .firewall_list_params import FirewallListParams as FirewallListParams
from .region_list_response import RegionListResponse as RegionListResponse
-from .snapshot_list_params import SnapshotListParams as SnapshotListParams
-from .volume_create_params import VolumeCreateParams as VolumeCreateParams
-from .volume_list_response import VolumeListResponse as VolumeListResponse
from .agent_create_response import AgentCreateResponse as AgentCreateResponse
from .agent_delete_response import AgentDeleteResponse as AgentDeleteResponse
from .agent_update_response import AgentUpdateResponse as AgentUpdateResponse
from .droplet_backup_policy import DropletBackupPolicy as DropletBackupPolicy
-from .droplet_create_params import DropletCreateParams as DropletCreateParams
-from .droplet_list_response import DropletListResponse as DropletListResponse
-from .forwarding_rule_param import ForwardingRuleParam as ForwardingRuleParam
-from .image_create_response import ImageCreateResponse as ImageCreateResponse
-from .image_update_response import ImageUpdateResponse as ImageUpdateResponse
-from .sticky_sessions_param import StickySessionsParam as StickySessionsParam
from .api_agent_api_key_info import APIAgentAPIKeyInfo as APIAgentAPIKeyInfo
-from .firewall_create_params import FirewallCreateParams as FirewallCreateParams
-from .firewall_list_response import FirewallListResponse as FirewallListResponse
-from .firewall_update_params import FirewallUpdateParams as FirewallUpdateParams
-from .snapshot_list_response import SnapshotListResponse as SnapshotListResponse
-from .volume_create_response import VolumeCreateResponse as VolumeCreateResponse
from .agent_retrieve_response import AgentRetrieveResponse as AgentRetrieveResponse
from .api_openai_api_key_info import APIOpenAIAPIKeyInfo as APIOpenAIAPIKeyInfo
-from .droplet_create_response import DropletCreateResponse as DropletCreateResponse
-from .floating_ip_list_params import FloatingIPListParams as FloatingIPListParams
-from .image_retrieve_response import ImageRetrieveResponse as ImageRetrieveResponse
+from .gpu_droplet_list_params import GPUDropletListParams as GPUDropletListParams
from .model_retrieve_response import ModelRetrieveResponse as ModelRetrieveResponse
-from .firewall_create_response import FirewallCreateResponse as FirewallCreateResponse
-from .firewall_update_response import FirewallUpdateResponse as FirewallUpdateResponse
-from .volume_retrieve_response import VolumeRetrieveResponse as VolumeRetrieveResponse
-from .account_retrieve_response import AccountRetrieveResponse as AccountRetrieveResponse
from .api_deployment_visibility import APIDeploymentVisibility as APIDeploymentVisibility
-from .droplet_retrieve_response import DropletRetrieveResponse as DropletRetrieveResponse
-from .floating_ip_create_params import FloatingIPCreateParams as FloatingIPCreateParams
-from .floating_ip_list_response import FloatingIPListResponse as FloatingIPListResponse
-from .load_balancer_list_params import LoadBalancerListParams as LoadBalancerListParams
+from .gpu_droplet_create_params import GPUDropletCreateParams as GPUDropletCreateParams
+from .gpu_droplet_list_response import GPUDropletListResponse as GPUDropletListResponse
from .agent_update_status_params import AgentUpdateStatusParams as AgentUpdateStatusParams
from .api_anthropic_api_key_info import APIAnthropicAPIKeyInfo as APIAnthropicAPIKeyInfo
-from .firewall_retrieve_response import FirewallRetrieveResponse as FirewallRetrieveResponse
from .knowledge_base_list_params import KnowledgeBaseListParams as KnowledgeBaseListParams
-from .snapshot_retrieve_response import SnapshotRetrieveResponse as SnapshotRetrieveResponse
from .droplet_backup_policy_param import DropletBackupPolicyParam as DropletBackupPolicyParam
-from .droplet_list_kernels_params import DropletListKernelsParams as DropletListKernelsParams
-from .floating_ip_create_response import FloatingIPCreateResponse as FloatingIPCreateResponse
-from .load_balancer_create_params import LoadBalancerCreateParams as LoadBalancerCreateParams
-from .load_balancer_list_response import LoadBalancerListResponse as LoadBalancerListResponse
-from .load_balancer_update_params import LoadBalancerUpdateParams as LoadBalancerUpdateParams
+from .gpu_droplet_create_response import GPUDropletCreateResponse as GPUDropletCreateResponse
from .agent_update_status_response import AgentUpdateStatusResponse as AgentUpdateStatusResponse
-from .droplet_delete_by_tag_params import DropletDeleteByTagParams as DropletDeleteByTagParams
from .knowledge_base_create_params import KnowledgeBaseCreateParams as KnowledgeBaseCreateParams
from .knowledge_base_list_response import KnowledgeBaseListResponse as KnowledgeBaseListResponse
from .knowledge_base_update_params import KnowledgeBaseUpdateParams as KnowledgeBaseUpdateParams
-from .volume_delete_by_name_params import VolumeDeleteByNameParams as VolumeDeleteByNameParams
-from .droplet_list_firewalls_params import DropletListFirewallsParams as DropletListFirewallsParams
-from .droplet_list_kernels_response import DropletListKernelsResponse as DropletListKernelsResponse
-from .droplet_list_snapshots_params import DropletListSnapshotsParams as DropletListSnapshotsParams
-from .floating_ip_retrieve_response import FloatingIPRetrieveResponse as FloatingIPRetrieveResponse
-from .load_balancer_create_response import LoadBalancerCreateResponse as LoadBalancerCreateResponse
-from .load_balancer_update_response import LoadBalancerUpdateResponse as LoadBalancerUpdateResponse
+from .gpu_droplet_retrieve_response import GPUDropletRetrieveResponse as GPUDropletRetrieveResponse
from .knowledge_base_create_response import KnowledgeBaseCreateResponse as KnowledgeBaseCreateResponse
from .knowledge_base_delete_response import KnowledgeBaseDeleteResponse as KnowledgeBaseDeleteResponse
from .knowledge_base_update_response import KnowledgeBaseUpdateResponse as KnowledgeBaseUpdateResponse
-from .droplet_list_firewalls_response import DropletListFirewallsResponse as DropletListFirewallsResponse
-from .droplet_list_neighbors_response import DropletListNeighborsResponse as DropletListNeighborsResponse
-from .droplet_list_snapshots_response import DropletListSnapshotsResponse as DropletListSnapshotsResponse
-from .load_balancer_retrieve_response import LoadBalancerRetrieveResponse as LoadBalancerRetrieveResponse
+from .gpu_droplet_list_kernels_params import GPUDropletListKernelsParams as GPUDropletListKernelsParams
+from .gpu_droplet_delete_by_tag_params import GPUDropletDeleteByTagParams as GPUDropletDeleteByTagParams
from .knowledge_base_retrieve_response import KnowledgeBaseRetrieveResponse as KnowledgeBaseRetrieveResponse
+from .gpu_droplet_list_firewalls_params import GPUDropletListFirewallsParams as GPUDropletListFirewallsParams
+from .gpu_droplet_list_kernels_response import GPUDropletListKernelsResponse as GPUDropletListKernelsResponse
+from .gpu_droplet_list_snapshots_params import GPUDropletListSnapshotsParams as GPUDropletListSnapshotsParams
+from .gpu_droplet_list_firewalls_response import GPUDropletListFirewallsResponse as GPUDropletListFirewallsResponse
+from .gpu_droplet_list_neighbors_response import GPUDropletListNeighborsResponse as GPUDropletListNeighborsResponse
+from .gpu_droplet_list_snapshots_response import GPUDropletListSnapshotsResponse as GPUDropletListSnapshotsResponse
diff --git a/src/do_gradientai/types/account_retrieve_response.py b/src/do_gradientai/types/account_retrieve_response.py
deleted file mode 100644
index 630f33e6..00000000
--- a/src/do_gradientai/types/account_retrieve_response.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Optional
-from typing_extensions import Literal
-
-from .._models import BaseModel
-
-__all__ = ["AccountRetrieveResponse", "Account", "AccountTeam"]
-
-
-class AccountTeam(BaseModel):
- name: Optional[str] = None
- """The name for the current team."""
-
- uuid: Optional[str] = None
- """The unique universal identifier for the current team."""
-
-
-class Account(BaseModel):
- droplet_limit: int
- """The total number of Droplets current user or team may have active at one time.
-
- Requires `droplet:read` scope.
- """
-
- email: str
- """The email address used by the current user to register for DigitalOcean."""
-
- email_verified: bool
- """If true, the user has verified their account via email. False otherwise."""
-
- floating_ip_limit: int
- """The total number of Floating IPs the current user or team may have.
-
- Requires `reserved_ip:read` scope.
- """
-
- status: Literal["active", "warning", "locked"]
- """This value is one of "active", "warning" or "locked"."""
-
- status_message: str
- """A human-readable message giving more details about the status of the account."""
-
- uuid: str
- """The unique universal identifier for the current user."""
-
- name: Optional[str] = None
- """The display name for the current user."""
-
- team: Optional[AccountTeam] = None
- """When authorized in a team context, includes information about the current team."""
-
-
-class AccountRetrieveResponse(BaseModel):
- account: Optional[Account] = None
diff --git a/src/do_gradientai/types/droplets/__init__.py b/src/do_gradientai/types/droplets/__init__.py
deleted file mode 100644
index 4313caa9..00000000
--- a/src/do_gradientai/types/droplets/__init__.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from .autoscale_pool import AutoscalePool as AutoscalePool
-from .action_list_params import ActionListParams as ActionListParams
-from .backup_list_params import BackupListParams as BackupListParams
-from .associated_resource import AssociatedResource as AssociatedResource
-from .current_utilization import CurrentUtilization as CurrentUtilization
-from .action_list_response import ActionListResponse as ActionListResponse
-from .backup_list_response import BackupListResponse as BackupListResponse
-from .autoscale_list_params import AutoscaleListParams as AutoscaleListParams
-from .action_initiate_params import ActionInitiateParams as ActionInitiateParams
-from .autoscale_create_params import AutoscaleCreateParams as AutoscaleCreateParams
-from .autoscale_list_response import AutoscaleListResponse as AutoscaleListResponse
-from .autoscale_update_params import AutoscaleUpdateParams as AutoscaleUpdateParams
-from .action_initiate_response import ActionInitiateResponse as ActionInitiateResponse
-from .action_retrieve_response import ActionRetrieveResponse as ActionRetrieveResponse
-from .autoscale_create_response import AutoscaleCreateResponse as AutoscaleCreateResponse
-from .autoscale_update_response import AutoscaleUpdateResponse as AutoscaleUpdateResponse
-from .action_bulk_initiate_params import ActionBulkInitiateParams as ActionBulkInitiateParams
-from .autoscale_retrieve_response import AutoscaleRetrieveResponse as AutoscaleRetrieveResponse
-from .backup_list_policies_params import BackupListPoliciesParams as BackupListPoliciesParams
-from .autoscale_pool_static_config import AutoscalePoolStaticConfig as AutoscalePoolStaticConfig
-from .action_bulk_initiate_response import ActionBulkInitiateResponse as ActionBulkInitiateResponse
-from .autoscale_list_history_params import AutoscaleListHistoryParams as AutoscaleListHistoryParams
-from .autoscale_list_members_params import AutoscaleListMembersParams as AutoscaleListMembersParams
-from .autoscale_pool_dynamic_config import AutoscalePoolDynamicConfig as AutoscalePoolDynamicConfig
-from .backup_list_policies_response import BackupListPoliciesResponse as BackupListPoliciesResponse
-from .destroyed_associated_resource import DestroyedAssociatedResource as DestroyedAssociatedResource
-from .autoscale_list_history_response import AutoscaleListHistoryResponse as AutoscaleListHistoryResponse
-from .autoscale_list_members_response import AutoscaleListMembersResponse as AutoscaleListMembersResponse
-from .autoscale_pool_droplet_template import AutoscalePoolDropletTemplate as AutoscalePoolDropletTemplate
-from .backup_retrieve_policy_response import BackupRetrievePolicyResponse as BackupRetrievePolicyResponse
-from .autoscale_pool_static_config_param import AutoscalePoolStaticConfigParam as AutoscalePoolStaticConfigParam
-from .autoscale_pool_dynamic_config_param import AutoscalePoolDynamicConfigParam as AutoscalePoolDynamicConfigParam
-from .autoscale_pool_droplet_template_param import (
- AutoscalePoolDropletTemplateParam as AutoscalePoolDropletTemplateParam,
-)
-from .backup_list_supported_policies_response import (
- BackupListSupportedPoliciesResponse as BackupListSupportedPoliciesResponse,
-)
-from .destroy_with_associated_resource_list_response import (
- DestroyWithAssociatedResourceListResponse as DestroyWithAssociatedResourceListResponse,
-)
-from .destroy_with_associated_resource_check_status_response import (
- DestroyWithAssociatedResourceCheckStatusResponse as DestroyWithAssociatedResourceCheckStatusResponse,
-)
-from .destroy_with_associated_resource_delete_selective_params import (
- DestroyWithAssociatedResourceDeleteSelectiveParams as DestroyWithAssociatedResourceDeleteSelectiveParams,
-)
diff --git a/src/do_gradientai/types/droplet_create_params.py b/src/do_gradientai/types/gpu_droplet_create_params.py
similarity index 97%
rename from src/do_gradientai/types/droplet_create_params.py
rename to src/do_gradientai/types/gpu_droplet_create_params.py
index 750d7c11..f38661fb 100644
--- a/src/do_gradientai/types/droplet_create_params.py
+++ b/src/do_gradientai/types/gpu_droplet_create_params.py
@@ -7,7 +7,7 @@
from .droplet_backup_policy_param import DropletBackupPolicyParam
-__all__ = ["DropletCreateParams", "DropletSingleCreate", "DropletMultiCreate"]
+__all__ = ["GPUDropletCreateParams", "DropletSingleCreate", "DropletMultiCreate"]
class DropletSingleCreate(TypedDict, total=False):
@@ -210,4 +210,4 @@ class DropletMultiCreate(TypedDict, total=False):
"""
-DropletCreateParams: TypeAlias = Union[DropletSingleCreate, DropletMultiCreate]
+GPUDropletCreateParams: TypeAlias = Union[DropletSingleCreate, DropletMultiCreate]
diff --git a/src/do_gradientai/types/droplet_create_response.py b/src/do_gradientai/types/gpu_droplet_create_response.py
similarity index 87%
rename from src/do_gradientai/types/droplet_create_response.py
rename to src/do_gradientai/types/gpu_droplet_create_response.py
index f69eb592..72fafb96 100644
--- a/src/do_gradientai/types/droplet_create_response.py
+++ b/src/do_gradientai/types/gpu_droplet_create_response.py
@@ -8,7 +8,7 @@
from .shared.action_link import ActionLink
__all__ = [
- "DropletCreateResponse",
+ "GPUDropletCreateResponse",
"SingleDropletResponse",
"SingleDropletResponseLinks",
"MultipleDropletResponse",
@@ -36,4 +36,4 @@ class MultipleDropletResponse(BaseModel):
links: MultipleDropletResponseLinks
-DropletCreateResponse: TypeAlias = Union[SingleDropletResponse, MultipleDropletResponse]
+GPUDropletCreateResponse: TypeAlias = Union[SingleDropletResponse, MultipleDropletResponse]
diff --git a/src/do_gradientai/types/droplet_delete_by_tag_params.py b/src/do_gradientai/types/gpu_droplet_delete_by_tag_params.py
similarity index 71%
rename from src/do_gradientai/types/droplet_delete_by_tag_params.py
rename to src/do_gradientai/types/gpu_droplet_delete_by_tag_params.py
index 820b0db6..bc303125 100644
--- a/src/do_gradientai/types/droplet_delete_by_tag_params.py
+++ b/src/do_gradientai/types/gpu_droplet_delete_by_tag_params.py
@@ -4,9 +4,9 @@
from typing_extensions import Required, TypedDict
-__all__ = ["DropletDeleteByTagParams"]
+__all__ = ["GPUDropletDeleteByTagParams"]
-class DropletDeleteByTagParams(TypedDict, total=False):
+class GPUDropletDeleteByTagParams(TypedDict, total=False):
tag_name: Required[str]
"""Specifies Droplets to be deleted by tag."""
diff --git a/src/do_gradientai/types/droplet_list_snapshots_params.py b/src/do_gradientai/types/gpu_droplet_list_firewalls_params.py
similarity index 73%
rename from src/do_gradientai/types/droplet_list_snapshots_params.py
rename to src/do_gradientai/types/gpu_droplet_list_firewalls_params.py
index 9d05be15..1f0111d8 100644
--- a/src/do_gradientai/types/droplet_list_snapshots_params.py
+++ b/src/do_gradientai/types/gpu_droplet_list_firewalls_params.py
@@ -4,10 +4,10 @@
from typing_extensions import TypedDict
-__all__ = ["DropletListSnapshotsParams"]
+__all__ = ["GPUDropletListFirewallsParams"]
-class DropletListSnapshotsParams(TypedDict, total=False):
+class GPUDropletListFirewallsParams(TypedDict, total=False):
page: int
"""Which 'page' of paginated results to return."""
diff --git a/src/do_gradientai/types/droplet_list_firewalls_response.py b/src/do_gradientai/types/gpu_droplet_list_firewalls_response.py
similarity index 74%
rename from src/do_gradientai/types/droplet_list_firewalls_response.py
rename to src/do_gradientai/types/gpu_droplet_list_firewalls_response.py
index 5aa00655..617cdf98 100644
--- a/src/do_gradientai/types/droplet_list_firewalls_response.py
+++ b/src/do_gradientai/types/gpu_droplet_list_firewalls_response.py
@@ -3,14 +3,14 @@
from typing import List, Optional
from .._models import BaseModel
-from .firewall import Firewall
from .shared.page_links import PageLinks
+from .gpu_droplets.firewall import Firewall
from .shared.meta_properties import MetaProperties
-__all__ = ["DropletListFirewallsResponse"]
+__all__ = ["GPUDropletListFirewallsResponse"]
-class DropletListFirewallsResponse(BaseModel):
+class GPUDropletListFirewallsResponse(BaseModel):
meta: MetaProperties
"""Information about the response itself."""
diff --git a/src/do_gradientai/types/droplet_list_firewalls_params.py b/src/do_gradientai/types/gpu_droplet_list_kernels_params.py
similarity index 74%
rename from src/do_gradientai/types/droplet_list_firewalls_params.py
rename to src/do_gradientai/types/gpu_droplet_list_kernels_params.py
index 86774e77..7aa73225 100644
--- a/src/do_gradientai/types/droplet_list_firewalls_params.py
+++ b/src/do_gradientai/types/gpu_droplet_list_kernels_params.py
@@ -4,10 +4,10 @@
from typing_extensions import TypedDict
-__all__ = ["DropletListFirewallsParams"]
+__all__ = ["GPUDropletListKernelsParams"]
-class DropletListFirewallsParams(TypedDict, total=False):
+class GPUDropletListKernelsParams(TypedDict, total=False):
page: int
"""Which 'page' of paginated results to return."""
diff --git a/src/do_gradientai/types/droplet_list_kernels_response.py b/src/do_gradientai/types/gpu_droplet_list_kernels_response.py
similarity index 83%
rename from src/do_gradientai/types/droplet_list_kernels_response.py
rename to src/do_gradientai/types/gpu_droplet_list_kernels_response.py
index 3352e1f6..5fa9a355 100644
--- a/src/do_gradientai/types/droplet_list_kernels_response.py
+++ b/src/do_gradientai/types/gpu_droplet_list_kernels_response.py
@@ -7,10 +7,10 @@
from .shared.page_links import PageLinks
from .shared.meta_properties import MetaProperties
-__all__ = ["DropletListKernelsResponse"]
+__all__ = ["GPUDropletListKernelsResponse"]
-class DropletListKernelsResponse(BaseModel):
+class GPUDropletListKernelsResponse(BaseModel):
meta: MetaProperties
"""Information about the response itself."""
diff --git a/src/do_gradientai/types/droplet_list_neighbors_response.py b/src/do_gradientai/types/gpu_droplet_list_neighbors_response.py
similarity index 71%
rename from src/do_gradientai/types/droplet_list_neighbors_response.py
rename to src/do_gradientai/types/gpu_droplet_list_neighbors_response.py
index 2f9a84fc..cdfce3e0 100644
--- a/src/do_gradientai/types/droplet_list_neighbors_response.py
+++ b/src/do_gradientai/types/gpu_droplet_list_neighbors_response.py
@@ -5,8 +5,8 @@
from .._models import BaseModel
from .shared.droplet import Droplet
-__all__ = ["DropletListNeighborsResponse"]
+__all__ = ["GPUDropletListNeighborsResponse"]
-class DropletListNeighborsResponse(BaseModel):
+class GPUDropletListNeighborsResponse(BaseModel):
droplets: Optional[List[Droplet]] = None
diff --git a/src/do_gradientai/types/droplet_list_params.py b/src/do_gradientai/types/gpu_droplet_list_params.py
similarity index 90%
rename from src/do_gradientai/types/droplet_list_params.py
rename to src/do_gradientai/types/gpu_droplet_list_params.py
index d0fd62bc..bf6eb793 100644
--- a/src/do_gradientai/types/droplet_list_params.py
+++ b/src/do_gradientai/types/gpu_droplet_list_params.py
@@ -4,10 +4,10 @@
from typing_extensions import Literal, TypedDict
-__all__ = ["DropletListParams"]
+__all__ = ["GPUDropletListParams"]
-class DropletListParams(TypedDict, total=False):
+class GPUDropletListParams(TypedDict, total=False):
name: str
"""Used to filter list response by Droplet name returning only exact matches.
diff --git a/src/do_gradientai/types/droplet_list_response.py b/src/do_gradientai/types/gpu_droplet_list_response.py
similarity index 85%
rename from src/do_gradientai/types/droplet_list_response.py
rename to src/do_gradientai/types/gpu_droplet_list_response.py
index 20dce5d7..73e1e503 100644
--- a/src/do_gradientai/types/droplet_list_response.py
+++ b/src/do_gradientai/types/gpu_droplet_list_response.py
@@ -7,10 +7,10 @@
from .shared.page_links import PageLinks
from .shared.meta_properties import MetaProperties
-__all__ = ["DropletListResponse"]
+__all__ = ["GPUDropletListResponse"]
-class DropletListResponse(BaseModel):
+class GPUDropletListResponse(BaseModel):
meta: MetaProperties
"""Information about the response itself."""
diff --git a/src/do_gradientai/types/droplet_list_kernels_params.py b/src/do_gradientai/types/gpu_droplet_list_snapshots_params.py
similarity index 73%
rename from src/do_gradientai/types/droplet_list_kernels_params.py
rename to src/do_gradientai/types/gpu_droplet_list_snapshots_params.py
index 8fdfe6e1..66e65a36 100644
--- a/src/do_gradientai/types/droplet_list_kernels_params.py
+++ b/src/do_gradientai/types/gpu_droplet_list_snapshots_params.py
@@ -4,10 +4,10 @@
from typing_extensions import TypedDict
-__all__ = ["DropletListKernelsParams"]
+__all__ = ["GPUDropletListSnapshotsParams"]
-class DropletListKernelsParams(TypedDict, total=False):
+class GPUDropletListSnapshotsParams(TypedDict, total=False):
page: int
"""Which 'page' of paginated results to return."""
diff --git a/src/do_gradientai/types/droplet_list_snapshots_response.py b/src/do_gradientai/types/gpu_droplet_list_snapshots_response.py
similarity index 92%
rename from src/do_gradientai/types/droplet_list_snapshots_response.py
rename to src/do_gradientai/types/gpu_droplet_list_snapshots_response.py
index ea6c9296..4b34d670 100644
--- a/src/do_gradientai/types/droplet_list_snapshots_response.py
+++ b/src/do_gradientai/types/gpu_droplet_list_snapshots_response.py
@@ -8,7 +8,7 @@
from .shared.page_links import PageLinks
from .shared.meta_properties import MetaProperties
-__all__ = ["DropletListSnapshotsResponse", "Snapshot"]
+__all__ = ["GPUDropletListSnapshotsResponse", "Snapshot"]
class Snapshot(BaseModel):
@@ -44,7 +44,7 @@ class Snapshot(BaseModel):
"""
-class DropletListSnapshotsResponse(BaseModel):
+class GPUDropletListSnapshotsResponse(BaseModel):
meta: MetaProperties
"""Information about the response itself."""
diff --git a/src/do_gradientai/types/droplet_retrieve_response.py b/src/do_gradientai/types/gpu_droplet_retrieve_response.py
similarity index 72%
rename from src/do_gradientai/types/droplet_retrieve_response.py
rename to src/do_gradientai/types/gpu_droplet_retrieve_response.py
index a3e60721..d8cc0f20 100644
--- a/src/do_gradientai/types/droplet_retrieve_response.py
+++ b/src/do_gradientai/types/gpu_droplet_retrieve_response.py
@@ -5,8 +5,8 @@
from .._models import BaseModel
from .shared.droplet import Droplet
-__all__ = ["DropletRetrieveResponse"]
+__all__ = ["GPUDropletRetrieveResponse"]
-class DropletRetrieveResponse(BaseModel):
+class GPUDropletRetrieveResponse(BaseModel):
droplet: Optional[Droplet] = None
diff --git a/src/do_gradientai/types/gpu_droplets/__init__.py b/src/do_gradientai/types/gpu_droplets/__init__.py
new file mode 100644
index 00000000..c2f1835f
--- /dev/null
+++ b/src/do_gradientai/types/gpu_droplets/__init__.py
@@ -0,0 +1,104 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from .domains import Domains as Domains
+from .firewall import Firewall as Firewall
+from .floating_ip import FloatingIP as FloatingIP
+from .lb_firewall import LbFirewall as LbFirewall
+from .glb_settings import GlbSettings as GlbSettings
+from .health_check import HealthCheck as HealthCheck
+from .domains_param import DomainsParam as DomainsParam
+from .load_balancer import LoadBalancer as LoadBalancer
+from .autoscale_pool import AutoscalePool as AutoscalePool
+from .firewall_param import FirewallParam as FirewallParam
+from .forwarding_rule import ForwardingRule as ForwardingRule
+from .sticky_sessions import StickySessions as StickySessions
+from .size_list_params import SizeListParams as SizeListParams
+from .image_list_params import ImageListParams as ImageListParams
+from .lb_firewall_param import LbFirewallParam as LbFirewallParam
+from .action_list_params import ActionListParams as ActionListParams
+from .backup_list_params import BackupListParams as BackupListParams
+from .glb_settings_param import GlbSettingsParam as GlbSettingsParam
+from .health_check_param import HealthCheckParam as HealthCheckParam
+from .size_list_response import SizeListResponse as SizeListResponse
+from .volume_list_params import VolumeListParams as VolumeListParams
+from .associated_resource import AssociatedResource as AssociatedResource
+from .current_utilization import CurrentUtilization as CurrentUtilization
+from .image_create_params import ImageCreateParams as ImageCreateParams
+from .image_list_response import ImageListResponse as ImageListResponse
+from .image_update_params import ImageUpdateParams as ImageUpdateParams
+from .action_list_response import ActionListResponse as ActionListResponse
+from .backup_list_response import BackupListResponse as BackupListResponse
+from .firewall_list_params import FirewallListParams as FirewallListParams
+from .snapshot_list_params import SnapshotListParams as SnapshotListParams
+from .volume_create_params import VolumeCreateParams as VolumeCreateParams
+from .volume_list_response import VolumeListResponse as VolumeListResponse
+from .autoscale_list_params import AutoscaleListParams as AutoscaleListParams
+from .forwarding_rule_param import ForwardingRuleParam as ForwardingRuleParam
+from .image_create_response import ImageCreateResponse as ImageCreateResponse
+from .image_update_response import ImageUpdateResponse as ImageUpdateResponse
+from .sticky_sessions_param import StickySessionsParam as StickySessionsParam
+from .action_initiate_params import ActionInitiateParams as ActionInitiateParams
+from .firewall_create_params import FirewallCreateParams as FirewallCreateParams
+from .firewall_list_response import FirewallListResponse as FirewallListResponse
+from .firewall_update_params import FirewallUpdateParams as FirewallUpdateParams
+from .snapshot_list_response import SnapshotListResponse as SnapshotListResponse
+from .volume_create_response import VolumeCreateResponse as VolumeCreateResponse
+from .autoscale_create_params import AutoscaleCreateParams as AutoscaleCreateParams
+from .autoscale_list_response import AutoscaleListResponse as AutoscaleListResponse
+from .autoscale_update_params import AutoscaleUpdateParams as AutoscaleUpdateParams
+from .floating_ip_list_params import FloatingIPListParams as FloatingIPListParams
+from .image_retrieve_response import ImageRetrieveResponse as ImageRetrieveResponse
+from .action_initiate_response import ActionInitiateResponse as ActionInitiateResponse
+from .action_retrieve_response import ActionRetrieveResponse as ActionRetrieveResponse
+from .firewall_create_response import FirewallCreateResponse as FirewallCreateResponse
+from .firewall_update_response import FirewallUpdateResponse as FirewallUpdateResponse
+from .volume_retrieve_response import VolumeRetrieveResponse as VolumeRetrieveResponse
+from .autoscale_create_response import AutoscaleCreateResponse as AutoscaleCreateResponse
+from .autoscale_update_response import AutoscaleUpdateResponse as AutoscaleUpdateResponse
+from .floating_ip_create_params import FloatingIPCreateParams as FloatingIPCreateParams
+from .floating_ip_list_response import FloatingIPListResponse as FloatingIPListResponse
+from .load_balancer_list_params import LoadBalancerListParams as LoadBalancerListParams
+from .firewall_retrieve_response import FirewallRetrieveResponse as FirewallRetrieveResponse
+from .snapshot_retrieve_response import SnapshotRetrieveResponse as SnapshotRetrieveResponse
+from .action_bulk_initiate_params import ActionBulkInitiateParams as ActionBulkInitiateParams
+from .autoscale_retrieve_response import AutoscaleRetrieveResponse as AutoscaleRetrieveResponse
+from .backup_list_policies_params import BackupListPoliciesParams as BackupListPoliciesParams
+from .floating_ip_create_response import FloatingIPCreateResponse as FloatingIPCreateResponse
+from .load_balancer_create_params import LoadBalancerCreateParams as LoadBalancerCreateParams
+from .load_balancer_list_response import LoadBalancerListResponse as LoadBalancerListResponse
+from .load_balancer_update_params import LoadBalancerUpdateParams as LoadBalancerUpdateParams
+from .autoscale_pool_static_config import AutoscalePoolStaticConfig as AutoscalePoolStaticConfig
+from .volume_delete_by_name_params import VolumeDeleteByNameParams as VolumeDeleteByNameParams
+from .action_bulk_initiate_response import ActionBulkInitiateResponse as ActionBulkInitiateResponse
+from .autoscale_list_history_params import AutoscaleListHistoryParams as AutoscaleListHistoryParams
+from .autoscale_list_members_params import AutoscaleListMembersParams as AutoscaleListMembersParams
+from .autoscale_pool_dynamic_config import AutoscalePoolDynamicConfig as AutoscalePoolDynamicConfig
+from .backup_list_policies_response import BackupListPoliciesResponse as BackupListPoliciesResponse
+from .destroyed_associated_resource import DestroyedAssociatedResource as DestroyedAssociatedResource
+from .floating_ip_retrieve_response import FloatingIPRetrieveResponse as FloatingIPRetrieveResponse
+from .load_balancer_create_response import LoadBalancerCreateResponse as LoadBalancerCreateResponse
+from .load_balancer_update_response import LoadBalancerUpdateResponse as LoadBalancerUpdateResponse
+from .autoscale_list_history_response import AutoscaleListHistoryResponse as AutoscaleListHistoryResponse
+from .autoscale_list_members_response import AutoscaleListMembersResponse as AutoscaleListMembersResponse
+from .autoscale_pool_droplet_template import AutoscalePoolDropletTemplate as AutoscalePoolDropletTemplate
+from .backup_retrieve_policy_response import BackupRetrievePolicyResponse as BackupRetrievePolicyResponse
+from .load_balancer_retrieve_response import LoadBalancerRetrieveResponse as LoadBalancerRetrieveResponse
+from .autoscale_pool_static_config_param import AutoscalePoolStaticConfigParam as AutoscalePoolStaticConfigParam
+from .autoscale_pool_dynamic_config_param import AutoscalePoolDynamicConfigParam as AutoscalePoolDynamicConfigParam
+from .autoscale_pool_droplet_template_param import (
+ AutoscalePoolDropletTemplateParam as AutoscalePoolDropletTemplateParam,
+)
+from .backup_list_supported_policies_response import (
+ BackupListSupportedPoliciesResponse as BackupListSupportedPoliciesResponse,
+)
+from .destroy_with_associated_resource_list_response import (
+ DestroyWithAssociatedResourceListResponse as DestroyWithAssociatedResourceListResponse,
+)
+from .destroy_with_associated_resource_check_status_response import (
+ DestroyWithAssociatedResourceCheckStatusResponse as DestroyWithAssociatedResourceCheckStatusResponse,
+)
+from .destroy_with_associated_resource_delete_selective_params import (
+ DestroyWithAssociatedResourceDeleteSelectiveParams as DestroyWithAssociatedResourceDeleteSelectiveParams,
+)
diff --git a/src/do_gradientai/types/account/__init__.py b/src/do_gradientai/types/gpu_droplets/account/__init__.py
similarity index 100%
rename from src/do_gradientai/types/account/__init__.py
rename to src/do_gradientai/types/gpu_droplets/account/__init__.py
diff --git a/src/do_gradientai/types/account/key_create_params.py b/src/do_gradientai/types/gpu_droplets/account/key_create_params.py
similarity index 100%
rename from src/do_gradientai/types/account/key_create_params.py
rename to src/do_gradientai/types/gpu_droplets/account/key_create_params.py
diff --git a/src/do_gradientai/types/account/key_create_response.py b/src/do_gradientai/types/gpu_droplets/account/key_create_response.py
similarity index 96%
rename from src/do_gradientai/types/account/key_create_response.py
rename to src/do_gradientai/types/gpu_droplets/account/key_create_response.py
index 883be88a..9fe566ed 100644
--- a/src/do_gradientai/types/account/key_create_response.py
+++ b/src/do_gradientai/types/gpu_droplets/account/key_create_response.py
@@ -2,7 +2,7 @@
from typing import Optional
-from ..._models import BaseModel
+from ...._models import BaseModel
__all__ = ["KeyCreateResponse", "SSHKey"]
diff --git a/src/do_gradientai/types/account/key_list_params.py b/src/do_gradientai/types/gpu_droplets/account/key_list_params.py
similarity index 100%
rename from src/do_gradientai/types/account/key_list_params.py
rename to src/do_gradientai/types/gpu_droplets/account/key_list_params.py
diff --git a/src/do_gradientai/types/account/key_list_response.py b/src/do_gradientai/types/gpu_droplets/account/key_list_response.py
similarity index 89%
rename from src/do_gradientai/types/account/key_list_response.py
rename to src/do_gradientai/types/gpu_droplets/account/key_list_response.py
index 64dc6de8..be4c721c 100644
--- a/src/do_gradientai/types/account/key_list_response.py
+++ b/src/do_gradientai/types/gpu_droplets/account/key_list_response.py
@@ -2,9 +2,9 @@
from typing import List, Optional
-from ..._models import BaseModel
-from ..shared.page_links import PageLinks
-from ..shared.meta_properties import MetaProperties
+from ...._models import BaseModel
+from ...shared.page_links import PageLinks
+from ...shared.meta_properties import MetaProperties
__all__ = ["KeyListResponse", "SSHKey"]
diff --git a/src/do_gradientai/types/account/key_retrieve_response.py b/src/do_gradientai/types/gpu_droplets/account/key_retrieve_response.py
similarity index 96%
rename from src/do_gradientai/types/account/key_retrieve_response.py
rename to src/do_gradientai/types/gpu_droplets/account/key_retrieve_response.py
index 377f57e1..7cd3215e 100644
--- a/src/do_gradientai/types/account/key_retrieve_response.py
+++ b/src/do_gradientai/types/gpu_droplets/account/key_retrieve_response.py
@@ -2,7 +2,7 @@
from typing import Optional
-from ..._models import BaseModel
+from ...._models import BaseModel
__all__ = ["KeyRetrieveResponse", "SSHKey"]
diff --git a/src/do_gradientai/types/account/key_update_params.py b/src/do_gradientai/types/gpu_droplets/account/key_update_params.py
similarity index 100%
rename from src/do_gradientai/types/account/key_update_params.py
rename to src/do_gradientai/types/gpu_droplets/account/key_update_params.py
diff --git a/src/do_gradientai/types/account/key_update_response.py b/src/do_gradientai/types/gpu_droplets/account/key_update_response.py
similarity index 96%
rename from src/do_gradientai/types/account/key_update_response.py
rename to src/do_gradientai/types/gpu_droplets/account/key_update_response.py
index eee61419..2821e44a 100644
--- a/src/do_gradientai/types/account/key_update_response.py
+++ b/src/do_gradientai/types/gpu_droplets/account/key_update_response.py
@@ -2,7 +2,7 @@
from typing import Optional
-from ..._models import BaseModel
+from ...._models import BaseModel
__all__ = ["KeyUpdateResponse", "SSHKey"]
diff --git a/src/do_gradientai/types/droplets/action_bulk_initiate_params.py b/src/do_gradientai/types/gpu_droplets/action_bulk_initiate_params.py
similarity index 100%
rename from src/do_gradientai/types/droplets/action_bulk_initiate_params.py
rename to src/do_gradientai/types/gpu_droplets/action_bulk_initiate_params.py
diff --git a/src/do_gradientai/types/droplets/action_bulk_initiate_response.py b/src/do_gradientai/types/gpu_droplets/action_bulk_initiate_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/action_bulk_initiate_response.py
rename to src/do_gradientai/types/gpu_droplets/action_bulk_initiate_response.py
diff --git a/src/do_gradientai/types/droplets/action_initiate_params.py b/src/do_gradientai/types/gpu_droplets/action_initiate_params.py
similarity index 100%
rename from src/do_gradientai/types/droplets/action_initiate_params.py
rename to src/do_gradientai/types/gpu_droplets/action_initiate_params.py
diff --git a/src/do_gradientai/types/droplets/action_initiate_response.py b/src/do_gradientai/types/gpu_droplets/action_initiate_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/action_initiate_response.py
rename to src/do_gradientai/types/gpu_droplets/action_initiate_response.py
diff --git a/src/do_gradientai/types/droplets/action_list_params.py b/src/do_gradientai/types/gpu_droplets/action_list_params.py
similarity index 100%
rename from src/do_gradientai/types/droplets/action_list_params.py
rename to src/do_gradientai/types/gpu_droplets/action_list_params.py
diff --git a/src/do_gradientai/types/droplets/action_list_response.py b/src/do_gradientai/types/gpu_droplets/action_list_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/action_list_response.py
rename to src/do_gradientai/types/gpu_droplets/action_list_response.py
diff --git a/src/do_gradientai/types/droplets/action_retrieve_response.py b/src/do_gradientai/types/gpu_droplets/action_retrieve_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/action_retrieve_response.py
rename to src/do_gradientai/types/gpu_droplets/action_retrieve_response.py
diff --git a/src/do_gradientai/types/droplets/associated_resource.py b/src/do_gradientai/types/gpu_droplets/associated_resource.py
similarity index 100%
rename from src/do_gradientai/types/droplets/associated_resource.py
rename to src/do_gradientai/types/gpu_droplets/associated_resource.py
diff --git a/src/do_gradientai/types/droplets/autoscale_create_params.py b/src/do_gradientai/types/gpu_droplets/autoscale_create_params.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_create_params.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_create_params.py
diff --git a/src/do_gradientai/types/droplets/autoscale_create_response.py b/src/do_gradientai/types/gpu_droplets/autoscale_create_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_create_response.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_create_response.py
diff --git a/src/do_gradientai/types/droplets/autoscale_list_history_params.py b/src/do_gradientai/types/gpu_droplets/autoscale_list_history_params.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_list_history_params.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_list_history_params.py
diff --git a/src/do_gradientai/types/droplets/autoscale_list_history_response.py b/src/do_gradientai/types/gpu_droplets/autoscale_list_history_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_list_history_response.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_list_history_response.py
diff --git a/src/do_gradientai/types/droplets/autoscale_list_members_params.py b/src/do_gradientai/types/gpu_droplets/autoscale_list_members_params.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_list_members_params.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_list_members_params.py
diff --git a/src/do_gradientai/types/droplets/autoscale_list_members_response.py b/src/do_gradientai/types/gpu_droplets/autoscale_list_members_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_list_members_response.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_list_members_response.py
diff --git a/src/do_gradientai/types/droplets/autoscale_list_params.py b/src/do_gradientai/types/gpu_droplets/autoscale_list_params.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_list_params.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_list_params.py
diff --git a/src/do_gradientai/types/droplets/autoscale_list_response.py b/src/do_gradientai/types/gpu_droplets/autoscale_list_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_list_response.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_list_response.py
diff --git a/src/do_gradientai/types/droplets/autoscale_pool.py b/src/do_gradientai/types/gpu_droplets/autoscale_pool.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_pool.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_pool.py
diff --git a/src/do_gradientai/types/droplets/autoscale_pool_droplet_template.py b/src/do_gradientai/types/gpu_droplets/autoscale_pool_droplet_template.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_pool_droplet_template.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_pool_droplet_template.py
diff --git a/src/do_gradientai/types/droplets/autoscale_pool_droplet_template_param.py b/src/do_gradientai/types/gpu_droplets/autoscale_pool_droplet_template_param.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_pool_droplet_template_param.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_pool_droplet_template_param.py
diff --git a/src/do_gradientai/types/droplets/autoscale_pool_dynamic_config.py b/src/do_gradientai/types/gpu_droplets/autoscale_pool_dynamic_config.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_pool_dynamic_config.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_pool_dynamic_config.py
diff --git a/src/do_gradientai/types/droplets/autoscale_pool_dynamic_config_param.py b/src/do_gradientai/types/gpu_droplets/autoscale_pool_dynamic_config_param.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_pool_dynamic_config_param.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_pool_dynamic_config_param.py
diff --git a/src/do_gradientai/types/droplets/autoscale_pool_static_config.py b/src/do_gradientai/types/gpu_droplets/autoscale_pool_static_config.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_pool_static_config.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_pool_static_config.py
diff --git a/src/do_gradientai/types/droplets/autoscale_pool_static_config_param.py b/src/do_gradientai/types/gpu_droplets/autoscale_pool_static_config_param.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_pool_static_config_param.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_pool_static_config_param.py
diff --git a/src/do_gradientai/types/droplets/autoscale_retrieve_response.py b/src/do_gradientai/types/gpu_droplets/autoscale_retrieve_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_retrieve_response.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_retrieve_response.py
diff --git a/src/do_gradientai/types/droplets/autoscale_update_params.py b/src/do_gradientai/types/gpu_droplets/autoscale_update_params.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_update_params.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_update_params.py
diff --git a/src/do_gradientai/types/droplets/autoscale_update_response.py b/src/do_gradientai/types/gpu_droplets/autoscale_update_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/autoscale_update_response.py
rename to src/do_gradientai/types/gpu_droplets/autoscale_update_response.py
diff --git a/src/do_gradientai/types/droplets/backup_list_params.py b/src/do_gradientai/types/gpu_droplets/backup_list_params.py
similarity index 100%
rename from src/do_gradientai/types/droplets/backup_list_params.py
rename to src/do_gradientai/types/gpu_droplets/backup_list_params.py
diff --git a/src/do_gradientai/types/droplets/backup_list_policies_params.py b/src/do_gradientai/types/gpu_droplets/backup_list_policies_params.py
similarity index 100%
rename from src/do_gradientai/types/droplets/backup_list_policies_params.py
rename to src/do_gradientai/types/gpu_droplets/backup_list_policies_params.py
diff --git a/src/do_gradientai/types/droplets/backup_list_policies_response.py b/src/do_gradientai/types/gpu_droplets/backup_list_policies_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/backup_list_policies_response.py
rename to src/do_gradientai/types/gpu_droplets/backup_list_policies_response.py
diff --git a/src/do_gradientai/types/droplets/backup_list_response.py b/src/do_gradientai/types/gpu_droplets/backup_list_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/backup_list_response.py
rename to src/do_gradientai/types/gpu_droplets/backup_list_response.py
diff --git a/src/do_gradientai/types/droplets/backup_list_supported_policies_response.py b/src/do_gradientai/types/gpu_droplets/backup_list_supported_policies_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/backup_list_supported_policies_response.py
rename to src/do_gradientai/types/gpu_droplets/backup_list_supported_policies_response.py
diff --git a/src/do_gradientai/types/droplets/backup_retrieve_policy_response.py b/src/do_gradientai/types/gpu_droplets/backup_retrieve_policy_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/backup_retrieve_policy_response.py
rename to src/do_gradientai/types/gpu_droplets/backup_retrieve_policy_response.py
diff --git a/src/do_gradientai/types/droplets/current_utilization.py b/src/do_gradientai/types/gpu_droplets/current_utilization.py
similarity index 100%
rename from src/do_gradientai/types/droplets/current_utilization.py
rename to src/do_gradientai/types/gpu_droplets/current_utilization.py
diff --git a/src/do_gradientai/types/droplets/destroy_with_associated_resource_check_status_response.py b/src/do_gradientai/types/gpu_droplets/destroy_with_associated_resource_check_status_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/destroy_with_associated_resource_check_status_response.py
rename to src/do_gradientai/types/gpu_droplets/destroy_with_associated_resource_check_status_response.py
diff --git a/src/do_gradientai/types/droplets/destroy_with_associated_resource_delete_selective_params.py b/src/do_gradientai/types/gpu_droplets/destroy_with_associated_resource_delete_selective_params.py
similarity index 100%
rename from src/do_gradientai/types/droplets/destroy_with_associated_resource_delete_selective_params.py
rename to src/do_gradientai/types/gpu_droplets/destroy_with_associated_resource_delete_selective_params.py
diff --git a/src/do_gradientai/types/droplets/destroy_with_associated_resource_list_response.py b/src/do_gradientai/types/gpu_droplets/destroy_with_associated_resource_list_response.py
similarity index 100%
rename from src/do_gradientai/types/droplets/destroy_with_associated_resource_list_response.py
rename to src/do_gradientai/types/gpu_droplets/destroy_with_associated_resource_list_response.py
diff --git a/src/do_gradientai/types/droplets/destroyed_associated_resource.py b/src/do_gradientai/types/gpu_droplets/destroyed_associated_resource.py
similarity index 100%
rename from src/do_gradientai/types/droplets/destroyed_associated_resource.py
rename to src/do_gradientai/types/gpu_droplets/destroyed_associated_resource.py
diff --git a/src/do_gradientai/types/domains.py b/src/do_gradientai/types/gpu_droplets/domains.py
similarity index 94%
rename from src/do_gradientai/types/domains.py
rename to src/do_gradientai/types/gpu_droplets/domains.py
index e5510bdc..6a9400f9 100644
--- a/src/do_gradientai/types/domains.py
+++ b/src/do_gradientai/types/gpu_droplets/domains.py
@@ -2,7 +2,7 @@
from typing import Optional
-from .._models import BaseModel
+from ..._models import BaseModel
__all__ = ["Domains"]
diff --git a/src/do_gradientai/types/domains_param.py b/src/do_gradientai/types/gpu_droplets/domains_param.py
similarity index 100%
rename from src/do_gradientai/types/domains_param.py
rename to src/do_gradientai/types/gpu_droplets/domains_param.py
diff --git a/src/do_gradientai/types/firewall.py b/src/do_gradientai/types/gpu_droplets/firewall.py
similarity index 97%
rename from src/do_gradientai/types/firewall.py
rename to src/do_gradientai/types/gpu_droplets/firewall.py
index 427d53b0..0eb352a1 100644
--- a/src/do_gradientai/types/firewall.py
+++ b/src/do_gradientai/types/gpu_droplets/firewall.py
@@ -4,8 +4,8 @@
from datetime import datetime
from typing_extensions import Literal
-from .._models import BaseModel
-from .shared.firewall_rule_target import FirewallRuleTarget
+from ..._models import BaseModel
+from ..shared.firewall_rule_target import FirewallRuleTarget
__all__ = ["Firewall", "InboundRule", "OutboundRule", "PendingChange"]
diff --git a/src/do_gradientai/types/firewall_create_params.py b/src/do_gradientai/types/gpu_droplets/firewall_create_params.py
similarity index 100%
rename from src/do_gradientai/types/firewall_create_params.py
rename to src/do_gradientai/types/gpu_droplets/firewall_create_params.py
diff --git a/src/do_gradientai/types/firewall_create_response.py b/src/do_gradientai/types/gpu_droplets/firewall_create_response.py
similarity index 89%
rename from src/do_gradientai/types/firewall_create_response.py
rename to src/do_gradientai/types/gpu_droplets/firewall_create_response.py
index 8a9a2ff1..be30113a 100644
--- a/src/do_gradientai/types/firewall_create_response.py
+++ b/src/do_gradientai/types/gpu_droplets/firewall_create_response.py
@@ -2,8 +2,8 @@
from typing import Optional
-from .._models import BaseModel
from .firewall import Firewall
+from ..._models import BaseModel
__all__ = ["FirewallCreateResponse"]
diff --git a/src/do_gradientai/types/firewall_list_params.py b/src/do_gradientai/types/gpu_droplets/firewall_list_params.py
similarity index 100%
rename from src/do_gradientai/types/firewall_list_params.py
rename to src/do_gradientai/types/gpu_droplets/firewall_list_params.py
diff --git a/src/do_gradientai/types/firewall_list_response.py b/src/do_gradientai/types/gpu_droplets/firewall_list_response.py
similarity index 75%
rename from src/do_gradientai/types/firewall_list_response.py
rename to src/do_gradientai/types/gpu_droplets/firewall_list_response.py
index 27768083..ec0af688 100644
--- a/src/do_gradientai/types/firewall_list_response.py
+++ b/src/do_gradientai/types/gpu_droplets/firewall_list_response.py
@@ -2,10 +2,10 @@
from typing import List, Optional
-from .._models import BaseModel
from .firewall import Firewall
-from .shared.page_links import PageLinks
-from .shared.meta_properties import MetaProperties
+from ..._models import BaseModel
+from ..shared.page_links import PageLinks
+from ..shared.meta_properties import MetaProperties
__all__ = ["FirewallListResponse"]
diff --git a/src/do_gradientai/types/firewall_param.py b/src/do_gradientai/types/gpu_droplets/firewall_param.py
similarity index 97%
rename from src/do_gradientai/types/firewall_param.py
rename to src/do_gradientai/types/gpu_droplets/firewall_param.py
index c92635d1..1be9cf6a 100644
--- a/src/do_gradientai/types/firewall_param.py
+++ b/src/do_gradientai/types/gpu_droplets/firewall_param.py
@@ -5,7 +5,7 @@
from typing import List, Iterable, Optional
from typing_extensions import Literal, Required, TypedDict
-from .shared_params.firewall_rule_target import FirewallRuleTarget
+from ..shared_params.firewall_rule_target import FirewallRuleTarget
__all__ = ["FirewallParam", "InboundRule", "OutboundRule"]
diff --git a/src/do_gradientai/types/firewall_retrieve_response.py b/src/do_gradientai/types/gpu_droplets/firewall_retrieve_response.py
similarity index 89%
rename from src/do_gradientai/types/firewall_retrieve_response.py
rename to src/do_gradientai/types/gpu_droplets/firewall_retrieve_response.py
index a8bdfa07..bb29a174 100644
--- a/src/do_gradientai/types/firewall_retrieve_response.py
+++ b/src/do_gradientai/types/gpu_droplets/firewall_retrieve_response.py
@@ -2,8 +2,8 @@
from typing import Optional
-from .._models import BaseModel
from .firewall import Firewall
+from ..._models import BaseModel
__all__ = ["FirewallRetrieveResponse"]
diff --git a/src/do_gradientai/types/firewall_update_params.py b/src/do_gradientai/types/gpu_droplets/firewall_update_params.py
similarity index 100%
rename from src/do_gradientai/types/firewall_update_params.py
rename to src/do_gradientai/types/gpu_droplets/firewall_update_params.py
diff --git a/src/do_gradientai/types/firewall_update_response.py b/src/do_gradientai/types/gpu_droplets/firewall_update_response.py
similarity index 89%
rename from src/do_gradientai/types/firewall_update_response.py
rename to src/do_gradientai/types/gpu_droplets/firewall_update_response.py
index d3f96601..cb8ff702 100644
--- a/src/do_gradientai/types/firewall_update_response.py
+++ b/src/do_gradientai/types/gpu_droplets/firewall_update_response.py
@@ -2,8 +2,8 @@
from typing import Optional
-from .._models import BaseModel
from .firewall import Firewall
+from ..._models import BaseModel
__all__ = ["FirewallUpdateResponse"]
diff --git a/src/do_gradientai/types/firewalls/__init__.py b/src/do_gradientai/types/gpu_droplets/firewalls/__init__.py
similarity index 100%
rename from src/do_gradientai/types/firewalls/__init__.py
rename to src/do_gradientai/types/gpu_droplets/firewalls/__init__.py
diff --git a/src/do_gradientai/types/firewalls/droplet_add_params.py b/src/do_gradientai/types/gpu_droplets/firewalls/droplet_add_params.py
similarity index 100%
rename from src/do_gradientai/types/firewalls/droplet_add_params.py
rename to src/do_gradientai/types/gpu_droplets/firewalls/droplet_add_params.py
diff --git a/src/do_gradientai/types/firewalls/droplet_remove_params.py b/src/do_gradientai/types/gpu_droplets/firewalls/droplet_remove_params.py
similarity index 100%
rename from src/do_gradientai/types/firewalls/droplet_remove_params.py
rename to src/do_gradientai/types/gpu_droplets/firewalls/droplet_remove_params.py
diff --git a/src/do_gradientai/types/firewalls/rule_add_params.py b/src/do_gradientai/types/gpu_droplets/firewalls/rule_add_params.py
similarity index 95%
rename from src/do_gradientai/types/firewalls/rule_add_params.py
rename to src/do_gradientai/types/gpu_droplets/firewalls/rule_add_params.py
index fd405c61..1f49e55a 100644
--- a/src/do_gradientai/types/firewalls/rule_add_params.py
+++ b/src/do_gradientai/types/gpu_droplets/firewalls/rule_add_params.py
@@ -5,7 +5,7 @@
from typing import Iterable, Optional
from typing_extensions import Literal, Required, TypedDict
-from ..shared_params.firewall_rule_target import FirewallRuleTarget
+from ...shared_params.firewall_rule_target import FirewallRuleTarget
__all__ = ["RuleAddParams", "InboundRule", "OutboundRule"]
diff --git a/src/do_gradientai/types/firewalls/rule_remove_params.py b/src/do_gradientai/types/gpu_droplets/firewalls/rule_remove_params.py
similarity index 95%
rename from src/do_gradientai/types/firewalls/rule_remove_params.py
rename to src/do_gradientai/types/gpu_droplets/firewalls/rule_remove_params.py
index 93911e8e..b6bb05df 100644
--- a/src/do_gradientai/types/firewalls/rule_remove_params.py
+++ b/src/do_gradientai/types/gpu_droplets/firewalls/rule_remove_params.py
@@ -5,7 +5,7 @@
from typing import Iterable, Optional
from typing_extensions import Literal, Required, TypedDict
-from ..shared_params.firewall_rule_target import FirewallRuleTarget
+from ...shared_params.firewall_rule_target import FirewallRuleTarget
__all__ = ["RuleRemoveParams", "InboundRule", "OutboundRule"]
diff --git a/src/do_gradientai/types/firewalls/tag_add_params.py b/src/do_gradientai/types/gpu_droplets/firewalls/tag_add_params.py
similarity index 100%
rename from src/do_gradientai/types/firewalls/tag_add_params.py
rename to src/do_gradientai/types/gpu_droplets/firewalls/tag_add_params.py
diff --git a/src/do_gradientai/types/firewalls/tag_remove_params.py b/src/do_gradientai/types/gpu_droplets/firewalls/tag_remove_params.py
similarity index 100%
rename from src/do_gradientai/types/firewalls/tag_remove_params.py
rename to src/do_gradientai/types/gpu_droplets/firewalls/tag_remove_params.py
diff --git a/src/do_gradientai/types/floating_ip.py b/src/do_gradientai/types/gpu_droplets/floating_ip.py
similarity index 94%
rename from src/do_gradientai/types/floating_ip.py
rename to src/do_gradientai/types/gpu_droplets/floating_ip.py
index 6bfee5b0..81c58753 100644
--- a/src/do_gradientai/types/floating_ip.py
+++ b/src/do_gradientai/types/gpu_droplets/floating_ip.py
@@ -3,8 +3,8 @@
from typing import Union, Optional
from typing_extensions import TypeAlias
-from .shared import region, droplet
-from .._models import BaseModel
+from ..shared import region, droplet
+from ..._models import BaseModel
__all__ = ["FloatingIP", "Droplet", "Region"]
diff --git a/src/do_gradientai/types/floating_ip_create_params.py b/src/do_gradientai/types/gpu_droplets/floating_ip_create_params.py
similarity index 100%
rename from src/do_gradientai/types/floating_ip_create_params.py
rename to src/do_gradientai/types/gpu_droplets/floating_ip_create_params.py
diff --git a/src/do_gradientai/types/floating_ip_create_response.py b/src/do_gradientai/types/gpu_droplets/floating_ip_create_response.py
similarity index 85%
rename from src/do_gradientai/types/floating_ip_create_response.py
rename to src/do_gradientai/types/gpu_droplets/floating_ip_create_response.py
index fab8c06b..04668b84 100644
--- a/src/do_gradientai/types/floating_ip_create_response.py
+++ b/src/do_gradientai/types/gpu_droplets/floating_ip_create_response.py
@@ -2,9 +2,9 @@
from typing import List, Optional
-from .._models import BaseModel
+from ..._models import BaseModel
from .floating_ip import FloatingIP
-from .shared.action_link import ActionLink
+from ..shared.action_link import ActionLink
__all__ = ["FloatingIPCreateResponse", "Links"]
diff --git a/src/do_gradientai/types/floating_ip_list_params.py b/src/do_gradientai/types/gpu_droplets/floating_ip_list_params.py
similarity index 100%
rename from src/do_gradientai/types/floating_ip_list_params.py
rename to src/do_gradientai/types/gpu_droplets/floating_ip_list_params.py
diff --git a/src/do_gradientai/types/floating_ip_list_response.py b/src/do_gradientai/types/gpu_droplets/floating_ip_list_response.py
similarity index 76%
rename from src/do_gradientai/types/floating_ip_list_response.py
rename to src/do_gradientai/types/gpu_droplets/floating_ip_list_response.py
index 8535fa24..734011d2 100644
--- a/src/do_gradientai/types/floating_ip_list_response.py
+++ b/src/do_gradientai/types/gpu_droplets/floating_ip_list_response.py
@@ -2,10 +2,10 @@
from typing import List, Optional
-from .._models import BaseModel
+from ..._models import BaseModel
from .floating_ip import FloatingIP
-from .shared.page_links import PageLinks
-from .shared.meta_properties import MetaProperties
+from ..shared.page_links import PageLinks
+from ..shared.meta_properties import MetaProperties
__all__ = ["FloatingIPListResponse"]
diff --git a/src/do_gradientai/types/floating_ip_retrieve_response.py b/src/do_gradientai/types/gpu_droplets/floating_ip_retrieve_response.py
similarity index 89%
rename from src/do_gradientai/types/floating_ip_retrieve_response.py
rename to src/do_gradientai/types/gpu_droplets/floating_ip_retrieve_response.py
index 98bbbb2a..b7ec77d4 100644
--- a/src/do_gradientai/types/floating_ip_retrieve_response.py
+++ b/src/do_gradientai/types/gpu_droplets/floating_ip_retrieve_response.py
@@ -2,7 +2,7 @@
from typing import Optional
-from .._models import BaseModel
+from ..._models import BaseModel
from .floating_ip import FloatingIP
__all__ = ["FloatingIPRetrieveResponse"]
diff --git a/src/do_gradientai/types/floating_ips/__init__.py b/src/do_gradientai/types/gpu_droplets/floating_ips/__init__.py
similarity index 100%
rename from src/do_gradientai/types/floating_ips/__init__.py
rename to src/do_gradientai/types/gpu_droplets/floating_ips/__init__.py
diff --git a/src/do_gradientai/types/floating_ips/action_create_params.py b/src/do_gradientai/types/gpu_droplets/floating_ips/action_create_params.py
similarity index 100%
rename from src/do_gradientai/types/floating_ips/action_create_params.py
rename to src/do_gradientai/types/gpu_droplets/floating_ips/action_create_params.py
diff --git a/src/do_gradientai/types/floating_ips/action_create_response.py b/src/do_gradientai/types/gpu_droplets/floating_ips/action_create_response.py
similarity index 85%
rename from src/do_gradientai/types/floating_ips/action_create_response.py
rename to src/do_gradientai/types/gpu_droplets/floating_ips/action_create_response.py
index 5f68724f..90acd8c9 100644
--- a/src/do_gradientai/types/floating_ips/action_create_response.py
+++ b/src/do_gradientai/types/gpu_droplets/floating_ips/action_create_response.py
@@ -2,8 +2,8 @@
from typing import Optional
-from ..shared import action
-from ..._models import BaseModel
+from ...shared import action
+from ...._models import BaseModel
__all__ = ["ActionCreateResponse", "Action"]
diff --git a/src/do_gradientai/types/gpu_droplets/floating_ips/action_list_response.py b/src/do_gradientai/types/gpu_droplets/floating_ips/action_list_response.py
new file mode 100644
index 00000000..2f4edac5
--- /dev/null
+++ b/src/do_gradientai/types/gpu_droplets/floating_ips/action_list_response.py
@@ -0,0 +1,19 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import List, Optional
+
+from ...._models import BaseModel
+from ...shared.action import Action
+from ...shared.page_links import PageLinks
+from ...shared.meta_properties import MetaProperties
+
+__all__ = ["ActionListResponse"]
+
+
+class ActionListResponse(BaseModel):
+ meta: MetaProperties
+ """Information about the response itself."""
+
+ actions: Optional[List[Action]] = None
+
+ links: Optional[PageLinks] = None
diff --git a/src/do_gradientai/types/floating_ips/action_retrieve_response.py b/src/do_gradientai/types/gpu_droplets/floating_ips/action_retrieve_response.py
similarity index 86%
rename from src/do_gradientai/types/floating_ips/action_retrieve_response.py
rename to src/do_gradientai/types/gpu_droplets/floating_ips/action_retrieve_response.py
index 493b62a7..d94554be 100644
--- a/src/do_gradientai/types/floating_ips/action_retrieve_response.py
+++ b/src/do_gradientai/types/gpu_droplets/floating_ips/action_retrieve_response.py
@@ -2,8 +2,8 @@
from typing import Optional
-from ..shared import action
-from ..._models import BaseModel
+from ...shared import action
+from ...._models import BaseModel
__all__ = ["ActionRetrieveResponse", "Action"]
diff --git a/src/do_gradientai/types/forwarding_rule.py b/src/do_gradientai/types/gpu_droplets/forwarding_rule.py
similarity index 98%
rename from src/do_gradientai/types/forwarding_rule.py
rename to src/do_gradientai/types/gpu_droplets/forwarding_rule.py
index 38da45d8..40a310ab 100644
--- a/src/do_gradientai/types/forwarding_rule.py
+++ b/src/do_gradientai/types/gpu_droplets/forwarding_rule.py
@@ -3,7 +3,7 @@
from typing import Optional
from typing_extensions import Literal
-from .._models import BaseModel
+from ..._models import BaseModel
__all__ = ["ForwardingRule"]
diff --git a/src/do_gradientai/types/forwarding_rule_param.py b/src/do_gradientai/types/gpu_droplets/forwarding_rule_param.py
similarity index 100%
rename from src/do_gradientai/types/forwarding_rule_param.py
rename to src/do_gradientai/types/gpu_droplets/forwarding_rule_param.py
diff --git a/src/do_gradientai/types/glb_settings.py b/src/do_gradientai/types/gpu_droplets/glb_settings.py
similarity index 97%
rename from src/do_gradientai/types/glb_settings.py
rename to src/do_gradientai/types/gpu_droplets/glb_settings.py
index 164b75af..9aa790d8 100644
--- a/src/do_gradientai/types/glb_settings.py
+++ b/src/do_gradientai/types/gpu_droplets/glb_settings.py
@@ -3,7 +3,7 @@
from typing import Dict, Optional
from typing_extensions import Literal
-from .._models import BaseModel
+from ..._models import BaseModel
__all__ = ["GlbSettings", "Cdn"]
diff --git a/src/do_gradientai/types/glb_settings_param.py b/src/do_gradientai/types/gpu_droplets/glb_settings_param.py
similarity index 100%
rename from src/do_gradientai/types/glb_settings_param.py
rename to src/do_gradientai/types/gpu_droplets/glb_settings_param.py
diff --git a/src/do_gradientai/types/health_check.py b/src/do_gradientai/types/gpu_droplets/health_check.py
similarity index 97%
rename from src/do_gradientai/types/health_check.py
rename to src/do_gradientai/types/gpu_droplets/health_check.py
index 3f167fb8..db44d84e 100644
--- a/src/do_gradientai/types/health_check.py
+++ b/src/do_gradientai/types/gpu_droplets/health_check.py
@@ -3,7 +3,7 @@
from typing import Optional
from typing_extensions import Literal
-from .._models import BaseModel
+from ..._models import BaseModel
__all__ = ["HealthCheck"]
diff --git a/src/do_gradientai/types/health_check_param.py b/src/do_gradientai/types/gpu_droplets/health_check_param.py
similarity index 100%
rename from src/do_gradientai/types/health_check_param.py
rename to src/do_gradientai/types/gpu_droplets/health_check_param.py
diff --git a/src/do_gradientai/types/image_create_params.py b/src/do_gradientai/types/gpu_droplets/image_create_params.py
similarity index 100%
rename from src/do_gradientai/types/image_create_params.py
rename to src/do_gradientai/types/gpu_droplets/image_create_params.py
diff --git a/src/do_gradientai/types/image_create_response.py b/src/do_gradientai/types/gpu_droplets/image_create_response.py
similarity index 77%
rename from src/do_gradientai/types/image_create_response.py
rename to src/do_gradientai/types/gpu_droplets/image_create_response.py
index 57c96cf5..87ebbb01 100644
--- a/src/do_gradientai/types/image_create_response.py
+++ b/src/do_gradientai/types/gpu_droplets/image_create_response.py
@@ -2,8 +2,8 @@
from typing import Optional
-from .._models import BaseModel
-from .shared.image import Image
+from ..._models import BaseModel
+from ..shared.image import Image
__all__ = ["ImageCreateResponse"]
diff --git a/src/do_gradientai/types/image_list_params.py b/src/do_gradientai/types/gpu_droplets/image_list_params.py
similarity index 100%
rename from src/do_gradientai/types/image_list_params.py
rename to src/do_gradientai/types/gpu_droplets/image_list_params.py
diff --git a/src/do_gradientai/types/floating_ips/action_list_response.py b/src/do_gradientai/types/gpu_droplets/image_list_response.py
similarity index 71%
rename from src/do_gradientai/types/floating_ips/action_list_response.py
rename to src/do_gradientai/types/gpu_droplets/image_list_response.py
index 1a20f780..d4bb5697 100644
--- a/src/do_gradientai/types/floating_ips/action_list_response.py
+++ b/src/do_gradientai/types/gpu_droplets/image_list_response.py
@@ -3,17 +3,17 @@
from typing import List, Optional
from ..._models import BaseModel
-from ..shared.action import Action
+from ..shared.image import Image
from ..shared.page_links import PageLinks
from ..shared.meta_properties import MetaProperties
-__all__ = ["ActionListResponse"]
+__all__ = ["ImageListResponse"]
-class ActionListResponse(BaseModel):
+class ImageListResponse(BaseModel):
+ images: List[Image]
+
meta: MetaProperties
"""Information about the response itself."""
- actions: Optional[List[Action]] = None
-
links: Optional[PageLinks] = None
diff --git a/src/do_gradientai/types/image_retrieve_response.py b/src/do_gradientai/types/gpu_droplets/image_retrieve_response.py
similarity index 73%
rename from src/do_gradientai/types/image_retrieve_response.py
rename to src/do_gradientai/types/gpu_droplets/image_retrieve_response.py
index 761d6184..394dd384 100644
--- a/src/do_gradientai/types/image_retrieve_response.py
+++ b/src/do_gradientai/types/gpu_droplets/image_retrieve_response.py
@@ -1,7 +1,7 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from .._models import BaseModel
-from .shared.image import Image
+from ..._models import BaseModel
+from ..shared.image import Image
__all__ = ["ImageRetrieveResponse"]
diff --git a/src/do_gradientai/types/image_update_params.py b/src/do_gradientai/types/gpu_droplets/image_update_params.py
similarity index 100%
rename from src/do_gradientai/types/image_update_params.py
rename to src/do_gradientai/types/gpu_droplets/image_update_params.py
diff --git a/src/do_gradientai/types/image_update_response.py b/src/do_gradientai/types/gpu_droplets/image_update_response.py
similarity index 73%
rename from src/do_gradientai/types/image_update_response.py
rename to src/do_gradientai/types/gpu_droplets/image_update_response.py
index 22db593b..3d07f5ac 100644
--- a/src/do_gradientai/types/image_update_response.py
+++ b/src/do_gradientai/types/gpu_droplets/image_update_response.py
@@ -1,7 +1,7 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from .._models import BaseModel
-from .shared.image import Image
+from ..._models import BaseModel
+from ..shared.image import Image
__all__ = ["ImageUpdateResponse"]
diff --git a/src/do_gradientai/types/images/__init__.py b/src/do_gradientai/types/gpu_droplets/images/__init__.py
similarity index 100%
rename from src/do_gradientai/types/images/__init__.py
rename to src/do_gradientai/types/gpu_droplets/images/__init__.py
diff --git a/src/do_gradientai/types/images/action_create_params.py b/src/do_gradientai/types/gpu_droplets/images/action_create_params.py
similarity index 100%
rename from src/do_gradientai/types/images/action_create_params.py
rename to src/do_gradientai/types/gpu_droplets/images/action_create_params.py
diff --git a/src/do_gradientai/types/gpu_droplets/images/action_list_response.py b/src/do_gradientai/types/gpu_droplets/images/action_list_response.py
new file mode 100644
index 00000000..2f4edac5
--- /dev/null
+++ b/src/do_gradientai/types/gpu_droplets/images/action_list_response.py
@@ -0,0 +1,19 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import List, Optional
+
+from ...._models import BaseModel
+from ...shared.action import Action
+from ...shared.page_links import PageLinks
+from ...shared.meta_properties import MetaProperties
+
+__all__ = ["ActionListResponse"]
+
+
+class ActionListResponse(BaseModel):
+ meta: MetaProperties
+ """Information about the response itself."""
+
+ actions: Optional[List[Action]] = None
+
+ links: Optional[PageLinks] = None
diff --git a/src/do_gradientai/types/lb_firewall.py b/src/do_gradientai/types/gpu_droplets/lb_firewall.py
similarity index 93%
rename from src/do_gradientai/types/lb_firewall.py
rename to src/do_gradientai/types/gpu_droplets/lb_firewall.py
index b02efa3e..aea1887c 100644
--- a/src/do_gradientai/types/lb_firewall.py
+++ b/src/do_gradientai/types/gpu_droplets/lb_firewall.py
@@ -2,7 +2,7 @@
from typing import List, Optional
-from .._models import BaseModel
+from ..._models import BaseModel
__all__ = ["LbFirewall"]
diff --git a/src/do_gradientai/types/lb_firewall_param.py b/src/do_gradientai/types/gpu_droplets/lb_firewall_param.py
similarity index 100%
rename from src/do_gradientai/types/lb_firewall_param.py
rename to src/do_gradientai/types/gpu_droplets/lb_firewall_param.py
diff --git a/src/do_gradientai/types/load_balancer.py b/src/do_gradientai/types/gpu_droplets/load_balancer.py
similarity index 98%
rename from src/do_gradientai/types/load_balancer.py
rename to src/do_gradientai/types/gpu_droplets/load_balancer.py
index 9d63222b..d0e7597a 100644
--- a/src/do_gradientai/types/load_balancer.py
+++ b/src/do_gradientai/types/gpu_droplets/load_balancer.py
@@ -5,11 +5,11 @@
from typing_extensions import Literal
from .domains import Domains
-from .._models import BaseModel
+from ..._models import BaseModel
from .lb_firewall import LbFirewall
from .glb_settings import GlbSettings
from .health_check import HealthCheck
-from .shared.region import Region
+from ..shared.region import Region
from .forwarding_rule import ForwardingRule
from .sticky_sessions import StickySessions
diff --git a/src/do_gradientai/types/load_balancer_create_params.py b/src/do_gradientai/types/gpu_droplets/load_balancer_create_params.py
similarity index 100%
rename from src/do_gradientai/types/load_balancer_create_params.py
rename to src/do_gradientai/types/gpu_droplets/load_balancer_create_params.py
diff --git a/src/do_gradientai/types/load_balancer_create_response.py b/src/do_gradientai/types/gpu_droplets/load_balancer_create_response.py
similarity index 89%
rename from src/do_gradientai/types/load_balancer_create_response.py
rename to src/do_gradientai/types/gpu_droplets/load_balancer_create_response.py
index 8d90c217..ed4f2211 100644
--- a/src/do_gradientai/types/load_balancer_create_response.py
+++ b/src/do_gradientai/types/gpu_droplets/load_balancer_create_response.py
@@ -2,7 +2,7 @@
from typing import Optional
-from .._models import BaseModel
+from ..._models import BaseModel
from .load_balancer import LoadBalancer
__all__ = ["LoadBalancerCreateResponse"]
diff --git a/src/do_gradientai/types/load_balancer_list_params.py b/src/do_gradientai/types/gpu_droplets/load_balancer_list_params.py
similarity index 100%
rename from src/do_gradientai/types/load_balancer_list_params.py
rename to src/do_gradientai/types/gpu_droplets/load_balancer_list_params.py
diff --git a/src/do_gradientai/types/load_balancer_list_response.py b/src/do_gradientai/types/gpu_droplets/load_balancer_list_response.py
similarity index 76%
rename from src/do_gradientai/types/load_balancer_list_response.py
rename to src/do_gradientai/types/gpu_droplets/load_balancer_list_response.py
index 64ec8e91..d5d0b4ac 100644
--- a/src/do_gradientai/types/load_balancer_list_response.py
+++ b/src/do_gradientai/types/gpu_droplets/load_balancer_list_response.py
@@ -2,10 +2,10 @@
from typing import List, Optional
-from .._models import BaseModel
+from ..._models import BaseModel
from .load_balancer import LoadBalancer
-from .shared.page_links import PageLinks
-from .shared.meta_properties import MetaProperties
+from ..shared.page_links import PageLinks
+from ..shared.meta_properties import MetaProperties
__all__ = ["LoadBalancerListResponse"]
diff --git a/src/do_gradientai/types/load_balancer_retrieve_response.py b/src/do_gradientai/types/gpu_droplets/load_balancer_retrieve_response.py
similarity index 90%
rename from src/do_gradientai/types/load_balancer_retrieve_response.py
rename to src/do_gradientai/types/gpu_droplets/load_balancer_retrieve_response.py
index f4df6ae3..779e9693 100644
--- a/src/do_gradientai/types/load_balancer_retrieve_response.py
+++ b/src/do_gradientai/types/gpu_droplets/load_balancer_retrieve_response.py
@@ -2,7 +2,7 @@
from typing import Optional
-from .._models import BaseModel
+from ..._models import BaseModel
from .load_balancer import LoadBalancer
__all__ = ["LoadBalancerRetrieveResponse"]
diff --git a/src/do_gradientai/types/load_balancer_update_params.py b/src/do_gradientai/types/gpu_droplets/load_balancer_update_params.py
similarity index 100%
rename from src/do_gradientai/types/load_balancer_update_params.py
rename to src/do_gradientai/types/gpu_droplets/load_balancer_update_params.py
diff --git a/src/do_gradientai/types/load_balancer_update_response.py b/src/do_gradientai/types/gpu_droplets/load_balancer_update_response.py
similarity index 89%
rename from src/do_gradientai/types/load_balancer_update_response.py
rename to src/do_gradientai/types/gpu_droplets/load_balancer_update_response.py
index e1a58a3f..2b24b376 100644
--- a/src/do_gradientai/types/load_balancer_update_response.py
+++ b/src/do_gradientai/types/gpu_droplets/load_balancer_update_response.py
@@ -2,7 +2,7 @@
from typing import Optional
-from .._models import BaseModel
+from ..._models import BaseModel
from .load_balancer import LoadBalancer
__all__ = ["LoadBalancerUpdateResponse"]
diff --git a/src/do_gradientai/types/load_balancers/__init__.py b/src/do_gradientai/types/gpu_droplets/load_balancers/__init__.py
similarity index 100%
rename from src/do_gradientai/types/load_balancers/__init__.py
rename to src/do_gradientai/types/gpu_droplets/load_balancers/__init__.py
diff --git a/src/do_gradientai/types/load_balancers/droplet_add_params.py b/src/do_gradientai/types/gpu_droplets/load_balancers/droplet_add_params.py
similarity index 100%
rename from src/do_gradientai/types/load_balancers/droplet_add_params.py
rename to src/do_gradientai/types/gpu_droplets/load_balancers/droplet_add_params.py
diff --git a/src/do_gradientai/types/load_balancers/droplet_remove_params.py b/src/do_gradientai/types/gpu_droplets/load_balancers/droplet_remove_params.py
similarity index 100%
rename from src/do_gradientai/types/load_balancers/droplet_remove_params.py
rename to src/do_gradientai/types/gpu_droplets/load_balancers/droplet_remove_params.py
diff --git a/src/do_gradientai/types/load_balancers/forwarding_rule_add_params.py b/src/do_gradientai/types/gpu_droplets/load_balancers/forwarding_rule_add_params.py
similarity index 100%
rename from src/do_gradientai/types/load_balancers/forwarding_rule_add_params.py
rename to src/do_gradientai/types/gpu_droplets/load_balancers/forwarding_rule_add_params.py
diff --git a/src/do_gradientai/types/load_balancers/forwarding_rule_remove_params.py b/src/do_gradientai/types/gpu_droplets/load_balancers/forwarding_rule_remove_params.py
similarity index 100%
rename from src/do_gradientai/types/load_balancers/forwarding_rule_remove_params.py
rename to src/do_gradientai/types/gpu_droplets/load_balancers/forwarding_rule_remove_params.py
diff --git a/src/do_gradientai/types/size_list_params.py b/src/do_gradientai/types/gpu_droplets/size_list_params.py
similarity index 100%
rename from src/do_gradientai/types/size_list_params.py
rename to src/do_gradientai/types/gpu_droplets/size_list_params.py
diff --git a/src/do_gradientai/types/images/action_list_response.py b/src/do_gradientai/types/gpu_droplets/size_list_response.py
similarity index 71%
rename from src/do_gradientai/types/images/action_list_response.py
rename to src/do_gradientai/types/gpu_droplets/size_list_response.py
index 1a20f780..c0c600b4 100644
--- a/src/do_gradientai/types/images/action_list_response.py
+++ b/src/do_gradientai/types/gpu_droplets/size_list_response.py
@@ -3,17 +3,17 @@
from typing import List, Optional
from ..._models import BaseModel
-from ..shared.action import Action
+from ..shared.size import Size
from ..shared.page_links import PageLinks
from ..shared.meta_properties import MetaProperties
-__all__ = ["ActionListResponse"]
+__all__ = ["SizeListResponse"]
-class ActionListResponse(BaseModel):
+class SizeListResponse(BaseModel):
meta: MetaProperties
"""Information about the response itself."""
- actions: Optional[List[Action]] = None
+ sizes: List[Size]
links: Optional[PageLinks] = None
diff --git a/src/do_gradientai/types/snapshot_list_params.py b/src/do_gradientai/types/gpu_droplets/snapshot_list_params.py
similarity index 100%
rename from src/do_gradientai/types/snapshot_list_params.py
rename to src/do_gradientai/types/gpu_droplets/snapshot_list_params.py
diff --git a/src/do_gradientai/types/volumes/snapshot_list_response.py b/src/do_gradientai/types/gpu_droplets/snapshot_list_response.py
similarity index 100%
rename from src/do_gradientai/types/volumes/snapshot_list_response.py
rename to src/do_gradientai/types/gpu_droplets/snapshot_list_response.py
diff --git a/src/do_gradientai/types/volumes/snapshot_retrieve_response.py b/src/do_gradientai/types/gpu_droplets/snapshot_retrieve_response.py
similarity index 100%
rename from src/do_gradientai/types/volumes/snapshot_retrieve_response.py
rename to src/do_gradientai/types/gpu_droplets/snapshot_retrieve_response.py
diff --git a/src/do_gradientai/types/sticky_sessions.py b/src/do_gradientai/types/gpu_droplets/sticky_sessions.py
similarity index 96%
rename from src/do_gradientai/types/sticky_sessions.py
rename to src/do_gradientai/types/gpu_droplets/sticky_sessions.py
index 5245d712..78debc07 100644
--- a/src/do_gradientai/types/sticky_sessions.py
+++ b/src/do_gradientai/types/gpu_droplets/sticky_sessions.py
@@ -3,7 +3,7 @@
from typing import Optional
from typing_extensions import Literal
-from .._models import BaseModel
+from ..._models import BaseModel
__all__ = ["StickySessions"]
diff --git a/src/do_gradientai/types/sticky_sessions_param.py b/src/do_gradientai/types/gpu_droplets/sticky_sessions_param.py
similarity index 100%
rename from src/do_gradientai/types/sticky_sessions_param.py
rename to src/do_gradientai/types/gpu_droplets/sticky_sessions_param.py
diff --git a/src/do_gradientai/types/volume_create_params.py b/src/do_gradientai/types/gpu_droplets/volume_create_params.py
similarity index 100%
rename from src/do_gradientai/types/volume_create_params.py
rename to src/do_gradientai/types/gpu_droplets/volume_create_params.py
diff --git a/src/do_gradientai/types/volume_create_response.py b/src/do_gradientai/types/gpu_droplets/volume_create_response.py
similarity index 96%
rename from src/do_gradientai/types/volume_create_response.py
rename to src/do_gradientai/types/gpu_droplets/volume_create_response.py
index cc3a560a..1bca9965 100644
--- a/src/do_gradientai/types/volume_create_response.py
+++ b/src/do_gradientai/types/gpu_droplets/volume_create_response.py
@@ -2,8 +2,8 @@
from typing import List, Optional
-from .._models import BaseModel
-from .shared.region import Region
+from ..._models import BaseModel
+from ..shared.region import Region
__all__ = ["VolumeCreateResponse", "Volume"]
diff --git a/src/do_gradientai/types/volume_delete_by_name_params.py b/src/do_gradientai/types/gpu_droplets/volume_delete_by_name_params.py
similarity index 100%
rename from src/do_gradientai/types/volume_delete_by_name_params.py
rename to src/do_gradientai/types/gpu_droplets/volume_delete_by_name_params.py
diff --git a/src/do_gradientai/types/volume_list_params.py b/src/do_gradientai/types/gpu_droplets/volume_list_params.py
similarity index 100%
rename from src/do_gradientai/types/volume_list_params.py
rename to src/do_gradientai/types/gpu_droplets/volume_list_params.py
diff --git a/src/do_gradientai/types/volume_list_response.py b/src/do_gradientai/types/gpu_droplets/volume_list_response.py
similarity index 92%
rename from src/do_gradientai/types/volume_list_response.py
rename to src/do_gradientai/types/gpu_droplets/volume_list_response.py
index f8a97439..69ff421a 100644
--- a/src/do_gradientai/types/volume_list_response.py
+++ b/src/do_gradientai/types/gpu_droplets/volume_list_response.py
@@ -2,10 +2,10 @@
from typing import List, Optional
-from .._models import BaseModel
-from .shared.region import Region
-from .shared.page_links import PageLinks
-from .shared.meta_properties import MetaProperties
+from ..._models import BaseModel
+from ..shared.region import Region
+from ..shared.page_links import PageLinks
+from ..shared.meta_properties import MetaProperties
__all__ = ["VolumeListResponse", "Volume"]
diff --git a/src/do_gradientai/types/volume_retrieve_response.py b/src/do_gradientai/types/gpu_droplets/volume_retrieve_response.py
similarity index 96%
rename from src/do_gradientai/types/volume_retrieve_response.py
rename to src/do_gradientai/types/gpu_droplets/volume_retrieve_response.py
index c9653f9e..3efe8de7 100644
--- a/src/do_gradientai/types/volume_retrieve_response.py
+++ b/src/do_gradientai/types/gpu_droplets/volume_retrieve_response.py
@@ -2,8 +2,8 @@
from typing import List, Optional
-from .._models import BaseModel
-from .shared.region import Region
+from ..._models import BaseModel
+from ..shared.region import Region
__all__ = ["VolumeRetrieveResponse", "Volume"]
diff --git a/src/do_gradientai/types/volumes/__init__.py b/src/do_gradientai/types/gpu_droplets/volumes/__init__.py
similarity index 100%
rename from src/do_gradientai/types/volumes/__init__.py
rename to src/do_gradientai/types/gpu_droplets/volumes/__init__.py
diff --git a/src/do_gradientai/types/volumes/action_initiate_by_id_params.py b/src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_id_params.py
similarity index 100%
rename from src/do_gradientai/types/volumes/action_initiate_by_id_params.py
rename to src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_id_params.py
diff --git a/src/do_gradientai/types/volumes/action_initiate_by_id_response.py b/src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_id_response.py
similarity index 89%
rename from src/do_gradientai/types/volumes/action_initiate_by_id_response.py
rename to src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_id_response.py
index 23484c97..d8460f22 100644
--- a/src/do_gradientai/types/volumes/action_initiate_by_id_response.py
+++ b/src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_id_response.py
@@ -2,7 +2,7 @@
from typing import Optional
-from ..._models import BaseModel
+from ...._models import BaseModel
from .volume_action import VolumeAction
__all__ = ["ActionInitiateByIDResponse"]
diff --git a/src/do_gradientai/types/volumes/action_initiate_by_name_params.py b/src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_name_params.py
similarity index 100%
rename from src/do_gradientai/types/volumes/action_initiate_by_name_params.py
rename to src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_name_params.py
diff --git a/src/do_gradientai/types/volumes/action_initiate_by_name_response.py b/src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_name_response.py
similarity index 89%
rename from src/do_gradientai/types/volumes/action_initiate_by_name_response.py
rename to src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_name_response.py
index 0b84be25..9a935bdf 100644
--- a/src/do_gradientai/types/volumes/action_initiate_by_name_response.py
+++ b/src/do_gradientai/types/gpu_droplets/volumes/action_initiate_by_name_response.py
@@ -2,7 +2,7 @@
from typing import Optional
-from ..._models import BaseModel
+from ...._models import BaseModel
from .volume_action import VolumeAction
__all__ = ["ActionInitiateByNameResponse"]
diff --git a/src/do_gradientai/types/volumes/action_list_params.py b/src/do_gradientai/types/gpu_droplets/volumes/action_list_params.py
similarity index 100%
rename from src/do_gradientai/types/volumes/action_list_params.py
rename to src/do_gradientai/types/gpu_droplets/volumes/action_list_params.py
diff --git a/src/do_gradientai/types/volumes/action_list_response.py b/src/do_gradientai/types/gpu_droplets/volumes/action_list_response.py
similarity index 75%
rename from src/do_gradientai/types/volumes/action_list_response.py
rename to src/do_gradientai/types/gpu_droplets/volumes/action_list_response.py
index ddb17e23..35964633 100644
--- a/src/do_gradientai/types/volumes/action_list_response.py
+++ b/src/do_gradientai/types/gpu_droplets/volumes/action_list_response.py
@@ -2,10 +2,10 @@
from typing import List, Optional
-from ..._models import BaseModel
+from ...._models import BaseModel
from .volume_action import VolumeAction
-from ..shared.page_links import PageLinks
-from ..shared.meta_properties import MetaProperties
+from ...shared.page_links import PageLinks
+from ...shared.meta_properties import MetaProperties
__all__ = ["ActionListResponse"]
diff --git a/src/do_gradientai/types/volumes/action_retrieve_params.py b/src/do_gradientai/types/gpu_droplets/volumes/action_retrieve_params.py
similarity index 100%
rename from src/do_gradientai/types/volumes/action_retrieve_params.py
rename to src/do_gradientai/types/gpu_droplets/volumes/action_retrieve_params.py
diff --git a/src/do_gradientai/types/volumes/action_retrieve_response.py b/src/do_gradientai/types/gpu_droplets/volumes/action_retrieve_response.py
similarity index 89%
rename from src/do_gradientai/types/volumes/action_retrieve_response.py
rename to src/do_gradientai/types/gpu_droplets/volumes/action_retrieve_response.py
index 9f43cabe..cd47f37e 100644
--- a/src/do_gradientai/types/volumes/action_retrieve_response.py
+++ b/src/do_gradientai/types/gpu_droplets/volumes/action_retrieve_response.py
@@ -2,7 +2,7 @@
from typing import Optional
-from ..._models import BaseModel
+from ...._models import BaseModel
from .volume_action import VolumeAction
__all__ = ["ActionRetrieveResponse"]
diff --git a/src/do_gradientai/types/volumes/snapshot_create_params.py b/src/do_gradientai/types/gpu_droplets/volumes/snapshot_create_params.py
similarity index 100%
rename from src/do_gradientai/types/volumes/snapshot_create_params.py
rename to src/do_gradientai/types/gpu_droplets/volumes/snapshot_create_params.py
diff --git a/src/do_gradientai/types/volumes/snapshot_create_response.py b/src/do_gradientai/types/gpu_droplets/volumes/snapshot_create_response.py
similarity index 75%
rename from src/do_gradientai/types/volumes/snapshot_create_response.py
rename to src/do_gradientai/types/gpu_droplets/volumes/snapshot_create_response.py
index 4c7049d1..41701795 100644
--- a/src/do_gradientai/types/volumes/snapshot_create_response.py
+++ b/src/do_gradientai/types/gpu_droplets/volumes/snapshot_create_response.py
@@ -2,8 +2,8 @@
from typing import Optional
-from ..._models import BaseModel
-from ..shared.snapshots import Snapshots
+from ...._models import BaseModel
+from ...shared.snapshots import Snapshots
__all__ = ["SnapshotCreateResponse"]
diff --git a/src/do_gradientai/types/volumes/snapshot_list_params.py b/src/do_gradientai/types/gpu_droplets/volumes/snapshot_list_params.py
similarity index 100%
rename from src/do_gradientai/types/volumes/snapshot_list_params.py
rename to src/do_gradientai/types/gpu_droplets/volumes/snapshot_list_params.py
diff --git a/src/do_gradientai/types/snapshot_list_response.py b/src/do_gradientai/types/gpu_droplets/volumes/snapshot_list_response.py
similarity index 67%
rename from src/do_gradientai/types/snapshot_list_response.py
rename to src/do_gradientai/types/gpu_droplets/volumes/snapshot_list_response.py
index 1b55b099..25d91ed2 100644
--- a/src/do_gradientai/types/snapshot_list_response.py
+++ b/src/do_gradientai/types/gpu_droplets/volumes/snapshot_list_response.py
@@ -2,10 +2,10 @@
from typing import List, Optional
-from .._models import BaseModel
-from .shared.snapshots import Snapshots
-from .shared.page_links import PageLinks
-from .shared.meta_properties import MetaProperties
+from ...._models import BaseModel
+from ...shared.snapshots import Snapshots
+from ...shared.page_links import PageLinks
+from ...shared.meta_properties import MetaProperties
__all__ = ["SnapshotListResponse"]
diff --git a/src/do_gradientai/types/snapshot_retrieve_response.py b/src/do_gradientai/types/gpu_droplets/volumes/snapshot_retrieve_response.py
similarity index 76%
rename from src/do_gradientai/types/snapshot_retrieve_response.py
rename to src/do_gradientai/types/gpu_droplets/volumes/snapshot_retrieve_response.py
index f9ec12bc..3defa47d 100644
--- a/src/do_gradientai/types/snapshot_retrieve_response.py
+++ b/src/do_gradientai/types/gpu_droplets/volumes/snapshot_retrieve_response.py
@@ -2,8 +2,8 @@
from typing import Optional
-from .._models import BaseModel
-from .shared.snapshots import Snapshots
+from ...._models import BaseModel
+from ...shared.snapshots import Snapshots
__all__ = ["SnapshotRetrieveResponse"]
diff --git a/src/do_gradientai/types/volumes/volume_action.py b/src/do_gradientai/types/gpu_droplets/volumes/volume_action.py
similarity index 92%
rename from src/do_gradientai/types/volumes/volume_action.py
rename to src/do_gradientai/types/gpu_droplets/volumes/volume_action.py
index 4d9adf3b..e1c01f6c 100644
--- a/src/do_gradientai/types/volumes/volume_action.py
+++ b/src/do_gradientai/types/gpu_droplets/volumes/volume_action.py
@@ -2,7 +2,7 @@
from typing import Optional
-from ..shared.action import Action
+from ...shared.action import Action
__all__ = ["VolumeAction"]
diff --git a/src/do_gradientai/types/image_list_response.py b/src/do_gradientai/types/image_list_response.py
deleted file mode 100644
index 0e3f7324..00000000
--- a/src/do_gradientai/types/image_list_response.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import List, Optional
-
-from .._models import BaseModel
-from .shared.image import Image
-from .shared.page_links import PageLinks
-from .shared.meta_properties import MetaProperties
-
-__all__ = ["ImageListResponse"]
-
-
-class ImageListResponse(BaseModel):
- images: List[Image]
-
- meta: MetaProperties
- """Information about the response itself."""
-
- links: Optional[PageLinks] = None
diff --git a/src/do_gradientai/types/shared/__init__.py b/src/do_gradientai/types/shared/__init__.py
index 881ec31a..6d90845f 100644
--- a/src/do_gradientai/types/shared/__init__.py
+++ b/src/do_gradientai/types/shared/__init__.py
@@ -19,12 +19,9 @@
from .subscription import Subscription as Subscription
from .forward_links import ForwardLinks as ForwardLinks
from .backward_links import BackwardLinks as BackwardLinks
-from .repository_tag import RepositoryTag as RepositoryTag
from .meta_properties import MetaProperties as MetaProperties
-from .repository_blob import RepositoryBlob as RepositoryBlob
from .completion_usage import CompletionUsage as CompletionUsage
from .garbage_collection import GarbageCollection as GarbageCollection
-from .repository_manifest import RepositoryManifest as RepositoryManifest
from .firewall_rule_target import FirewallRuleTarget as FirewallRuleTarget
from .chat_completion_chunk import ChatCompletionChunk as ChatCompletionChunk
from .subscription_tier_base import SubscriptionTierBase as SubscriptionTierBase
diff --git a/src/do_gradientai/types/shared/repository_blob.py b/src/do_gradientai/types/shared/repository_blob.py
deleted file mode 100644
index aae5702b..00000000
--- a/src/do_gradientai/types/shared/repository_blob.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Optional
-
-from ..._models import BaseModel
-
-__all__ = ["RepositoryBlob"]
-
-
-class RepositoryBlob(BaseModel):
- compressed_size_bytes: Optional[int] = None
- """The compressed size of the blob in bytes."""
-
- digest: Optional[str] = None
- """The digest of the blob"""
diff --git a/src/do_gradientai/types/shared/repository_manifest.py b/src/do_gradientai/types/shared/repository_manifest.py
deleted file mode 100644
index babbbea2..00000000
--- a/src/do_gradientai/types/shared/repository_manifest.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import List, Optional
-from datetime import datetime
-
-from ..._models import BaseModel
-from .repository_blob import RepositoryBlob
-
-__all__ = ["RepositoryManifest"]
-
-
-class RepositoryManifest(BaseModel):
- blobs: Optional[List[RepositoryBlob]] = None
- """All blobs associated with this manifest"""
-
- compressed_size_bytes: Optional[int] = None
- """The compressed size of the manifest in bytes."""
-
- digest: Optional[str] = None
- """The manifest digest"""
-
- registry_name: Optional[str] = None
- """The name of the container registry."""
-
- repository: Optional[str] = None
- """The name of the repository."""
-
- size_bytes: Optional[int] = None
- """
- The uncompressed size of the manifest in bytes (this size is calculated
- asynchronously so it may not be immediately available).
- """
-
- tags: Optional[List[str]] = None
- """All tags associated with this manifest"""
-
- updated_at: Optional[datetime] = None
- """The time the manifest was last updated."""
diff --git a/src/do_gradientai/types/shared/repository_tag.py b/src/do_gradientai/types/shared/repository_tag.py
deleted file mode 100644
index a06ec6bb..00000000
--- a/src/do_gradientai/types/shared/repository_tag.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import Optional
-from datetime import datetime
-
-from ..._models import BaseModel
-
-__all__ = ["RepositoryTag"]
-
-
-class RepositoryTag(BaseModel):
- compressed_size_bytes: Optional[int] = None
- """The compressed size of the tag in bytes."""
-
- manifest_digest: Optional[str] = None
- """The digest of the manifest associated with the tag."""
-
- registry_name: Optional[str] = None
- """The name of the container registry."""
-
- repository: Optional[str] = None
- """The name of the repository."""
-
- size_bytes: Optional[int] = None
- """
- The uncompressed size of the tag in bytes (this size is calculated
- asynchronously so it may not be immediately available).
- """
-
- tag: Optional[str] = None
- """The name of the tag."""
-
- updated_at: Optional[datetime] = None
- """The time the tag was last updated."""
diff --git a/src/do_gradientai/types/size_list_response.py b/src/do_gradientai/types/size_list_response.py
deleted file mode 100644
index 4ef8078b..00000000
--- a/src/do_gradientai/types/size_list_response.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import List, Optional
-
-from .._models import BaseModel
-from .shared.size import Size
-from .shared.page_links import PageLinks
-from .shared.meta_properties import MetaProperties
-
-__all__ = ["SizeListResponse"]
-
-
-class SizeListResponse(BaseModel):
- meta: MetaProperties
- """Information about the response itself."""
-
- sizes: List[Size]
-
- links: Optional[PageLinks] = None
diff --git a/tests/api_resources/agents/chat/test_completions.py b/tests/api_resources/agents/chat/test_completions.py
index 2ec29fc3..de43cc34 100644
--- a/tests/api_resources/agents/chat/test_completions.py
+++ b/tests/api_resources/agents/chat/test_completions.py
@@ -15,9 +15,7 @@
class TestCompletions:
- parametrize = pytest.mark.parametrize(
- "client", [False, True], indirect=True, ids=["loose", "strict"]
- )
+ parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
@pytest.mark.skip()
@parametrize
@@ -216,9 +214,7 @@ class TestAsyncCompletions:
@pytest.mark.skip()
@parametrize
- async def test_method_create_overload_1(
- self, async_client: AsyncGradientAI
- ) -> None:
+ async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None:
completion = await async_client.agents.chat.completions.create(
messages=[
{
@@ -232,9 +228,7 @@ async def test_method_create_overload_1(
@pytest.mark.skip()
@parametrize
- async def test_method_create_with_all_params_overload_1(
- self, async_client: AsyncGradientAI
- ) -> None:
+ async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None:
completion = await async_client.agents.chat.completions.create(
messages=[
{
@@ -274,9 +268,7 @@ async def test_method_create_with_all_params_overload_1(
@pytest.mark.skip()
@parametrize
- async def test_raw_response_create_overload_1(
- self, async_client: AsyncGradientAI
- ) -> None:
+ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
response = await async_client.agents.chat.completions.with_raw_response.create(
messages=[
{
@@ -294,9 +286,7 @@ async def test_raw_response_create_overload_1(
@pytest.mark.skip()
@parametrize
- async def test_streaming_response_create_overload_1(
- self, async_client: AsyncGradientAI
- ) -> None:
+ async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
async with async_client.agents.chat.completions.with_streaming_response.create(
messages=[
{
@@ -316,9 +306,7 @@ async def test_streaming_response_create_overload_1(
@pytest.mark.skip()
@parametrize
- async def test_method_create_overload_2(
- self, async_client: AsyncGradientAI
- ) -> None:
+ async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None:
completion_stream = await async_client.agents.chat.completions.create(
messages=[
{
@@ -333,9 +321,7 @@ async def test_method_create_overload_2(
@pytest.mark.skip()
@parametrize
- async def test_method_create_with_all_params_overload_2(
- self, async_client: AsyncGradientAI
- ) -> None:
+ async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None:
completion_stream = await async_client.agents.chat.completions.create(
messages=[
{
@@ -375,9 +361,7 @@ async def test_method_create_with_all_params_overload_2(
@pytest.mark.skip()
@parametrize
- async def test_raw_response_create_overload_2(
- self, async_client: AsyncGradientAI
- ) -> None:
+ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
response = await async_client.agents.chat.completions.with_raw_response.create(
messages=[
{
@@ -395,9 +379,7 @@ async def test_raw_response_create_overload_2(
@pytest.mark.skip()
@parametrize
- async def test_streaming_response_create_overload_2(
- self, async_client: AsyncGradientAI
- ) -> None:
+ async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
async with async_client.agents.chat.completions.with_streaming_response.create(
messages=[
{
diff --git a/tests/api_resources/account/__init__.py b/tests/api_resources/gpu_droplets/__init__.py
similarity index 100%
rename from tests/api_resources/account/__init__.py
rename to tests/api_resources/gpu_droplets/__init__.py
diff --git a/tests/api_resources/droplets/__init__.py b/tests/api_resources/gpu_droplets/account/__init__.py
similarity index 100%
rename from tests/api_resources/droplets/__init__.py
rename to tests/api_resources/gpu_droplets/account/__init__.py
diff --git a/tests/api_resources/account/test_keys.py b/tests/api_resources/gpu_droplets/account/test_keys.py
similarity index 83%
rename from tests/api_resources/account/test_keys.py
rename to tests/api_resources/gpu_droplets/account/test_keys.py
index 38318d6a..cf168f61 100644
--- a/tests/api_resources/account/test_keys.py
+++ b/tests/api_resources/gpu_droplets/account/test_keys.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types.account import (
+from do_gradientai.types.gpu_droplets.account import (
KeyListResponse,
KeyCreateResponse,
KeyUpdateResponse,
@@ -25,7 +25,7 @@ class TestKeys:
@pytest.mark.skip()
@parametrize
def test_method_create(self, client: GradientAI) -> None:
- key = client.account.keys.create(
+ key = client.gpu_droplets.account.keys.create(
name="My SSH Public Key",
public_key="ssh-rsa AEXAMPLEaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example",
)
@@ -34,7 +34,7 @@ def test_method_create(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_create(self, client: GradientAI) -> None:
- response = client.account.keys.with_raw_response.create(
+ response = client.gpu_droplets.account.keys.with_raw_response.create(
name="My SSH Public Key",
public_key="ssh-rsa AEXAMPLEaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example",
)
@@ -47,7 +47,7 @@ def test_raw_response_create(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create(self, client: GradientAI) -> None:
- with client.account.keys.with_streaming_response.create(
+ with client.gpu_droplets.account.keys.with_streaming_response.create(
name="My SSH Public Key",
public_key="ssh-rsa AEXAMPLEaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example",
) as response:
@@ -62,7 +62,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- key = client.account.keys.retrieve(
+ key = client.gpu_droplets.account.keys.retrieve(
512189,
)
assert_matches_type(KeyRetrieveResponse, key, path=["response"])
@@ -70,7 +70,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.account.keys.with_raw_response.retrieve(
+ response = client.gpu_droplets.account.keys.with_raw_response.retrieve(
512189,
)
@@ -82,7 +82,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.account.keys.with_streaming_response.retrieve(
+ with client.gpu_droplets.account.keys.with_streaming_response.retrieve(
512189,
) as response:
assert not response.is_closed
@@ -96,7 +96,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_update(self, client: GradientAI) -> None:
- key = client.account.keys.update(
+ key = client.gpu_droplets.account.keys.update(
ssh_key_identifier=512189,
)
assert_matches_type(KeyUpdateResponse, key, path=["response"])
@@ -104,7 +104,7 @@ def test_method_update(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_update_with_all_params(self, client: GradientAI) -> None:
- key = client.account.keys.update(
+ key = client.gpu_droplets.account.keys.update(
ssh_key_identifier=512189,
name="My SSH Public Key",
)
@@ -113,7 +113,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_update(self, client: GradientAI) -> None:
- response = client.account.keys.with_raw_response.update(
+ response = client.gpu_droplets.account.keys.with_raw_response.update(
ssh_key_identifier=512189,
)
@@ -125,7 +125,7 @@ def test_raw_response_update(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_update(self, client: GradientAI) -> None:
- with client.account.keys.with_streaming_response.update(
+ with client.gpu_droplets.account.keys.with_streaming_response.update(
ssh_key_identifier=512189,
) as response:
assert not response.is_closed
@@ -139,13 +139,13 @@ def test_streaming_response_update(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- key = client.account.keys.list()
+ key = client.gpu_droplets.account.keys.list()
assert_matches_type(KeyListResponse, key, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- key = client.account.keys.list(
+ key = client.gpu_droplets.account.keys.list(
page=1,
per_page=1,
)
@@ -154,7 +154,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.account.keys.with_raw_response.list()
+ response = client.gpu_droplets.account.keys.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -164,7 +164,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.account.keys.with_streaming_response.list() as response:
+ with client.gpu_droplets.account.keys.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -176,7 +176,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_delete(self, client: GradientAI) -> None:
- key = client.account.keys.delete(
+ key = client.gpu_droplets.account.keys.delete(
512189,
)
assert key is None
@@ -184,7 +184,7 @@ def test_method_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_delete(self, client: GradientAI) -> None:
- response = client.account.keys.with_raw_response.delete(
+ response = client.gpu_droplets.account.keys.with_raw_response.delete(
512189,
)
@@ -196,7 +196,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete(self, client: GradientAI) -> None:
- with client.account.keys.with_streaming_response.delete(
+ with client.gpu_droplets.account.keys.with_streaming_response.delete(
512189,
) as response:
assert not response.is_closed
@@ -216,7 +216,7 @@ class TestAsyncKeys:
@pytest.mark.skip()
@parametrize
async def test_method_create(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.account.keys.create(
+ key = await async_client.gpu_droplets.account.keys.create(
name="My SSH Public Key",
public_key="ssh-rsa AEXAMPLEaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example",
)
@@ -225,7 +225,7 @@ async def test_method_create(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.account.keys.with_raw_response.create(
+ response = await async_client.gpu_droplets.account.keys.with_raw_response.create(
name="My SSH Public Key",
public_key="ssh-rsa AEXAMPLEaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example",
)
@@ -238,7 +238,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None:
- async with async_client.account.keys.with_streaming_response.create(
+ async with async_client.gpu_droplets.account.keys.with_streaming_response.create(
name="My SSH Public Key",
public_key="ssh-rsa AEXAMPLEaC1yc2EAAAADAQABAAAAQQDDHr/jh2Jy4yALcK4JyWbVkPRaWmhck3IgCoeOO3z1e2dBowLh64QAM+Qb72pxekALga2oi4GvT+TlWNhzPH4V example",
) as response:
@@ -253,7 +253,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.account.keys.retrieve(
+ key = await async_client.gpu_droplets.account.keys.retrieve(
512189,
)
assert_matches_type(KeyRetrieveResponse, key, path=["response"])
@@ -261,7 +261,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.account.keys.with_raw_response.retrieve(
+ response = await async_client.gpu_droplets.account.keys.with_raw_response.retrieve(
512189,
)
@@ -273,7 +273,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.account.keys.with_streaming_response.retrieve(
+ async with async_client.gpu_droplets.account.keys.with_streaming_response.retrieve(
512189,
) as response:
assert not response.is_closed
@@ -287,7 +287,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_method_update(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.account.keys.update(
+ key = await async_client.gpu_droplets.account.keys.update(
ssh_key_identifier=512189,
)
assert_matches_type(KeyUpdateResponse, key, path=["response"])
@@ -295,7 +295,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.account.keys.update(
+ key = await async_client.gpu_droplets.account.keys.update(
ssh_key_identifier=512189,
name="My SSH Public Key",
)
@@ -304,7 +304,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.account.keys.with_raw_response.update(
+ response = await async_client.gpu_droplets.account.keys.with_raw_response.update(
ssh_key_identifier=512189,
)
@@ -316,7 +316,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None:
- async with async_client.account.keys.with_streaming_response.update(
+ async with async_client.gpu_droplets.account.keys.with_streaming_response.update(
ssh_key_identifier=512189,
) as response:
assert not response.is_closed
@@ -330,13 +330,13 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.account.keys.list()
+ key = await async_client.gpu_droplets.account.keys.list()
assert_matches_type(KeyListResponse, key, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.account.keys.list(
+ key = await async_client.gpu_droplets.account.keys.list(
page=1,
per_page=1,
)
@@ -345,7 +345,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.account.keys.with_raw_response.list()
+ response = await async_client.gpu_droplets.account.keys.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -355,7 +355,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.account.keys.with_streaming_response.list() as response:
+ async with async_client.gpu_droplets.account.keys.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -367,7 +367,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N
@pytest.mark.skip()
@parametrize
async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
- key = await async_client.account.keys.delete(
+ key = await async_client.gpu_droplets.account.keys.delete(
512189,
)
assert key is None
@@ -375,7 +375,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.account.keys.with_raw_response.delete(
+ response = await async_client.gpu_droplets.account.keys.with_raw_response.delete(
512189,
)
@@ -387,7 +387,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None:
- async with async_client.account.keys.with_streaming_response.delete(
+ async with async_client.gpu_droplets.account.keys.with_streaming_response.delete(
512189,
) as response:
assert not response.is_closed
diff --git a/tests/api_resources/firewalls/__init__.py b/tests/api_resources/gpu_droplets/firewalls/__init__.py
similarity index 100%
rename from tests/api_resources/firewalls/__init__.py
rename to tests/api_resources/gpu_droplets/firewalls/__init__.py
diff --git a/tests/api_resources/firewalls/test_droplets.py b/tests/api_resources/gpu_droplets/firewalls/test_droplets.py
similarity index 83%
rename from tests/api_resources/firewalls/test_droplets.py
rename to tests/api_resources/gpu_droplets/firewalls/test_droplets.py
index 3fb117f9..819a5e6e 100644
--- a/tests/api_resources/firewalls/test_droplets.py
+++ b/tests/api_resources/gpu_droplets/firewalls/test_droplets.py
@@ -18,7 +18,7 @@ class TestDroplets:
@pytest.mark.skip()
@parametrize
def test_method_add(self, client: GradientAI) -> None:
- droplet = client.firewalls.droplets.add(
+ droplet = client.gpu_droplets.firewalls.droplets.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
droplet_ids=[49696269],
)
@@ -27,7 +27,7 @@ def test_method_add(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_add(self, client: GradientAI) -> None:
- response = client.firewalls.droplets.with_raw_response.add(
+ response = client.gpu_droplets.firewalls.droplets.with_raw_response.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
droplet_ids=[49696269],
)
@@ -40,7 +40,7 @@ def test_raw_response_add(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_add(self, client: GradientAI) -> None:
- with client.firewalls.droplets.with_streaming_response.add(
+ with client.gpu_droplets.firewalls.droplets.with_streaming_response.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
droplet_ids=[49696269],
) as response:
@@ -56,7 +56,7 @@ def test_streaming_response_add(self, client: GradientAI) -> None:
@parametrize
def test_path_params_add(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- client.firewalls.droplets.with_raw_response.add(
+ client.gpu_droplets.firewalls.droplets.with_raw_response.add(
firewall_id="",
droplet_ids=[49696269],
)
@@ -64,7 +64,7 @@ def test_path_params_add(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_remove(self, client: GradientAI) -> None:
- droplet = client.firewalls.droplets.remove(
+ droplet = client.gpu_droplets.firewalls.droplets.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
droplet_ids=[49696269],
)
@@ -73,7 +73,7 @@ def test_method_remove(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_remove(self, client: GradientAI) -> None:
- response = client.firewalls.droplets.with_raw_response.remove(
+ response = client.gpu_droplets.firewalls.droplets.with_raw_response.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
droplet_ids=[49696269],
)
@@ -86,7 +86,7 @@ def test_raw_response_remove(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_remove(self, client: GradientAI) -> None:
- with client.firewalls.droplets.with_streaming_response.remove(
+ with client.gpu_droplets.firewalls.droplets.with_streaming_response.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
droplet_ids=[49696269],
) as response:
@@ -102,7 +102,7 @@ def test_streaming_response_remove(self, client: GradientAI) -> None:
@parametrize
def test_path_params_remove(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- client.firewalls.droplets.with_raw_response.remove(
+ client.gpu_droplets.firewalls.droplets.with_raw_response.remove(
firewall_id="",
droplet_ids=[49696269],
)
@@ -116,7 +116,7 @@ class TestAsyncDroplets:
@pytest.mark.skip()
@parametrize
async def test_method_add(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.firewalls.droplets.add(
+ droplet = await async_client.gpu_droplets.firewalls.droplets.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
droplet_ids=[49696269],
)
@@ -125,7 +125,7 @@ async def test_method_add(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.firewalls.droplets.with_raw_response.add(
+ response = await async_client.gpu_droplets.firewalls.droplets.with_raw_response.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
droplet_ids=[49696269],
)
@@ -138,7 +138,7 @@ async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> None:
- async with async_client.firewalls.droplets.with_streaming_response.add(
+ async with async_client.gpu_droplets.firewalls.droplets.with_streaming_response.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
droplet_ids=[49696269],
) as response:
@@ -154,7 +154,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> No
@parametrize
async def test_path_params_add(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- await async_client.firewalls.droplets.with_raw_response.add(
+ await async_client.gpu_droplets.firewalls.droplets.with_raw_response.add(
firewall_id="",
droplet_ids=[49696269],
)
@@ -162,7 +162,7 @@ async def test_path_params_add(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_remove(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.firewalls.droplets.remove(
+ droplet = await async_client.gpu_droplets.firewalls.droplets.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
droplet_ids=[49696269],
)
@@ -171,7 +171,7 @@ async def test_method_remove(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.firewalls.droplets.with_raw_response.remove(
+ response = await async_client.gpu_droplets.firewalls.droplets.with_raw_response.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
droplet_ids=[49696269],
)
@@ -184,7 +184,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> None:
- async with async_client.firewalls.droplets.with_streaming_response.remove(
+ async with async_client.gpu_droplets.firewalls.droplets.with_streaming_response.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
droplet_ids=[49696269],
) as response:
@@ -200,7 +200,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_remove(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- await async_client.firewalls.droplets.with_raw_response.remove(
+ await async_client.gpu_droplets.firewalls.droplets.with_raw_response.remove(
firewall_id="",
droplet_ids=[49696269],
)
diff --git a/tests/api_resources/firewalls/test_rules.py b/tests/api_resources/gpu_droplets/firewalls/test_rules.py
similarity index 88%
rename from tests/api_resources/firewalls/test_rules.py
rename to tests/api_resources/gpu_droplets/firewalls/test_rules.py
index 6f82e253..b2eab40c 100644
--- a/tests/api_resources/firewalls/test_rules.py
+++ b/tests/api_resources/gpu_droplets/firewalls/test_rules.py
@@ -18,7 +18,7 @@ class TestRules:
@pytest.mark.skip()
@parametrize
def test_method_add(self, client: GradientAI) -> None:
- rule = client.firewalls.rules.add(
+ rule = client.gpu_droplets.firewalls.rules.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
)
assert rule is None
@@ -26,7 +26,7 @@ def test_method_add(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_add_with_all_params(self, client: GradientAI) -> None:
- rule = client.firewalls.rules.add(
+ rule = client.gpu_droplets.firewalls.rules.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
inbound_rules=[
{
@@ -60,7 +60,7 @@ def test_method_add_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_add(self, client: GradientAI) -> None:
- response = client.firewalls.rules.with_raw_response.add(
+ response = client.gpu_droplets.firewalls.rules.with_raw_response.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
)
@@ -72,7 +72,7 @@ def test_raw_response_add(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_add(self, client: GradientAI) -> None:
- with client.firewalls.rules.with_streaming_response.add(
+ with client.gpu_droplets.firewalls.rules.with_streaming_response.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
) as response:
assert not response.is_closed
@@ -87,14 +87,14 @@ def test_streaming_response_add(self, client: GradientAI) -> None:
@parametrize
def test_path_params_add(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- client.firewalls.rules.with_raw_response.add(
+ client.gpu_droplets.firewalls.rules.with_raw_response.add(
firewall_id="",
)
@pytest.mark.skip()
@parametrize
def test_method_remove(self, client: GradientAI) -> None:
- rule = client.firewalls.rules.remove(
+ rule = client.gpu_droplets.firewalls.rules.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
)
assert rule is None
@@ -102,7 +102,7 @@ def test_method_remove(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_remove_with_all_params(self, client: GradientAI) -> None:
- rule = client.firewalls.rules.remove(
+ rule = client.gpu_droplets.firewalls.rules.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
inbound_rules=[
{
@@ -136,7 +136,7 @@ def test_method_remove_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_remove(self, client: GradientAI) -> None:
- response = client.firewalls.rules.with_raw_response.remove(
+ response = client.gpu_droplets.firewalls.rules.with_raw_response.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
)
@@ -148,7 +148,7 @@ def test_raw_response_remove(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_remove(self, client: GradientAI) -> None:
- with client.firewalls.rules.with_streaming_response.remove(
+ with client.gpu_droplets.firewalls.rules.with_streaming_response.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
) as response:
assert not response.is_closed
@@ -163,7 +163,7 @@ def test_streaming_response_remove(self, client: GradientAI) -> None:
@parametrize
def test_path_params_remove(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- client.firewalls.rules.with_raw_response.remove(
+ client.gpu_droplets.firewalls.rules.with_raw_response.remove(
firewall_id="",
)
@@ -176,7 +176,7 @@ class TestAsyncRules:
@pytest.mark.skip()
@parametrize
async def test_method_add(self, async_client: AsyncGradientAI) -> None:
- rule = await async_client.firewalls.rules.add(
+ rule = await async_client.gpu_droplets.firewalls.rules.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
)
assert rule is None
@@ -184,7 +184,7 @@ async def test_method_add(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_add_with_all_params(self, async_client: AsyncGradientAI) -> None:
- rule = await async_client.firewalls.rules.add(
+ rule = await async_client.gpu_droplets.firewalls.rules.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
inbound_rules=[
{
@@ -218,7 +218,7 @@ async def test_method_add_with_all_params(self, async_client: AsyncGradientAI) -
@pytest.mark.skip()
@parametrize
async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.firewalls.rules.with_raw_response.add(
+ response = await async_client.gpu_droplets.firewalls.rules.with_raw_response.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
)
@@ -230,7 +230,7 @@ async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> None:
- async with async_client.firewalls.rules.with_streaming_response.add(
+ async with async_client.gpu_droplets.firewalls.rules.with_streaming_response.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
) as response:
assert not response.is_closed
@@ -245,14 +245,14 @@ async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> No
@parametrize
async def test_path_params_add(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- await async_client.firewalls.rules.with_raw_response.add(
+ await async_client.gpu_droplets.firewalls.rules.with_raw_response.add(
firewall_id="",
)
@pytest.mark.skip()
@parametrize
async def test_method_remove(self, async_client: AsyncGradientAI) -> None:
- rule = await async_client.firewalls.rules.remove(
+ rule = await async_client.gpu_droplets.firewalls.rules.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
)
assert rule is None
@@ -260,7 +260,7 @@ async def test_method_remove(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_remove_with_all_params(self, async_client: AsyncGradientAI) -> None:
- rule = await async_client.firewalls.rules.remove(
+ rule = await async_client.gpu_droplets.firewalls.rules.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
inbound_rules=[
{
@@ -294,7 +294,7 @@ async def test_method_remove_with_all_params(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.firewalls.rules.with_raw_response.remove(
+ response = await async_client.gpu_droplets.firewalls.rules.with_raw_response.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
)
@@ -306,7 +306,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> None:
- async with async_client.firewalls.rules.with_streaming_response.remove(
+ async with async_client.gpu_droplets.firewalls.rules.with_streaming_response.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
) as response:
assert not response.is_closed
@@ -321,6 +321,6 @@ async def test_streaming_response_remove(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_remove(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- await async_client.firewalls.rules.with_raw_response.remove(
+ await async_client.gpu_droplets.firewalls.rules.with_raw_response.remove(
firewall_id="",
)
diff --git a/tests/api_resources/firewalls/test_tags.py b/tests/api_resources/gpu_droplets/firewalls/test_tags.py
similarity index 83%
rename from tests/api_resources/firewalls/test_tags.py
rename to tests/api_resources/gpu_droplets/firewalls/test_tags.py
index 04aa0975..25c9362b 100644
--- a/tests/api_resources/firewalls/test_tags.py
+++ b/tests/api_resources/gpu_droplets/firewalls/test_tags.py
@@ -18,7 +18,7 @@ class TestTags:
@pytest.mark.skip()
@parametrize
def test_method_add(self, client: GradientAI) -> None:
- tag = client.firewalls.tags.add(
+ tag = client.gpu_droplets.firewalls.tags.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
tags=["frontend"],
)
@@ -27,7 +27,7 @@ def test_method_add(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_add(self, client: GradientAI) -> None:
- response = client.firewalls.tags.with_raw_response.add(
+ response = client.gpu_droplets.firewalls.tags.with_raw_response.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
tags=["frontend"],
)
@@ -40,7 +40,7 @@ def test_raw_response_add(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_add(self, client: GradientAI) -> None:
- with client.firewalls.tags.with_streaming_response.add(
+ with client.gpu_droplets.firewalls.tags.with_streaming_response.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
tags=["frontend"],
) as response:
@@ -56,7 +56,7 @@ def test_streaming_response_add(self, client: GradientAI) -> None:
@parametrize
def test_path_params_add(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- client.firewalls.tags.with_raw_response.add(
+ client.gpu_droplets.firewalls.tags.with_raw_response.add(
firewall_id="",
tags=["frontend"],
)
@@ -64,7 +64,7 @@ def test_path_params_add(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_remove(self, client: GradientAI) -> None:
- tag = client.firewalls.tags.remove(
+ tag = client.gpu_droplets.firewalls.tags.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
tags=["frontend"],
)
@@ -73,7 +73,7 @@ def test_method_remove(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_remove(self, client: GradientAI) -> None:
- response = client.firewalls.tags.with_raw_response.remove(
+ response = client.gpu_droplets.firewalls.tags.with_raw_response.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
tags=["frontend"],
)
@@ -86,7 +86,7 @@ def test_raw_response_remove(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_remove(self, client: GradientAI) -> None:
- with client.firewalls.tags.with_streaming_response.remove(
+ with client.gpu_droplets.firewalls.tags.with_streaming_response.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
tags=["frontend"],
) as response:
@@ -102,7 +102,7 @@ def test_streaming_response_remove(self, client: GradientAI) -> None:
@parametrize
def test_path_params_remove(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- client.firewalls.tags.with_raw_response.remove(
+ client.gpu_droplets.firewalls.tags.with_raw_response.remove(
firewall_id="",
tags=["frontend"],
)
@@ -116,7 +116,7 @@ class TestAsyncTags:
@pytest.mark.skip()
@parametrize
async def test_method_add(self, async_client: AsyncGradientAI) -> None:
- tag = await async_client.firewalls.tags.add(
+ tag = await async_client.gpu_droplets.firewalls.tags.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
tags=["frontend"],
)
@@ -125,7 +125,7 @@ async def test_method_add(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.firewalls.tags.with_raw_response.add(
+ response = await async_client.gpu_droplets.firewalls.tags.with_raw_response.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
tags=["frontend"],
)
@@ -138,7 +138,7 @@ async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> None:
- async with async_client.firewalls.tags.with_streaming_response.add(
+ async with async_client.gpu_droplets.firewalls.tags.with_streaming_response.add(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
tags=["frontend"],
) as response:
@@ -154,7 +154,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> No
@parametrize
async def test_path_params_add(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- await async_client.firewalls.tags.with_raw_response.add(
+ await async_client.gpu_droplets.firewalls.tags.with_raw_response.add(
firewall_id="",
tags=["frontend"],
)
@@ -162,7 +162,7 @@ async def test_path_params_add(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_remove(self, async_client: AsyncGradientAI) -> None:
- tag = await async_client.firewalls.tags.remove(
+ tag = await async_client.gpu_droplets.firewalls.tags.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
tags=["frontend"],
)
@@ -171,7 +171,7 @@ async def test_method_remove(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.firewalls.tags.with_raw_response.remove(
+ response = await async_client.gpu_droplets.firewalls.tags.with_raw_response.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
tags=["frontend"],
)
@@ -184,7 +184,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> None:
- async with async_client.firewalls.tags.with_streaming_response.remove(
+ async with async_client.gpu_droplets.firewalls.tags.with_streaming_response.remove(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
tags=["frontend"],
) as response:
@@ -200,7 +200,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_remove(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- await async_client.firewalls.tags.with_raw_response.remove(
+ await async_client.gpu_droplets.firewalls.tags.with_raw_response.remove(
firewall_id="",
tags=["frontend"],
)
diff --git a/tests/api_resources/floating_ips/__init__.py b/tests/api_resources/gpu_droplets/floating_ips/__init__.py
similarity index 100%
rename from tests/api_resources/floating_ips/__init__.py
rename to tests/api_resources/gpu_droplets/floating_ips/__init__.py
diff --git a/tests/api_resources/floating_ips/test_actions.py b/tests/api_resources/gpu_droplets/floating_ips/test_actions.py
similarity index 82%
rename from tests/api_resources/floating_ips/test_actions.py
rename to tests/api_resources/gpu_droplets/floating_ips/test_actions.py
index e2e3c45e..ad26db8a 100644
--- a/tests/api_resources/floating_ips/test_actions.py
+++ b/tests/api_resources/gpu_droplets/floating_ips/test_actions.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types.floating_ips import (
+from do_gradientai.types.gpu_droplets.floating_ips import (
ActionListResponse,
ActionCreateResponse,
ActionRetrieveResponse,
@@ -24,7 +24,7 @@ class TestActions:
@pytest.mark.skip()
@parametrize
def test_method_create_overload_1(self, client: GradientAI) -> None:
- action = client.floating_ips.actions.create(
+ action = client.gpu_droplets.floating_ips.actions.create(
floating_ip="45.55.96.47",
type="assign",
)
@@ -33,7 +33,7 @@ def test_method_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_create_overload_1(self, client: GradientAI) -> None:
- response = client.floating_ips.actions.with_raw_response.create(
+ response = client.gpu_droplets.floating_ips.actions.with_raw_response.create(
floating_ip="45.55.96.47",
type="assign",
)
@@ -46,7 +46,7 @@ def test_raw_response_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create_overload_1(self, client: GradientAI) -> None:
- with client.floating_ips.actions.with_streaming_response.create(
+ with client.gpu_droplets.floating_ips.actions.with_streaming_response.create(
floating_ip="45.55.96.47",
type="assign",
) as response:
@@ -62,7 +62,7 @@ def test_streaming_response_create_overload_1(self, client: GradientAI) -> None:
@parametrize
def test_path_params_create_overload_1(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
- client.floating_ips.actions.with_raw_response.create(
+ client.gpu_droplets.floating_ips.actions.with_raw_response.create(
floating_ip="",
type="assign",
)
@@ -70,7 +70,7 @@ def test_path_params_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_create_overload_2(self, client: GradientAI) -> None:
- action = client.floating_ips.actions.create(
+ action = client.gpu_droplets.floating_ips.actions.create(
floating_ip="45.55.96.47",
droplet_id=758604968,
type="assign",
@@ -80,7 +80,7 @@ def test_method_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_create_overload_2(self, client: GradientAI) -> None:
- response = client.floating_ips.actions.with_raw_response.create(
+ response = client.gpu_droplets.floating_ips.actions.with_raw_response.create(
floating_ip="45.55.96.47",
droplet_id=758604968,
type="assign",
@@ -94,7 +94,7 @@ def test_raw_response_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create_overload_2(self, client: GradientAI) -> None:
- with client.floating_ips.actions.with_streaming_response.create(
+ with client.gpu_droplets.floating_ips.actions.with_streaming_response.create(
floating_ip="45.55.96.47",
droplet_id=758604968,
type="assign",
@@ -111,7 +111,7 @@ def test_streaming_response_create_overload_2(self, client: GradientAI) -> None:
@parametrize
def test_path_params_create_overload_2(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
- client.floating_ips.actions.with_raw_response.create(
+ client.gpu_droplets.floating_ips.actions.with_raw_response.create(
floating_ip="",
droplet_id=758604968,
type="assign",
@@ -120,7 +120,7 @@ def test_path_params_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- action = client.floating_ips.actions.retrieve(
+ action = client.gpu_droplets.floating_ips.actions.retrieve(
action_id=36804636,
floating_ip="45.55.96.47",
)
@@ -129,7 +129,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.floating_ips.actions.with_raw_response.retrieve(
+ response = client.gpu_droplets.floating_ips.actions.with_raw_response.retrieve(
action_id=36804636,
floating_ip="45.55.96.47",
)
@@ -142,7 +142,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.floating_ips.actions.with_streaming_response.retrieve(
+ with client.gpu_droplets.floating_ips.actions.with_streaming_response.retrieve(
action_id=36804636,
floating_ip="45.55.96.47",
) as response:
@@ -158,7 +158,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@parametrize
def test_path_params_retrieve(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
- client.floating_ips.actions.with_raw_response.retrieve(
+ client.gpu_droplets.floating_ips.actions.with_raw_response.retrieve(
action_id=36804636,
floating_ip="",
)
@@ -166,7 +166,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- action = client.floating_ips.actions.list(
+ action = client.gpu_droplets.floating_ips.actions.list(
"192.168.1.1",
)
assert_matches_type(ActionListResponse, action, path=["response"])
@@ -174,7 +174,7 @@ def test_method_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.floating_ips.actions.with_raw_response.list(
+ response = client.gpu_droplets.floating_ips.actions.with_raw_response.list(
"192.168.1.1",
)
@@ -186,7 +186,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.floating_ips.actions.with_streaming_response.list(
+ with client.gpu_droplets.floating_ips.actions.with_streaming_response.list(
"192.168.1.1",
) as response:
assert not response.is_closed
@@ -201,7 +201,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None:
@parametrize
def test_path_params_list(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
- client.floating_ips.actions.with_raw_response.list(
+ client.gpu_droplets.floating_ips.actions.with_raw_response.list(
"",
)
@@ -214,7 +214,7 @@ class TestAsyncActions:
@pytest.mark.skip()
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.floating_ips.actions.create(
+ action = await async_client.gpu_droplets.floating_ips.actions.create(
floating_ip="45.55.96.47",
type="assign",
)
@@ -223,7 +223,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.floating_ips.actions.with_raw_response.create(
+ response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.create(
floating_ip="45.55.96.47",
type="assign",
)
@@ -236,7 +236,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientA
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- async with async_client.floating_ips.actions.with_streaming_response.create(
+ async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.create(
floating_ip="45.55.96.47",
type="assign",
) as response:
@@ -252,7 +252,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
@parametrize
async def test_path_params_create_overload_1(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
- await async_client.floating_ips.actions.with_raw_response.create(
+ await async_client.gpu_droplets.floating_ips.actions.with_raw_response.create(
floating_ip="",
type="assign",
)
@@ -260,7 +260,7 @@ async def test_path_params_create_overload_1(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.floating_ips.actions.create(
+ action = await async_client.gpu_droplets.floating_ips.actions.create(
floating_ip="45.55.96.47",
droplet_id=758604968,
type="assign",
@@ -270,7 +270,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.floating_ips.actions.with_raw_response.create(
+ response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.create(
floating_ip="45.55.96.47",
droplet_id=758604968,
type="assign",
@@ -284,7 +284,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientA
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- async with async_client.floating_ips.actions.with_streaming_response.create(
+ async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.create(
floating_ip="45.55.96.47",
droplet_id=758604968,
type="assign",
@@ -301,7 +301,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra
@parametrize
async def test_path_params_create_overload_2(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
- await async_client.floating_ips.actions.with_raw_response.create(
+ await async_client.gpu_droplets.floating_ips.actions.with_raw_response.create(
floating_ip="",
droplet_id=758604968,
type="assign",
@@ -310,7 +310,7 @@ async def test_path_params_create_overload_2(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.floating_ips.actions.retrieve(
+ action = await async_client.gpu_droplets.floating_ips.actions.retrieve(
action_id=36804636,
floating_ip="45.55.96.47",
)
@@ -319,7 +319,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.floating_ips.actions.with_raw_response.retrieve(
+ response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.retrieve(
action_id=36804636,
floating_ip="45.55.96.47",
)
@@ -332,7 +332,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.floating_ips.actions.with_streaming_response.retrieve(
+ async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.retrieve(
action_id=36804636,
floating_ip="45.55.96.47",
) as response:
@@ -348,7 +348,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
- await async_client.floating_ips.actions.with_raw_response.retrieve(
+ await async_client.gpu_droplets.floating_ips.actions.with_raw_response.retrieve(
action_id=36804636,
floating_ip="",
)
@@ -356,7 +356,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.floating_ips.actions.list(
+ action = await async_client.gpu_droplets.floating_ips.actions.list(
"192.168.1.1",
)
assert_matches_type(ActionListResponse, action, path=["response"])
@@ -364,7 +364,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.floating_ips.actions.with_raw_response.list(
+ response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.list(
"192.168.1.1",
)
@@ -376,7 +376,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.floating_ips.actions.with_streaming_response.list(
+ async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.list(
"192.168.1.1",
) as response:
assert not response.is_closed
@@ -391,6 +391,6 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N
@parametrize
async def test_path_params_list(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
- await async_client.floating_ips.actions.with_raw_response.list(
+ await async_client.gpu_droplets.floating_ips.actions.with_raw_response.list(
"",
)
diff --git a/tests/api_resources/images/__init__.py b/tests/api_resources/gpu_droplets/images/__init__.py
similarity index 100%
rename from tests/api_resources/images/__init__.py
rename to tests/api_resources/gpu_droplets/images/__init__.py
diff --git a/tests/api_resources/images/test_actions.py b/tests/api_resources/gpu_droplets/images/test_actions.py
similarity index 83%
rename from tests/api_resources/images/test_actions.py
rename to tests/api_resources/gpu_droplets/images/test_actions.py
index 93603a0a..35861bcb 100644
--- a/tests/api_resources/images/test_actions.py
+++ b/tests/api_resources/gpu_droplets/images/test_actions.py
@@ -9,8 +9,8 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types.images import ActionListResponse
from do_gradientai.types.shared import Action
+from do_gradientai.types.gpu_droplets.images import ActionListResponse
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -21,7 +21,7 @@ class TestActions:
@pytest.mark.skip()
@parametrize
def test_method_create_overload_1(self, client: GradientAI) -> None:
- action = client.images.actions.create(
+ action = client.gpu_droplets.images.actions.create(
image_id=62137902,
type="convert",
)
@@ -30,7 +30,7 @@ def test_method_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_create_overload_1(self, client: GradientAI) -> None:
- response = client.images.actions.with_raw_response.create(
+ response = client.gpu_droplets.images.actions.with_raw_response.create(
image_id=62137902,
type="convert",
)
@@ -43,7 +43,7 @@ def test_raw_response_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create_overload_1(self, client: GradientAI) -> None:
- with client.images.actions.with_streaming_response.create(
+ with client.gpu_droplets.images.actions.with_streaming_response.create(
image_id=62137902,
type="convert",
) as response:
@@ -58,7 +58,7 @@ def test_streaming_response_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_create_overload_2(self, client: GradientAI) -> None:
- action = client.images.actions.create(
+ action = client.gpu_droplets.images.actions.create(
image_id=62137902,
region="nyc3",
type="convert",
@@ -68,7 +68,7 @@ def test_method_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_create_overload_2(self, client: GradientAI) -> None:
- response = client.images.actions.with_raw_response.create(
+ response = client.gpu_droplets.images.actions.with_raw_response.create(
image_id=62137902,
region="nyc3",
type="convert",
@@ -82,7 +82,7 @@ def test_raw_response_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create_overload_2(self, client: GradientAI) -> None:
- with client.images.actions.with_streaming_response.create(
+ with client.gpu_droplets.images.actions.with_streaming_response.create(
image_id=62137902,
region="nyc3",
type="convert",
@@ -98,7 +98,7 @@ def test_streaming_response_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- action = client.images.actions.retrieve(
+ action = client.gpu_droplets.images.actions.retrieve(
action_id=36804636,
image_id=62137902,
)
@@ -107,7 +107,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.images.actions.with_raw_response.retrieve(
+ response = client.gpu_droplets.images.actions.with_raw_response.retrieve(
action_id=36804636,
image_id=62137902,
)
@@ -120,7 +120,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.images.actions.with_streaming_response.retrieve(
+ with client.gpu_droplets.images.actions.with_streaming_response.retrieve(
action_id=36804636,
image_id=62137902,
) as response:
@@ -135,7 +135,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- action = client.images.actions.list(
+ action = client.gpu_droplets.images.actions.list(
0,
)
assert_matches_type(ActionListResponse, action, path=["response"])
@@ -143,7 +143,7 @@ def test_method_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.images.actions.with_raw_response.list(
+ response = client.gpu_droplets.images.actions.with_raw_response.list(
0,
)
@@ -155,7 +155,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.images.actions.with_streaming_response.list(
+ with client.gpu_droplets.images.actions.with_streaming_response.list(
0,
) as response:
assert not response.is_closed
@@ -175,7 +175,7 @@ class TestAsyncActions:
@pytest.mark.skip()
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.images.actions.create(
+ action = await async_client.gpu_droplets.images.actions.create(
image_id=62137902,
type="convert",
)
@@ -184,7 +184,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.images.actions.with_raw_response.create(
+ response = await async_client.gpu_droplets.images.actions.with_raw_response.create(
image_id=62137902,
type="convert",
)
@@ -197,7 +197,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientA
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- async with async_client.images.actions.with_streaming_response.create(
+ async with async_client.gpu_droplets.images.actions.with_streaming_response.create(
image_id=62137902,
type="convert",
) as response:
@@ -212,7 +212,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
@pytest.mark.skip()
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.images.actions.create(
+ action = await async_client.gpu_droplets.images.actions.create(
image_id=62137902,
region="nyc3",
type="convert",
@@ -222,7 +222,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.images.actions.with_raw_response.create(
+ response = await async_client.gpu_droplets.images.actions.with_raw_response.create(
image_id=62137902,
region="nyc3",
type="convert",
@@ -236,7 +236,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientA
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- async with async_client.images.actions.with_streaming_response.create(
+ async with async_client.gpu_droplets.images.actions.with_streaming_response.create(
image_id=62137902,
region="nyc3",
type="convert",
@@ -252,7 +252,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.images.actions.retrieve(
+ action = await async_client.gpu_droplets.images.actions.retrieve(
action_id=36804636,
image_id=62137902,
)
@@ -261,7 +261,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.images.actions.with_raw_response.retrieve(
+ response = await async_client.gpu_droplets.images.actions.with_raw_response.retrieve(
action_id=36804636,
image_id=62137902,
)
@@ -274,7 +274,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.images.actions.with_streaming_response.retrieve(
+ async with async_client.gpu_droplets.images.actions.with_streaming_response.retrieve(
action_id=36804636,
image_id=62137902,
) as response:
@@ -289,7 +289,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.images.actions.list(
+ action = await async_client.gpu_droplets.images.actions.list(
0,
)
assert_matches_type(ActionListResponse, action, path=["response"])
@@ -297,7 +297,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.images.actions.with_raw_response.list(
+ response = await async_client.gpu_droplets.images.actions.with_raw_response.list(
0,
)
@@ -309,7 +309,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.images.actions.with_streaming_response.list(
+ async with async_client.gpu_droplets.images.actions.with_streaming_response.list(
0,
) as response:
assert not response.is_closed
diff --git a/tests/api_resources/load_balancers/__init__.py b/tests/api_resources/gpu_droplets/load_balancers/__init__.py
similarity index 100%
rename from tests/api_resources/load_balancers/__init__.py
rename to tests/api_resources/gpu_droplets/load_balancers/__init__.py
diff --git a/tests/api_resources/load_balancers/test_droplets.py b/tests/api_resources/gpu_droplets/load_balancers/test_droplets.py
similarity index 82%
rename from tests/api_resources/load_balancers/test_droplets.py
rename to tests/api_resources/gpu_droplets/load_balancers/test_droplets.py
index cb439411..f22213e2 100644
--- a/tests/api_resources/load_balancers/test_droplets.py
+++ b/tests/api_resources/gpu_droplets/load_balancers/test_droplets.py
@@ -18,7 +18,7 @@ class TestDroplets:
@pytest.mark.skip()
@parametrize
def test_method_add(self, client: GradientAI) -> None:
- droplet = client.load_balancers.droplets.add(
+ droplet = client.gpu_droplets.load_balancers.droplets.add(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
droplet_ids=[3164444, 3164445],
)
@@ -27,7 +27,7 @@ def test_method_add(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_add(self, client: GradientAI) -> None:
- response = client.load_balancers.droplets.with_raw_response.add(
+ response = client.gpu_droplets.load_balancers.droplets.with_raw_response.add(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
droplet_ids=[3164444, 3164445],
)
@@ -40,7 +40,7 @@ def test_raw_response_add(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_add(self, client: GradientAI) -> None:
- with client.load_balancers.droplets.with_streaming_response.add(
+ with client.gpu_droplets.load_balancers.droplets.with_streaming_response.add(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
droplet_ids=[3164444, 3164445],
) as response:
@@ -56,7 +56,7 @@ def test_streaming_response_add(self, client: GradientAI) -> None:
@parametrize
def test_path_params_add(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- client.load_balancers.droplets.with_raw_response.add(
+ client.gpu_droplets.load_balancers.droplets.with_raw_response.add(
lb_id="",
droplet_ids=[3164444, 3164445],
)
@@ -64,7 +64,7 @@ def test_path_params_add(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_remove(self, client: GradientAI) -> None:
- droplet = client.load_balancers.droplets.remove(
+ droplet = client.gpu_droplets.load_balancers.droplets.remove(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
droplet_ids=[3164444, 3164445],
)
@@ -73,7 +73,7 @@ def test_method_remove(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_remove(self, client: GradientAI) -> None:
- response = client.load_balancers.droplets.with_raw_response.remove(
+ response = client.gpu_droplets.load_balancers.droplets.with_raw_response.remove(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
droplet_ids=[3164444, 3164445],
)
@@ -86,7 +86,7 @@ def test_raw_response_remove(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_remove(self, client: GradientAI) -> None:
- with client.load_balancers.droplets.with_streaming_response.remove(
+ with client.gpu_droplets.load_balancers.droplets.with_streaming_response.remove(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
droplet_ids=[3164444, 3164445],
) as response:
@@ -102,7 +102,7 @@ def test_streaming_response_remove(self, client: GradientAI) -> None:
@parametrize
def test_path_params_remove(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- client.load_balancers.droplets.with_raw_response.remove(
+ client.gpu_droplets.load_balancers.droplets.with_raw_response.remove(
lb_id="",
droplet_ids=[3164444, 3164445],
)
@@ -116,7 +116,7 @@ class TestAsyncDroplets:
@pytest.mark.skip()
@parametrize
async def test_method_add(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.load_balancers.droplets.add(
+ droplet = await async_client.gpu_droplets.load_balancers.droplets.add(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
droplet_ids=[3164444, 3164445],
)
@@ -125,7 +125,7 @@ async def test_method_add(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.load_balancers.droplets.with_raw_response.add(
+ response = await async_client.gpu_droplets.load_balancers.droplets.with_raw_response.add(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
droplet_ids=[3164444, 3164445],
)
@@ -138,7 +138,7 @@ async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> None:
- async with async_client.load_balancers.droplets.with_streaming_response.add(
+ async with async_client.gpu_droplets.load_balancers.droplets.with_streaming_response.add(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
droplet_ids=[3164444, 3164445],
) as response:
@@ -154,7 +154,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> No
@parametrize
async def test_path_params_add(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- await async_client.load_balancers.droplets.with_raw_response.add(
+ await async_client.gpu_droplets.load_balancers.droplets.with_raw_response.add(
lb_id="",
droplet_ids=[3164444, 3164445],
)
@@ -162,7 +162,7 @@ async def test_path_params_add(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_remove(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.load_balancers.droplets.remove(
+ droplet = await async_client.gpu_droplets.load_balancers.droplets.remove(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
droplet_ids=[3164444, 3164445],
)
@@ -171,7 +171,7 @@ async def test_method_remove(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.load_balancers.droplets.with_raw_response.remove(
+ response = await async_client.gpu_droplets.load_balancers.droplets.with_raw_response.remove(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
droplet_ids=[3164444, 3164445],
)
@@ -184,7 +184,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> None:
- async with async_client.load_balancers.droplets.with_streaming_response.remove(
+ async with async_client.gpu_droplets.load_balancers.droplets.with_streaming_response.remove(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
droplet_ids=[3164444, 3164445],
) as response:
@@ -200,7 +200,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_remove(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- await async_client.load_balancers.droplets.with_raw_response.remove(
+ await async_client.gpu_droplets.load_balancers.droplets.with_raw_response.remove(
lb_id="",
droplet_ids=[3164444, 3164445],
)
diff --git a/tests/api_resources/load_balancers/test_forwarding_rules.py b/tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py
similarity index 86%
rename from tests/api_resources/load_balancers/test_forwarding_rules.py
rename to tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py
index 2f09fa8a..d53bd0db 100644
--- a/tests/api_resources/load_balancers/test_forwarding_rules.py
+++ b/tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py
@@ -18,7 +18,7 @@ class TestForwardingRules:
@pytest.mark.skip()
@parametrize
def test_method_add(self, client: GradientAI) -> None:
- forwarding_rule = client.load_balancers.forwarding_rules.add(
+ forwarding_rule = client.gpu_droplets.load_balancers.forwarding_rules.add(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -34,7 +34,7 @@ def test_method_add(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_add(self, client: GradientAI) -> None:
- response = client.load_balancers.forwarding_rules.with_raw_response.add(
+ response = client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.add(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -54,7 +54,7 @@ def test_raw_response_add(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_add(self, client: GradientAI) -> None:
- with client.load_balancers.forwarding_rules.with_streaming_response.add(
+ with client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.add(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -77,7 +77,7 @@ def test_streaming_response_add(self, client: GradientAI) -> None:
@parametrize
def test_path_params_add(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- client.load_balancers.forwarding_rules.with_raw_response.add(
+ client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.add(
lb_id="",
forwarding_rules=[
{
@@ -92,7 +92,7 @@ def test_path_params_add(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_remove(self, client: GradientAI) -> None:
- forwarding_rule = client.load_balancers.forwarding_rules.remove(
+ forwarding_rule = client.gpu_droplets.load_balancers.forwarding_rules.remove(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -108,7 +108,7 @@ def test_method_remove(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_remove(self, client: GradientAI) -> None:
- response = client.load_balancers.forwarding_rules.with_raw_response.remove(
+ response = client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.remove(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -128,7 +128,7 @@ def test_raw_response_remove(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_remove(self, client: GradientAI) -> None:
- with client.load_balancers.forwarding_rules.with_streaming_response.remove(
+ with client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.remove(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -151,7 +151,7 @@ def test_streaming_response_remove(self, client: GradientAI) -> None:
@parametrize
def test_path_params_remove(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- client.load_balancers.forwarding_rules.with_raw_response.remove(
+ client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.remove(
lb_id="",
forwarding_rules=[
{
@@ -172,7 +172,7 @@ class TestAsyncForwardingRules:
@pytest.mark.skip()
@parametrize
async def test_method_add(self, async_client: AsyncGradientAI) -> None:
- forwarding_rule = await async_client.load_balancers.forwarding_rules.add(
+ forwarding_rule = await async_client.gpu_droplets.load_balancers.forwarding_rules.add(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -188,7 +188,7 @@ async def test_method_add(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.load_balancers.forwarding_rules.with_raw_response.add(
+ response = await async_client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.add(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -208,7 +208,7 @@ async def test_raw_response_add(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> None:
- async with async_client.load_balancers.forwarding_rules.with_streaming_response.add(
+ async with async_client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.add(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -231,7 +231,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradientAI) -> No
@parametrize
async def test_path_params_add(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- await async_client.load_balancers.forwarding_rules.with_raw_response.add(
+ await async_client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.add(
lb_id="",
forwarding_rules=[
{
@@ -246,7 +246,7 @@ async def test_path_params_add(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_remove(self, async_client: AsyncGradientAI) -> None:
- forwarding_rule = await async_client.load_balancers.forwarding_rules.remove(
+ forwarding_rule = await async_client.gpu_droplets.load_balancers.forwarding_rules.remove(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -262,7 +262,7 @@ async def test_method_remove(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.load_balancers.forwarding_rules.with_raw_response.remove(
+ response = await async_client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.remove(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -282,7 +282,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_remove(self, async_client: AsyncGradientAI) -> None:
- async with async_client.load_balancers.forwarding_rules.with_streaming_response.remove(
+ async with async_client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.remove(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -305,7 +305,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_remove(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- await async_client.load_balancers.forwarding_rules.with_raw_response.remove(
+ await async_client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.remove(
lb_id="",
forwarding_rules=[
{
diff --git a/tests/api_resources/droplets/test_actions.py b/tests/api_resources/gpu_droplets/test_actions.py
similarity index 84%
rename from tests/api_resources/droplets/test_actions.py
rename to tests/api_resources/gpu_droplets/test_actions.py
index e5696d6c..74e45b44 100644
--- a/tests/api_resources/droplets/test_actions.py
+++ b/tests/api_resources/gpu_droplets/test_actions.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types.droplets import (
+from do_gradientai.types.gpu_droplets import (
ActionListResponse,
ActionInitiateResponse,
ActionRetrieveResponse,
@@ -25,7 +25,7 @@ class TestActions:
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- action = client.droplets.actions.retrieve(
+ action = client.gpu_droplets.actions.retrieve(
action_id=36804636,
droplet_id=3164444,
)
@@ -34,7 +34,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.droplets.actions.with_raw_response.retrieve(
+ response = client.gpu_droplets.actions.with_raw_response.retrieve(
action_id=36804636,
droplet_id=3164444,
)
@@ -47,7 +47,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.droplets.actions.with_streaming_response.retrieve(
+ with client.gpu_droplets.actions.with_streaming_response.retrieve(
action_id=36804636,
droplet_id=3164444,
) as response:
@@ -62,7 +62,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- action = client.droplets.actions.list(
+ action = client.gpu_droplets.actions.list(
droplet_id=3164444,
)
assert_matches_type(ActionListResponse, action, path=["response"])
@@ -70,7 +70,7 @@ def test_method_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- action = client.droplets.actions.list(
+ action = client.gpu_droplets.actions.list(
droplet_id=3164444,
page=1,
per_page=1,
@@ -80,7 +80,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.droplets.actions.with_raw_response.list(
+ response = client.gpu_droplets.actions.with_raw_response.list(
droplet_id=3164444,
)
@@ -92,7 +92,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.droplets.actions.with_streaming_response.list(
+ with client.gpu_droplets.actions.with_streaming_response.list(
droplet_id=3164444,
) as response:
assert not response.is_closed
@@ -106,7 +106,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_bulk_initiate_overload_1(self, client: GradientAI) -> None:
- action = client.droplets.actions.bulk_initiate(
+ action = client.gpu_droplets.actions.bulk_initiate(
type="reboot",
)
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
@@ -114,7 +114,7 @@ def test_method_bulk_initiate_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_bulk_initiate_with_all_params_overload_1(self, client: GradientAI) -> None:
- action = client.droplets.actions.bulk_initiate(
+ action = client.gpu_droplets.actions.bulk_initiate(
type="reboot",
tag_name="tag_name",
)
@@ -123,7 +123,7 @@ def test_method_bulk_initiate_with_all_params_overload_1(self, client: GradientA
@pytest.mark.skip()
@parametrize
def test_raw_response_bulk_initiate_overload_1(self, client: GradientAI) -> None:
- response = client.droplets.actions.with_raw_response.bulk_initiate(
+ response = client.gpu_droplets.actions.with_raw_response.bulk_initiate(
type="reboot",
)
@@ -135,7 +135,7 @@ def test_raw_response_bulk_initiate_overload_1(self, client: GradientAI) -> None
@pytest.mark.skip()
@parametrize
def test_streaming_response_bulk_initiate_overload_1(self, client: GradientAI) -> None:
- with client.droplets.actions.with_streaming_response.bulk_initiate(
+ with client.gpu_droplets.actions.with_streaming_response.bulk_initiate(
type="reboot",
) as response:
assert not response.is_closed
@@ -149,7 +149,7 @@ def test_streaming_response_bulk_initiate_overload_1(self, client: GradientAI) -
@pytest.mark.skip()
@parametrize
def test_method_bulk_initiate_overload_2(self, client: GradientAI) -> None:
- action = client.droplets.actions.bulk_initiate(
+ action = client.gpu_droplets.actions.bulk_initiate(
type="reboot",
)
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
@@ -157,7 +157,7 @@ def test_method_bulk_initiate_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_bulk_initiate_with_all_params_overload_2(self, client: GradientAI) -> None:
- action = client.droplets.actions.bulk_initiate(
+ action = client.gpu_droplets.actions.bulk_initiate(
type="reboot",
tag_name="tag_name",
name="Nifty New Snapshot",
@@ -167,7 +167,7 @@ def test_method_bulk_initiate_with_all_params_overload_2(self, client: GradientA
@pytest.mark.skip()
@parametrize
def test_raw_response_bulk_initiate_overload_2(self, client: GradientAI) -> None:
- response = client.droplets.actions.with_raw_response.bulk_initiate(
+ response = client.gpu_droplets.actions.with_raw_response.bulk_initiate(
type="reboot",
)
@@ -179,7 +179,7 @@ def test_raw_response_bulk_initiate_overload_2(self, client: GradientAI) -> None
@pytest.mark.skip()
@parametrize
def test_streaming_response_bulk_initiate_overload_2(self, client: GradientAI) -> None:
- with client.droplets.actions.with_streaming_response.bulk_initiate(
+ with client.gpu_droplets.actions.with_streaming_response.bulk_initiate(
type="reboot",
) as response:
assert not response.is_closed
@@ -193,7 +193,7 @@ def test_streaming_response_bulk_initiate_overload_2(self, client: GradientAI) -
@pytest.mark.skip()
@parametrize
def test_method_initiate_overload_1(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -202,7 +202,7 @@ def test_method_initiate_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_initiate_overload_1(self, client: GradientAI) -> None:
- response = client.droplets.actions.with_raw_response.initiate(
+ response = client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -215,7 +215,7 @@ def test_raw_response_initiate_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_initiate_overload_1(self, client: GradientAI) -> None:
- with client.droplets.actions.with_streaming_response.initiate(
+ with client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="reboot",
) as response:
@@ -230,7 +230,7 @@ def test_streaming_response_initiate_overload_1(self, client: GradientAI) -> Non
@pytest.mark.skip()
@parametrize
def test_method_initiate_overload_2(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="enable_backups",
)
@@ -239,7 +239,7 @@ def test_method_initiate_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_initiate_with_all_params_overload_2(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="enable_backups",
backup_policy={
@@ -253,7 +253,7 @@ def test_method_initiate_with_all_params_overload_2(self, client: GradientAI) ->
@pytest.mark.skip()
@parametrize
def test_raw_response_initiate_overload_2(self, client: GradientAI) -> None:
- response = client.droplets.actions.with_raw_response.initiate(
+ response = client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="enable_backups",
)
@@ -266,7 +266,7 @@ def test_raw_response_initiate_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_initiate_overload_2(self, client: GradientAI) -> None:
- with client.droplets.actions.with_streaming_response.initiate(
+ with client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="enable_backups",
) as response:
@@ -281,7 +281,7 @@ def test_streaming_response_initiate_overload_2(self, client: GradientAI) -> Non
@pytest.mark.skip()
@parametrize
def test_method_initiate_overload_3(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="enable_backups",
)
@@ -290,7 +290,7 @@ def test_method_initiate_overload_3(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_initiate_with_all_params_overload_3(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="enable_backups",
backup_policy={
@@ -304,7 +304,7 @@ def test_method_initiate_with_all_params_overload_3(self, client: GradientAI) ->
@pytest.mark.skip()
@parametrize
def test_raw_response_initiate_overload_3(self, client: GradientAI) -> None:
- response = client.droplets.actions.with_raw_response.initiate(
+ response = client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="enable_backups",
)
@@ -317,7 +317,7 @@ def test_raw_response_initiate_overload_3(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_initiate_overload_3(self, client: GradientAI) -> None:
- with client.droplets.actions.with_streaming_response.initiate(
+ with client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="enable_backups",
) as response:
@@ -332,7 +332,7 @@ def test_streaming_response_initiate_overload_3(self, client: GradientAI) -> Non
@pytest.mark.skip()
@parametrize
def test_method_initiate_overload_4(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -341,7 +341,7 @@ def test_method_initiate_overload_4(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_initiate_with_all_params_overload_4(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
image=12389723,
@@ -351,7 +351,7 @@ def test_method_initiate_with_all_params_overload_4(self, client: GradientAI) ->
@pytest.mark.skip()
@parametrize
def test_raw_response_initiate_overload_4(self, client: GradientAI) -> None:
- response = client.droplets.actions.with_raw_response.initiate(
+ response = client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -364,7 +364,7 @@ def test_raw_response_initiate_overload_4(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_initiate_overload_4(self, client: GradientAI) -> None:
- with client.droplets.actions.with_streaming_response.initiate(
+ with client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="reboot",
) as response:
@@ -379,7 +379,7 @@ def test_streaming_response_initiate_overload_4(self, client: GradientAI) -> Non
@pytest.mark.skip()
@parametrize
def test_method_initiate_overload_5(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -388,7 +388,7 @@ def test_method_initiate_overload_5(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_initiate_with_all_params_overload_5(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
disk=True,
@@ -399,7 +399,7 @@ def test_method_initiate_with_all_params_overload_5(self, client: GradientAI) ->
@pytest.mark.skip()
@parametrize
def test_raw_response_initiate_overload_5(self, client: GradientAI) -> None:
- response = client.droplets.actions.with_raw_response.initiate(
+ response = client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -412,7 +412,7 @@ def test_raw_response_initiate_overload_5(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_initiate_overload_5(self, client: GradientAI) -> None:
- with client.droplets.actions.with_streaming_response.initiate(
+ with client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="reboot",
) as response:
@@ -427,7 +427,7 @@ def test_streaming_response_initiate_overload_5(self, client: GradientAI) -> Non
@pytest.mark.skip()
@parametrize
def test_method_initiate_overload_6(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -436,7 +436,7 @@ def test_method_initiate_overload_6(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_initiate_with_all_params_overload_6(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
image="ubuntu-20-04-x64",
@@ -446,7 +446,7 @@ def test_method_initiate_with_all_params_overload_6(self, client: GradientAI) ->
@pytest.mark.skip()
@parametrize
def test_raw_response_initiate_overload_6(self, client: GradientAI) -> None:
- response = client.droplets.actions.with_raw_response.initiate(
+ response = client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -459,7 +459,7 @@ def test_raw_response_initiate_overload_6(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_initiate_overload_6(self, client: GradientAI) -> None:
- with client.droplets.actions.with_streaming_response.initiate(
+ with client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="reboot",
) as response:
@@ -474,7 +474,7 @@ def test_streaming_response_initiate_overload_6(self, client: GradientAI) -> Non
@pytest.mark.skip()
@parametrize
def test_method_initiate_overload_7(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -483,7 +483,7 @@ def test_method_initiate_overload_7(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_initiate_with_all_params_overload_7(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
name="nifty-new-name",
@@ -493,7 +493,7 @@ def test_method_initiate_with_all_params_overload_7(self, client: GradientAI) ->
@pytest.mark.skip()
@parametrize
def test_raw_response_initiate_overload_7(self, client: GradientAI) -> None:
- response = client.droplets.actions.with_raw_response.initiate(
+ response = client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -506,7 +506,7 @@ def test_raw_response_initiate_overload_7(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_initiate_overload_7(self, client: GradientAI) -> None:
- with client.droplets.actions.with_streaming_response.initiate(
+ with client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="reboot",
) as response:
@@ -521,7 +521,7 @@ def test_streaming_response_initiate_overload_7(self, client: GradientAI) -> Non
@pytest.mark.skip()
@parametrize
def test_method_initiate_overload_8(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -530,7 +530,7 @@ def test_method_initiate_overload_8(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_initiate_with_all_params_overload_8(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
kernel=12389723,
@@ -540,7 +540,7 @@ def test_method_initiate_with_all_params_overload_8(self, client: GradientAI) ->
@pytest.mark.skip()
@parametrize
def test_raw_response_initiate_overload_8(self, client: GradientAI) -> None:
- response = client.droplets.actions.with_raw_response.initiate(
+ response = client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -553,7 +553,7 @@ def test_raw_response_initiate_overload_8(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_initiate_overload_8(self, client: GradientAI) -> None:
- with client.droplets.actions.with_streaming_response.initiate(
+ with client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="reboot",
) as response:
@@ -568,7 +568,7 @@ def test_streaming_response_initiate_overload_8(self, client: GradientAI) -> Non
@pytest.mark.skip()
@parametrize
def test_method_initiate_overload_9(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -577,7 +577,7 @@ def test_method_initiate_overload_9(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_initiate_with_all_params_overload_9(self, client: GradientAI) -> None:
- action = client.droplets.actions.initiate(
+ action = client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
name="Nifty New Snapshot",
@@ -587,7 +587,7 @@ def test_method_initiate_with_all_params_overload_9(self, client: GradientAI) ->
@pytest.mark.skip()
@parametrize
def test_raw_response_initiate_overload_9(self, client: GradientAI) -> None:
- response = client.droplets.actions.with_raw_response.initiate(
+ response = client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -600,7 +600,7 @@ def test_raw_response_initiate_overload_9(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_initiate_overload_9(self, client: GradientAI) -> None:
- with client.droplets.actions.with_streaming_response.initiate(
+ with client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="reboot",
) as response:
@@ -621,7 +621,7 @@ class TestAsyncActions:
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.retrieve(
+ action = await async_client.gpu_droplets.actions.retrieve(
action_id=36804636,
droplet_id=3164444,
)
@@ -630,7 +630,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.actions.with_raw_response.retrieve(
+ response = await async_client.gpu_droplets.actions.with_raw_response.retrieve(
action_id=36804636,
droplet_id=3164444,
)
@@ -643,7 +643,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.actions.with_streaming_response.retrieve(
+ async with async_client.gpu_droplets.actions.with_streaming_response.retrieve(
action_id=36804636,
droplet_id=3164444,
) as response:
@@ -658,7 +658,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.list(
+ action = await async_client.gpu_droplets.actions.list(
droplet_id=3164444,
)
assert_matches_type(ActionListResponse, action, path=["response"])
@@ -666,7 +666,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.list(
+ action = await async_client.gpu_droplets.actions.list(
droplet_id=3164444,
page=1,
per_page=1,
@@ -676,7 +676,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.actions.with_raw_response.list(
+ response = await async_client.gpu_droplets.actions.with_raw_response.list(
droplet_id=3164444,
)
@@ -688,7 +688,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.actions.with_streaming_response.list(
+ async with async_client.gpu_droplets.actions.with_streaming_response.list(
droplet_id=3164444,
) as response:
assert not response.is_closed
@@ -702,7 +702,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N
@pytest.mark.skip()
@parametrize
async def test_method_bulk_initiate_overload_1(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.bulk_initiate(
+ action = await async_client.gpu_droplets.actions.bulk_initiate(
type="reboot",
)
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
@@ -710,7 +710,7 @@ async def test_method_bulk_initiate_overload_1(self, async_client: AsyncGradient
@pytest.mark.skip()
@parametrize
async def test_method_bulk_initiate_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.bulk_initiate(
+ action = await async_client.gpu_droplets.actions.bulk_initiate(
type="reboot",
tag_name="tag_name",
)
@@ -719,7 +719,7 @@ async def test_method_bulk_initiate_with_all_params_overload_1(self, async_clien
@pytest.mark.skip()
@parametrize
async def test_raw_response_bulk_initiate_overload_1(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.actions.with_raw_response.bulk_initiate(
+ response = await async_client.gpu_droplets.actions.with_raw_response.bulk_initiate(
type="reboot",
)
@@ -731,7 +731,7 @@ async def test_raw_response_bulk_initiate_overload_1(self, async_client: AsyncGr
@pytest.mark.skip()
@parametrize
async def test_streaming_response_bulk_initiate_overload_1(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.actions.with_streaming_response.bulk_initiate(
+ async with async_client.gpu_droplets.actions.with_streaming_response.bulk_initiate(
type="reboot",
) as response:
assert not response.is_closed
@@ -745,7 +745,7 @@ async def test_streaming_response_bulk_initiate_overload_1(self, async_client: A
@pytest.mark.skip()
@parametrize
async def test_method_bulk_initiate_overload_2(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.bulk_initiate(
+ action = await async_client.gpu_droplets.actions.bulk_initiate(
type="reboot",
)
assert_matches_type(ActionBulkInitiateResponse, action, path=["response"])
@@ -753,7 +753,7 @@ async def test_method_bulk_initiate_overload_2(self, async_client: AsyncGradient
@pytest.mark.skip()
@parametrize
async def test_method_bulk_initiate_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.bulk_initiate(
+ action = await async_client.gpu_droplets.actions.bulk_initiate(
type="reboot",
tag_name="tag_name",
name="Nifty New Snapshot",
@@ -763,7 +763,7 @@ async def test_method_bulk_initiate_with_all_params_overload_2(self, async_clien
@pytest.mark.skip()
@parametrize
async def test_raw_response_bulk_initiate_overload_2(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.actions.with_raw_response.bulk_initiate(
+ response = await async_client.gpu_droplets.actions.with_raw_response.bulk_initiate(
type="reboot",
)
@@ -775,7 +775,7 @@ async def test_raw_response_bulk_initiate_overload_2(self, async_client: AsyncGr
@pytest.mark.skip()
@parametrize
async def test_streaming_response_bulk_initiate_overload_2(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.actions.with_streaming_response.bulk_initiate(
+ async with async_client.gpu_droplets.actions.with_streaming_response.bulk_initiate(
type="reboot",
) as response:
assert not response.is_closed
@@ -789,7 +789,7 @@ async def test_streaming_response_bulk_initiate_overload_2(self, async_client: A
@pytest.mark.skip()
@parametrize
async def test_method_initiate_overload_1(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -798,7 +798,7 @@ async def test_method_initiate_overload_1(self, async_client: AsyncGradientAI) -
@pytest.mark.skip()
@parametrize
async def test_raw_response_initiate_overload_1(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.actions.with_raw_response.initiate(
+ response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -811,7 +811,7 @@ async def test_raw_response_initiate_overload_1(self, async_client: AsyncGradien
@pytest.mark.skip()
@parametrize
async def test_streaming_response_initiate_overload_1(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.actions.with_streaming_response.initiate(
+ async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="reboot",
) as response:
@@ -826,7 +826,7 @@ async def test_streaming_response_initiate_overload_1(self, async_client: AsyncG
@pytest.mark.skip()
@parametrize
async def test_method_initiate_overload_2(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="enable_backups",
)
@@ -835,7 +835,7 @@ async def test_method_initiate_overload_2(self, async_client: AsyncGradientAI) -
@pytest.mark.skip()
@parametrize
async def test_method_initiate_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="enable_backups",
backup_policy={
@@ -849,7 +849,7 @@ async def test_method_initiate_with_all_params_overload_2(self, async_client: As
@pytest.mark.skip()
@parametrize
async def test_raw_response_initiate_overload_2(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.actions.with_raw_response.initiate(
+ response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="enable_backups",
)
@@ -862,7 +862,7 @@ async def test_raw_response_initiate_overload_2(self, async_client: AsyncGradien
@pytest.mark.skip()
@parametrize
async def test_streaming_response_initiate_overload_2(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.actions.with_streaming_response.initiate(
+ async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="enable_backups",
) as response:
@@ -877,7 +877,7 @@ async def test_streaming_response_initiate_overload_2(self, async_client: AsyncG
@pytest.mark.skip()
@parametrize
async def test_method_initiate_overload_3(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="enable_backups",
)
@@ -886,7 +886,7 @@ async def test_method_initiate_overload_3(self, async_client: AsyncGradientAI) -
@pytest.mark.skip()
@parametrize
async def test_method_initiate_with_all_params_overload_3(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="enable_backups",
backup_policy={
@@ -900,7 +900,7 @@ async def test_method_initiate_with_all_params_overload_3(self, async_client: As
@pytest.mark.skip()
@parametrize
async def test_raw_response_initiate_overload_3(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.actions.with_raw_response.initiate(
+ response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="enable_backups",
)
@@ -913,7 +913,7 @@ async def test_raw_response_initiate_overload_3(self, async_client: AsyncGradien
@pytest.mark.skip()
@parametrize
async def test_streaming_response_initiate_overload_3(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.actions.with_streaming_response.initiate(
+ async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="enable_backups",
) as response:
@@ -928,7 +928,7 @@ async def test_streaming_response_initiate_overload_3(self, async_client: AsyncG
@pytest.mark.skip()
@parametrize
async def test_method_initiate_overload_4(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -937,7 +937,7 @@ async def test_method_initiate_overload_4(self, async_client: AsyncGradientAI) -
@pytest.mark.skip()
@parametrize
async def test_method_initiate_with_all_params_overload_4(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
image=12389723,
@@ -947,7 +947,7 @@ async def test_method_initiate_with_all_params_overload_4(self, async_client: As
@pytest.mark.skip()
@parametrize
async def test_raw_response_initiate_overload_4(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.actions.with_raw_response.initiate(
+ response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -960,7 +960,7 @@ async def test_raw_response_initiate_overload_4(self, async_client: AsyncGradien
@pytest.mark.skip()
@parametrize
async def test_streaming_response_initiate_overload_4(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.actions.with_streaming_response.initiate(
+ async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="reboot",
) as response:
@@ -975,7 +975,7 @@ async def test_streaming_response_initiate_overload_4(self, async_client: AsyncG
@pytest.mark.skip()
@parametrize
async def test_method_initiate_overload_5(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -984,7 +984,7 @@ async def test_method_initiate_overload_5(self, async_client: AsyncGradientAI) -
@pytest.mark.skip()
@parametrize
async def test_method_initiate_with_all_params_overload_5(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
disk=True,
@@ -995,7 +995,7 @@ async def test_method_initiate_with_all_params_overload_5(self, async_client: As
@pytest.mark.skip()
@parametrize
async def test_raw_response_initiate_overload_5(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.actions.with_raw_response.initiate(
+ response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -1008,7 +1008,7 @@ async def test_raw_response_initiate_overload_5(self, async_client: AsyncGradien
@pytest.mark.skip()
@parametrize
async def test_streaming_response_initiate_overload_5(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.actions.with_streaming_response.initiate(
+ async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="reboot",
) as response:
@@ -1023,7 +1023,7 @@ async def test_streaming_response_initiate_overload_5(self, async_client: AsyncG
@pytest.mark.skip()
@parametrize
async def test_method_initiate_overload_6(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -1032,7 +1032,7 @@ async def test_method_initiate_overload_6(self, async_client: AsyncGradientAI) -
@pytest.mark.skip()
@parametrize
async def test_method_initiate_with_all_params_overload_6(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
image="ubuntu-20-04-x64",
@@ -1042,7 +1042,7 @@ async def test_method_initiate_with_all_params_overload_6(self, async_client: As
@pytest.mark.skip()
@parametrize
async def test_raw_response_initiate_overload_6(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.actions.with_raw_response.initiate(
+ response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -1055,7 +1055,7 @@ async def test_raw_response_initiate_overload_6(self, async_client: AsyncGradien
@pytest.mark.skip()
@parametrize
async def test_streaming_response_initiate_overload_6(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.actions.with_streaming_response.initiate(
+ async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="reboot",
) as response:
@@ -1070,7 +1070,7 @@ async def test_streaming_response_initiate_overload_6(self, async_client: AsyncG
@pytest.mark.skip()
@parametrize
async def test_method_initiate_overload_7(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -1079,7 +1079,7 @@ async def test_method_initiate_overload_7(self, async_client: AsyncGradientAI) -
@pytest.mark.skip()
@parametrize
async def test_method_initiate_with_all_params_overload_7(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
name="nifty-new-name",
@@ -1089,7 +1089,7 @@ async def test_method_initiate_with_all_params_overload_7(self, async_client: As
@pytest.mark.skip()
@parametrize
async def test_raw_response_initiate_overload_7(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.actions.with_raw_response.initiate(
+ response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -1102,7 +1102,7 @@ async def test_raw_response_initiate_overload_7(self, async_client: AsyncGradien
@pytest.mark.skip()
@parametrize
async def test_streaming_response_initiate_overload_7(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.actions.with_streaming_response.initiate(
+ async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="reboot",
) as response:
@@ -1117,7 +1117,7 @@ async def test_streaming_response_initiate_overload_7(self, async_client: AsyncG
@pytest.mark.skip()
@parametrize
async def test_method_initiate_overload_8(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -1126,7 +1126,7 @@ async def test_method_initiate_overload_8(self, async_client: AsyncGradientAI) -
@pytest.mark.skip()
@parametrize
async def test_method_initiate_with_all_params_overload_8(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
kernel=12389723,
@@ -1136,7 +1136,7 @@ async def test_method_initiate_with_all_params_overload_8(self, async_client: As
@pytest.mark.skip()
@parametrize
async def test_raw_response_initiate_overload_8(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.actions.with_raw_response.initiate(
+ response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -1149,7 +1149,7 @@ async def test_raw_response_initiate_overload_8(self, async_client: AsyncGradien
@pytest.mark.skip()
@parametrize
async def test_streaming_response_initiate_overload_8(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.actions.with_streaming_response.initiate(
+ async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="reboot",
) as response:
@@ -1164,7 +1164,7 @@ async def test_streaming_response_initiate_overload_8(self, async_client: AsyncG
@pytest.mark.skip()
@parametrize
async def test_method_initiate_overload_9(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -1173,7 +1173,7 @@ async def test_method_initiate_overload_9(self, async_client: AsyncGradientAI) -
@pytest.mark.skip()
@parametrize
async def test_method_initiate_with_all_params_overload_9(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.droplets.actions.initiate(
+ action = await async_client.gpu_droplets.actions.initiate(
droplet_id=3164444,
type="reboot",
name="Nifty New Snapshot",
@@ -1183,7 +1183,7 @@ async def test_method_initiate_with_all_params_overload_9(self, async_client: As
@pytest.mark.skip()
@parametrize
async def test_raw_response_initiate_overload_9(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.actions.with_raw_response.initiate(
+ response = await async_client.gpu_droplets.actions.with_raw_response.initiate(
droplet_id=3164444,
type="reboot",
)
@@ -1196,7 +1196,7 @@ async def test_raw_response_initiate_overload_9(self, async_client: AsyncGradien
@pytest.mark.skip()
@parametrize
async def test_streaming_response_initiate_overload_9(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.actions.with_streaming_response.initiate(
+ async with async_client.gpu_droplets.actions.with_streaming_response.initiate(
droplet_id=3164444,
type="reboot",
) as response:
diff --git a/tests/api_resources/droplets/test_autoscale.py b/tests/api_resources/gpu_droplets/test_autoscale.py
similarity index 86%
rename from tests/api_resources/droplets/test_autoscale.py
rename to tests/api_resources/gpu_droplets/test_autoscale.py
index 4f6ce219..cec0371d 100644
--- a/tests/api_resources/droplets/test_autoscale.py
+++ b/tests/api_resources/gpu_droplets/test_autoscale.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types.droplets import (
+from do_gradientai.types.gpu_droplets import (
AutoscaleListResponse,
AutoscaleCreateResponse,
AutoscaleUpdateResponse,
@@ -27,7 +27,7 @@ class TestAutoscale:
@pytest.mark.skip()
@parametrize
def test_method_create(self, client: GradientAI) -> None:
- autoscale = client.droplets.autoscale.create(
+ autoscale = client.gpu_droplets.autoscale.create(
config={
"max_instances": 5,
"min_instances": 1,
@@ -45,7 +45,7 @@ def test_method_create(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_create_with_all_params(self, client: GradientAI) -> None:
- autoscale = client.droplets.autoscale.create(
+ autoscale = client.gpu_droplets.autoscale.create(
config={
"max_instances": 5,
"min_instances": 1,
@@ -73,7 +73,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_create(self, client: GradientAI) -> None:
- response = client.droplets.autoscale.with_raw_response.create(
+ response = client.gpu_droplets.autoscale.with_raw_response.create(
config={
"max_instances": 5,
"min_instances": 1,
@@ -95,7 +95,7 @@ def test_raw_response_create(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create(self, client: GradientAI) -> None:
- with client.droplets.autoscale.with_streaming_response.create(
+ with client.gpu_droplets.autoscale.with_streaming_response.create(
config={
"max_instances": 5,
"min_instances": 1,
@@ -119,7 +119,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- autoscale = client.droplets.autoscale.retrieve(
+ autoscale = client.gpu_droplets.autoscale.retrieve(
"autoscale_pool_id",
)
assert_matches_type(AutoscaleRetrieveResponse, autoscale, path=["response"])
@@ -127,7 +127,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.droplets.autoscale.with_raw_response.retrieve(
+ response = client.gpu_droplets.autoscale.with_raw_response.retrieve(
"autoscale_pool_id",
)
@@ -139,7 +139,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.droplets.autoscale.with_streaming_response.retrieve(
+ with client.gpu_droplets.autoscale.with_streaming_response.retrieve(
"autoscale_pool_id",
) as response:
assert not response.is_closed
@@ -154,14 +154,14 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@parametrize
def test_path_params_retrieve(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
- client.droplets.autoscale.with_raw_response.retrieve(
+ client.gpu_droplets.autoscale.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
def test_method_update(self, client: GradientAI) -> None:
- autoscale = client.droplets.autoscale.update(
+ autoscale = client.gpu_droplets.autoscale.update(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
config={"target_number_instances": 2},
droplet_template={
@@ -177,7 +177,7 @@ def test_method_update(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_update_with_all_params(self, client: GradientAI) -> None:
- autoscale = client.droplets.autoscale.update(
+ autoscale = client.gpu_droplets.autoscale.update(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
config={"target_number_instances": 2},
droplet_template={
@@ -200,7 +200,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_update(self, client: GradientAI) -> None:
- response = client.droplets.autoscale.with_raw_response.update(
+ response = client.gpu_droplets.autoscale.with_raw_response.update(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
config={"target_number_instances": 2},
droplet_template={
@@ -220,7 +220,7 @@ def test_raw_response_update(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_update(self, client: GradientAI) -> None:
- with client.droplets.autoscale.with_streaming_response.update(
+ with client.gpu_droplets.autoscale.with_streaming_response.update(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
config={"target_number_instances": 2},
droplet_template={
@@ -243,7 +243,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None:
@parametrize
def test_path_params_update(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
- client.droplets.autoscale.with_raw_response.update(
+ client.gpu_droplets.autoscale.with_raw_response.update(
autoscale_pool_id="",
config={"target_number_instances": 2},
droplet_template={
@@ -258,13 +258,13 @@ def test_path_params_update(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- autoscale = client.droplets.autoscale.list()
+ autoscale = client.gpu_droplets.autoscale.list()
assert_matches_type(AutoscaleListResponse, autoscale, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- autoscale = client.droplets.autoscale.list(
+ autoscale = client.gpu_droplets.autoscale.list(
name="name",
page=1,
per_page=1,
@@ -274,7 +274,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.droplets.autoscale.with_raw_response.list()
+ response = client.gpu_droplets.autoscale.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -284,7 +284,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.droplets.autoscale.with_streaming_response.list() as response:
+ with client.gpu_droplets.autoscale.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -296,7 +296,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_delete(self, client: GradientAI) -> None:
- autoscale = client.droplets.autoscale.delete(
+ autoscale = client.gpu_droplets.autoscale.delete(
"autoscale_pool_id",
)
assert autoscale is None
@@ -304,7 +304,7 @@ def test_method_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_delete(self, client: GradientAI) -> None:
- response = client.droplets.autoscale.with_raw_response.delete(
+ response = client.gpu_droplets.autoscale.with_raw_response.delete(
"autoscale_pool_id",
)
@@ -316,7 +316,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete(self, client: GradientAI) -> None:
- with client.droplets.autoscale.with_streaming_response.delete(
+ with client.gpu_droplets.autoscale.with_streaming_response.delete(
"autoscale_pool_id",
) as response:
assert not response.is_closed
@@ -331,14 +331,14 @@ def test_streaming_response_delete(self, client: GradientAI) -> None:
@parametrize
def test_path_params_delete(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
- client.droplets.autoscale.with_raw_response.delete(
+ client.gpu_droplets.autoscale.with_raw_response.delete(
"",
)
@pytest.mark.skip()
@parametrize
def test_method_delete_dangerous(self, client: GradientAI) -> None:
- autoscale = client.droplets.autoscale.delete_dangerous(
+ autoscale = client.gpu_droplets.autoscale.delete_dangerous(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
x_dangerous=True,
)
@@ -347,7 +347,7 @@ def test_method_delete_dangerous(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_delete_dangerous(self, client: GradientAI) -> None:
- response = client.droplets.autoscale.with_raw_response.delete_dangerous(
+ response = client.gpu_droplets.autoscale.with_raw_response.delete_dangerous(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
x_dangerous=True,
)
@@ -360,7 +360,7 @@ def test_raw_response_delete_dangerous(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete_dangerous(self, client: GradientAI) -> None:
- with client.droplets.autoscale.with_streaming_response.delete_dangerous(
+ with client.gpu_droplets.autoscale.with_streaming_response.delete_dangerous(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
x_dangerous=True,
) as response:
@@ -376,7 +376,7 @@ def test_streaming_response_delete_dangerous(self, client: GradientAI) -> None:
@parametrize
def test_path_params_delete_dangerous(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
- client.droplets.autoscale.with_raw_response.delete_dangerous(
+ client.gpu_droplets.autoscale.with_raw_response.delete_dangerous(
autoscale_pool_id="",
x_dangerous=True,
)
@@ -384,7 +384,7 @@ def test_path_params_delete_dangerous(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list_history(self, client: GradientAI) -> None:
- autoscale = client.droplets.autoscale.list_history(
+ autoscale = client.gpu_droplets.autoscale.list_history(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
)
assert_matches_type(AutoscaleListHistoryResponse, autoscale, path=["response"])
@@ -392,7 +392,7 @@ def test_method_list_history(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list_history_with_all_params(self, client: GradientAI) -> None:
- autoscale = client.droplets.autoscale.list_history(
+ autoscale = client.gpu_droplets.autoscale.list_history(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
page=1,
per_page=1,
@@ -402,7 +402,7 @@ def test_method_list_history_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list_history(self, client: GradientAI) -> None:
- response = client.droplets.autoscale.with_raw_response.list_history(
+ response = client.gpu_droplets.autoscale.with_raw_response.list_history(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
)
@@ -414,7 +414,7 @@ def test_raw_response_list_history(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list_history(self, client: GradientAI) -> None:
- with client.droplets.autoscale.with_streaming_response.list_history(
+ with client.gpu_droplets.autoscale.with_streaming_response.list_history(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
) as response:
assert not response.is_closed
@@ -429,14 +429,14 @@ def test_streaming_response_list_history(self, client: GradientAI) -> None:
@parametrize
def test_path_params_list_history(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
- client.droplets.autoscale.with_raw_response.list_history(
+ client.gpu_droplets.autoscale.with_raw_response.list_history(
autoscale_pool_id="",
)
@pytest.mark.skip()
@parametrize
def test_method_list_members(self, client: GradientAI) -> None:
- autoscale = client.droplets.autoscale.list_members(
+ autoscale = client.gpu_droplets.autoscale.list_members(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
)
assert_matches_type(AutoscaleListMembersResponse, autoscale, path=["response"])
@@ -444,7 +444,7 @@ def test_method_list_members(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list_members_with_all_params(self, client: GradientAI) -> None:
- autoscale = client.droplets.autoscale.list_members(
+ autoscale = client.gpu_droplets.autoscale.list_members(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
page=1,
per_page=1,
@@ -454,7 +454,7 @@ def test_method_list_members_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list_members(self, client: GradientAI) -> None:
- response = client.droplets.autoscale.with_raw_response.list_members(
+ response = client.gpu_droplets.autoscale.with_raw_response.list_members(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
)
@@ -466,7 +466,7 @@ def test_raw_response_list_members(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list_members(self, client: GradientAI) -> None:
- with client.droplets.autoscale.with_streaming_response.list_members(
+ with client.gpu_droplets.autoscale.with_streaming_response.list_members(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
) as response:
assert not response.is_closed
@@ -481,7 +481,7 @@ def test_streaming_response_list_members(self, client: GradientAI) -> None:
@parametrize
def test_path_params_list_members(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
- client.droplets.autoscale.with_raw_response.list_members(
+ client.gpu_droplets.autoscale.with_raw_response.list_members(
autoscale_pool_id="",
)
@@ -494,7 +494,7 @@ class TestAsyncAutoscale:
@pytest.mark.skip()
@parametrize
async def test_method_create(self, async_client: AsyncGradientAI) -> None:
- autoscale = await async_client.droplets.autoscale.create(
+ autoscale = await async_client.gpu_droplets.autoscale.create(
config={
"max_instances": 5,
"min_instances": 1,
@@ -512,7 +512,7 @@ async def test_method_create(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None:
- autoscale = await async_client.droplets.autoscale.create(
+ autoscale = await async_client.gpu_droplets.autoscale.create(
config={
"max_instances": 5,
"min_instances": 1,
@@ -540,7 +540,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.autoscale.with_raw_response.create(
+ response = await async_client.gpu_droplets.autoscale.with_raw_response.create(
config={
"max_instances": 5,
"min_instances": 1,
@@ -562,7 +562,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.autoscale.with_streaming_response.create(
+ async with async_client.gpu_droplets.autoscale.with_streaming_response.create(
config={
"max_instances": 5,
"min_instances": 1,
@@ -586,7 +586,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- autoscale = await async_client.droplets.autoscale.retrieve(
+ autoscale = await async_client.gpu_droplets.autoscale.retrieve(
"autoscale_pool_id",
)
assert_matches_type(AutoscaleRetrieveResponse, autoscale, path=["response"])
@@ -594,7 +594,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.autoscale.with_raw_response.retrieve(
+ response = await async_client.gpu_droplets.autoscale.with_raw_response.retrieve(
"autoscale_pool_id",
)
@@ -606,7 +606,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.autoscale.with_streaming_response.retrieve(
+ async with async_client.gpu_droplets.autoscale.with_streaming_response.retrieve(
"autoscale_pool_id",
) as response:
assert not response.is_closed
@@ -621,14 +621,14 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
- await async_client.droplets.autoscale.with_raw_response.retrieve(
+ await async_client.gpu_droplets.autoscale.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
async def test_method_update(self, async_client: AsyncGradientAI) -> None:
- autoscale = await async_client.droplets.autoscale.update(
+ autoscale = await async_client.gpu_droplets.autoscale.update(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
config={"target_number_instances": 2},
droplet_template={
@@ -644,7 +644,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None:
- autoscale = await async_client.droplets.autoscale.update(
+ autoscale = await async_client.gpu_droplets.autoscale.update(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
config={"target_number_instances": 2},
droplet_template={
@@ -667,7 +667,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.autoscale.with_raw_response.update(
+ response = await async_client.gpu_droplets.autoscale.with_raw_response.update(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
config={"target_number_instances": 2},
droplet_template={
@@ -687,7 +687,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.autoscale.with_streaming_response.update(
+ async with async_client.gpu_droplets.autoscale.with_streaming_response.update(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
config={"target_number_instances": 2},
droplet_template={
@@ -710,7 +710,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_update(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
- await async_client.droplets.autoscale.with_raw_response.update(
+ await async_client.gpu_droplets.autoscale.with_raw_response.update(
autoscale_pool_id="",
config={"target_number_instances": 2},
droplet_template={
@@ -725,13 +725,13 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- autoscale = await async_client.droplets.autoscale.list()
+ autoscale = await async_client.gpu_droplets.autoscale.list()
assert_matches_type(AutoscaleListResponse, autoscale, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- autoscale = await async_client.droplets.autoscale.list(
+ autoscale = await async_client.gpu_droplets.autoscale.list(
name="name",
page=1,
per_page=1,
@@ -741,7 +741,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.autoscale.with_raw_response.list()
+ response = await async_client.gpu_droplets.autoscale.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -751,7 +751,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.autoscale.with_streaming_response.list() as response:
+ async with async_client.gpu_droplets.autoscale.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -763,7 +763,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N
@pytest.mark.skip()
@parametrize
async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
- autoscale = await async_client.droplets.autoscale.delete(
+ autoscale = await async_client.gpu_droplets.autoscale.delete(
"autoscale_pool_id",
)
assert autoscale is None
@@ -771,7 +771,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.autoscale.with_raw_response.delete(
+ response = await async_client.gpu_droplets.autoscale.with_raw_response.delete(
"autoscale_pool_id",
)
@@ -783,7 +783,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.autoscale.with_streaming_response.delete(
+ async with async_client.gpu_droplets.autoscale.with_streaming_response.delete(
"autoscale_pool_id",
) as response:
assert not response.is_closed
@@ -798,14 +798,14 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
- await async_client.droplets.autoscale.with_raw_response.delete(
+ await async_client.gpu_droplets.autoscale.with_raw_response.delete(
"",
)
@pytest.mark.skip()
@parametrize
async def test_method_delete_dangerous(self, async_client: AsyncGradientAI) -> None:
- autoscale = await async_client.droplets.autoscale.delete_dangerous(
+ autoscale = await async_client.gpu_droplets.autoscale.delete_dangerous(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
x_dangerous=True,
)
@@ -814,7 +814,7 @@ async def test_method_delete_dangerous(self, async_client: AsyncGradientAI) -> N
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete_dangerous(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.autoscale.with_raw_response.delete_dangerous(
+ response = await async_client.gpu_droplets.autoscale.with_raw_response.delete_dangerous(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
x_dangerous=True,
)
@@ -827,7 +827,7 @@ async def test_raw_response_delete_dangerous(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete_dangerous(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.autoscale.with_streaming_response.delete_dangerous(
+ async with async_client.gpu_droplets.autoscale.with_streaming_response.delete_dangerous(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
x_dangerous=True,
) as response:
@@ -843,7 +843,7 @@ async def test_streaming_response_delete_dangerous(self, async_client: AsyncGrad
@parametrize
async def test_path_params_delete_dangerous(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
- await async_client.droplets.autoscale.with_raw_response.delete_dangerous(
+ await async_client.gpu_droplets.autoscale.with_raw_response.delete_dangerous(
autoscale_pool_id="",
x_dangerous=True,
)
@@ -851,7 +851,7 @@ async def test_path_params_delete_dangerous(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_method_list_history(self, async_client: AsyncGradientAI) -> None:
- autoscale = await async_client.droplets.autoscale.list_history(
+ autoscale = await async_client.gpu_droplets.autoscale.list_history(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
)
assert_matches_type(AutoscaleListHistoryResponse, autoscale, path=["response"])
@@ -859,7 +859,7 @@ async def test_method_list_history(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_list_history_with_all_params(self, async_client: AsyncGradientAI) -> None:
- autoscale = await async_client.droplets.autoscale.list_history(
+ autoscale = await async_client.gpu_droplets.autoscale.list_history(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
page=1,
per_page=1,
@@ -869,7 +869,7 @@ async def test_method_list_history_with_all_params(self, async_client: AsyncGrad
@pytest.mark.skip()
@parametrize
async def test_raw_response_list_history(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.autoscale.with_raw_response.list_history(
+ response = await async_client.gpu_droplets.autoscale.with_raw_response.list_history(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
)
@@ -881,7 +881,7 @@ async def test_raw_response_list_history(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list_history(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.autoscale.with_streaming_response.list_history(
+ async with async_client.gpu_droplets.autoscale.with_streaming_response.list_history(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
) as response:
assert not response.is_closed
@@ -896,14 +896,14 @@ async def test_streaming_response_list_history(self, async_client: AsyncGradient
@parametrize
async def test_path_params_list_history(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
- await async_client.droplets.autoscale.with_raw_response.list_history(
+ await async_client.gpu_droplets.autoscale.with_raw_response.list_history(
autoscale_pool_id="",
)
@pytest.mark.skip()
@parametrize
async def test_method_list_members(self, async_client: AsyncGradientAI) -> None:
- autoscale = await async_client.droplets.autoscale.list_members(
+ autoscale = await async_client.gpu_droplets.autoscale.list_members(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
)
assert_matches_type(AutoscaleListMembersResponse, autoscale, path=["response"])
@@ -911,7 +911,7 @@ async def test_method_list_members(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_list_members_with_all_params(self, async_client: AsyncGradientAI) -> None:
- autoscale = await async_client.droplets.autoscale.list_members(
+ autoscale = await async_client.gpu_droplets.autoscale.list_members(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
page=1,
per_page=1,
@@ -921,7 +921,7 @@ async def test_method_list_members_with_all_params(self, async_client: AsyncGrad
@pytest.mark.skip()
@parametrize
async def test_raw_response_list_members(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.autoscale.with_raw_response.list_members(
+ response = await async_client.gpu_droplets.autoscale.with_raw_response.list_members(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
)
@@ -933,7 +933,7 @@ async def test_raw_response_list_members(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list_members(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.autoscale.with_streaming_response.list_members(
+ async with async_client.gpu_droplets.autoscale.with_streaming_response.list_members(
autoscale_pool_id="0d3db13e-a604-4944-9827-7ec2642d32ac",
) as response:
assert not response.is_closed
@@ -948,6 +948,6 @@ async def test_streaming_response_list_members(self, async_client: AsyncGradient
@parametrize
async def test_path_params_list_members(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"):
- await async_client.droplets.autoscale.with_raw_response.list_members(
+ await async_client.gpu_droplets.autoscale.with_raw_response.list_members(
autoscale_pool_id="",
)
diff --git a/tests/api_resources/droplets/test_backups.py b/tests/api_resources/gpu_droplets/test_backups.py
similarity index 82%
rename from tests/api_resources/droplets/test_backups.py
rename to tests/api_resources/gpu_droplets/test_backups.py
index abb95c19..334c701f 100644
--- a/tests/api_resources/droplets/test_backups.py
+++ b/tests/api_resources/gpu_droplets/test_backups.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types.droplets import (
+from do_gradientai.types.gpu_droplets import (
BackupListResponse,
BackupListPoliciesResponse,
BackupRetrievePolicyResponse,
@@ -25,7 +25,7 @@ class TestBackups:
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- backup = client.droplets.backups.list(
+ backup = client.gpu_droplets.backups.list(
droplet_id=3164444,
)
assert_matches_type(BackupListResponse, backup, path=["response"])
@@ -33,7 +33,7 @@ def test_method_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- backup = client.droplets.backups.list(
+ backup = client.gpu_droplets.backups.list(
droplet_id=3164444,
page=1,
per_page=1,
@@ -43,7 +43,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.droplets.backups.with_raw_response.list(
+ response = client.gpu_droplets.backups.with_raw_response.list(
droplet_id=3164444,
)
@@ -55,7 +55,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.droplets.backups.with_streaming_response.list(
+ with client.gpu_droplets.backups.with_streaming_response.list(
droplet_id=3164444,
) as response:
assert not response.is_closed
@@ -69,13 +69,13 @@ def test_streaming_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list_policies(self, client: GradientAI) -> None:
- backup = client.droplets.backups.list_policies()
+ backup = client.gpu_droplets.backups.list_policies()
assert_matches_type(BackupListPoliciesResponse, backup, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_policies_with_all_params(self, client: GradientAI) -> None:
- backup = client.droplets.backups.list_policies(
+ backup = client.gpu_droplets.backups.list_policies(
page=1,
per_page=1,
)
@@ -84,7 +84,7 @@ def test_method_list_policies_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list_policies(self, client: GradientAI) -> None:
- response = client.droplets.backups.with_raw_response.list_policies()
+ response = client.gpu_droplets.backups.with_raw_response.list_policies()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -94,7 +94,7 @@ def test_raw_response_list_policies(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list_policies(self, client: GradientAI) -> None:
- with client.droplets.backups.with_streaming_response.list_policies() as response:
+ with client.gpu_droplets.backups.with_streaming_response.list_policies() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -106,13 +106,13 @@ def test_streaming_response_list_policies(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list_supported_policies(self, client: GradientAI) -> None:
- backup = client.droplets.backups.list_supported_policies()
+ backup = client.gpu_droplets.backups.list_supported_policies()
assert_matches_type(BackupListSupportedPoliciesResponse, backup, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_list_supported_policies(self, client: GradientAI) -> None:
- response = client.droplets.backups.with_raw_response.list_supported_policies()
+ response = client.gpu_droplets.backups.with_raw_response.list_supported_policies()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -122,7 +122,7 @@ def test_raw_response_list_supported_policies(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list_supported_policies(self, client: GradientAI) -> None:
- with client.droplets.backups.with_streaming_response.list_supported_policies() as response:
+ with client.gpu_droplets.backups.with_streaming_response.list_supported_policies() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -134,7 +134,7 @@ def test_streaming_response_list_supported_policies(self, client: GradientAI) ->
@pytest.mark.skip()
@parametrize
def test_method_retrieve_policy(self, client: GradientAI) -> None:
- backup = client.droplets.backups.retrieve_policy(
+ backup = client.gpu_droplets.backups.retrieve_policy(
1,
)
assert_matches_type(BackupRetrievePolicyResponse, backup, path=["response"])
@@ -142,7 +142,7 @@ def test_method_retrieve_policy(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve_policy(self, client: GradientAI) -> None:
- response = client.droplets.backups.with_raw_response.retrieve_policy(
+ response = client.gpu_droplets.backups.with_raw_response.retrieve_policy(
1,
)
@@ -154,7 +154,7 @@ def test_raw_response_retrieve_policy(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve_policy(self, client: GradientAI) -> None:
- with client.droplets.backups.with_streaming_response.retrieve_policy(
+ with client.gpu_droplets.backups.with_streaming_response.retrieve_policy(
1,
) as response:
assert not response.is_closed
@@ -174,7 +174,7 @@ class TestAsyncBackups:
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- backup = await async_client.droplets.backups.list(
+ backup = await async_client.gpu_droplets.backups.list(
droplet_id=3164444,
)
assert_matches_type(BackupListResponse, backup, path=["response"])
@@ -182,7 +182,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- backup = await async_client.droplets.backups.list(
+ backup = await async_client.gpu_droplets.backups.list(
droplet_id=3164444,
page=1,
per_page=1,
@@ -192,7 +192,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.backups.with_raw_response.list(
+ response = await async_client.gpu_droplets.backups.with_raw_response.list(
droplet_id=3164444,
)
@@ -204,7 +204,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.backups.with_streaming_response.list(
+ async with async_client.gpu_droplets.backups.with_streaming_response.list(
droplet_id=3164444,
) as response:
assert not response.is_closed
@@ -218,13 +218,13 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N
@pytest.mark.skip()
@parametrize
async def test_method_list_policies(self, async_client: AsyncGradientAI) -> None:
- backup = await async_client.droplets.backups.list_policies()
+ backup = await async_client.gpu_droplets.backups.list_policies()
assert_matches_type(BackupListPoliciesResponse, backup, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_policies_with_all_params(self, async_client: AsyncGradientAI) -> None:
- backup = await async_client.droplets.backups.list_policies(
+ backup = await async_client.gpu_droplets.backups.list_policies(
page=1,
per_page=1,
)
@@ -233,7 +233,7 @@ async def test_method_list_policies_with_all_params(self, async_client: AsyncGra
@pytest.mark.skip()
@parametrize
async def test_raw_response_list_policies(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.backups.with_raw_response.list_policies()
+ response = await async_client.gpu_droplets.backups.with_raw_response.list_policies()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -243,7 +243,7 @@ async def test_raw_response_list_policies(self, async_client: AsyncGradientAI) -
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list_policies(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.backups.with_streaming_response.list_policies() as response:
+ async with async_client.gpu_droplets.backups.with_streaming_response.list_policies() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -255,13 +255,13 @@ async def test_streaming_response_list_policies(self, async_client: AsyncGradien
@pytest.mark.skip()
@parametrize
async def test_method_list_supported_policies(self, async_client: AsyncGradientAI) -> None:
- backup = await async_client.droplets.backups.list_supported_policies()
+ backup = await async_client.gpu_droplets.backups.list_supported_policies()
assert_matches_type(BackupListSupportedPoliciesResponse, backup, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_list_supported_policies(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.backups.with_raw_response.list_supported_policies()
+ response = await async_client.gpu_droplets.backups.with_raw_response.list_supported_policies()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -271,7 +271,7 @@ async def test_raw_response_list_supported_policies(self, async_client: AsyncGra
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list_supported_policies(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.backups.with_streaming_response.list_supported_policies() as response:
+ async with async_client.gpu_droplets.backups.with_streaming_response.list_supported_policies() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -283,7 +283,7 @@ async def test_streaming_response_list_supported_policies(self, async_client: As
@pytest.mark.skip()
@parametrize
async def test_method_retrieve_policy(self, async_client: AsyncGradientAI) -> None:
- backup = await async_client.droplets.backups.retrieve_policy(
+ backup = await async_client.gpu_droplets.backups.retrieve_policy(
1,
)
assert_matches_type(BackupRetrievePolicyResponse, backup, path=["response"])
@@ -291,7 +291,7 @@ async def test_method_retrieve_policy(self, async_client: AsyncGradientAI) -> No
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve_policy(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.backups.with_raw_response.retrieve_policy(
+ response = await async_client.gpu_droplets.backups.with_raw_response.retrieve_policy(
1,
)
@@ -303,7 +303,7 @@ async def test_raw_response_retrieve_policy(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve_policy(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.backups.with_streaming_response.retrieve_policy(
+ async with async_client.gpu_droplets.backups.with_streaming_response.retrieve_policy(
1,
) as response:
assert not response.is_closed
diff --git a/tests/api_resources/droplets/test_destroy_with_associated_resources.py b/tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py
similarity index 79%
rename from tests/api_resources/droplets/test_destroy_with_associated_resources.py
rename to tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py
index 9ad3c1dc..2aef1fce 100644
--- a/tests/api_resources/droplets/test_destroy_with_associated_resources.py
+++ b/tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types.droplets import (
+from do_gradientai.types.gpu_droplets import (
DestroyWithAssociatedResourceListResponse,
DestroyWithAssociatedResourceCheckStatusResponse,
)
@@ -23,7 +23,7 @@ class TestDestroyWithAssociatedResources:
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- destroy_with_associated_resource = client.droplets.destroy_with_associated_resources.list(
+ destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.list(
1,
)
assert_matches_type(
@@ -33,7 +33,7 @@ def test_method_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.droplets.destroy_with_associated_resources.with_raw_response.list(
+ response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.list(
1,
)
@@ -47,7 +47,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.droplets.destroy_with_associated_resources.with_streaming_response.list(
+ with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.list(
1,
) as response:
assert not response.is_closed
@@ -63,7 +63,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_check_status(self, client: GradientAI) -> None:
- destroy_with_associated_resource = client.droplets.destroy_with_associated_resources.check_status(
+ destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.check_status(
1,
)
assert_matches_type(
@@ -73,7 +73,7 @@ def test_method_check_status(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_check_status(self, client: GradientAI) -> None:
- response = client.droplets.destroy_with_associated_resources.with_raw_response.check_status(
+ response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.check_status(
1,
)
@@ -87,7 +87,7 @@ def test_raw_response_check_status(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_check_status(self, client: GradientAI) -> None:
- with client.droplets.destroy_with_associated_resources.with_streaming_response.check_status(
+ with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.check_status(
1,
) as response:
assert not response.is_closed
@@ -103,7 +103,7 @@ def test_streaming_response_check_status(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_delete_dangerous(self, client: GradientAI) -> None:
- destroy_with_associated_resource = client.droplets.destroy_with_associated_resources.delete_dangerous(
+ destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.delete_dangerous(
droplet_id=3164444,
x_dangerous=True,
)
@@ -112,7 +112,7 @@ def test_method_delete_dangerous(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_delete_dangerous(self, client: GradientAI) -> None:
- response = client.droplets.destroy_with_associated_resources.with_raw_response.delete_dangerous(
+ response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_dangerous(
droplet_id=3164444,
x_dangerous=True,
)
@@ -125,7 +125,7 @@ def test_raw_response_delete_dangerous(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete_dangerous(self, client: GradientAI) -> None:
- with client.droplets.destroy_with_associated_resources.with_streaming_response.delete_dangerous(
+ with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_dangerous(
droplet_id=3164444,
x_dangerous=True,
) as response:
@@ -140,7 +140,7 @@ def test_streaming_response_delete_dangerous(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_delete_selective(self, client: GradientAI) -> None:
- destroy_with_associated_resource = client.droplets.destroy_with_associated_resources.delete_selective(
+ destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.delete_selective(
droplet_id=3164444,
)
assert destroy_with_associated_resource is None
@@ -148,7 +148,7 @@ def test_method_delete_selective(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_delete_selective_with_all_params(self, client: GradientAI) -> None:
- destroy_with_associated_resource = client.droplets.destroy_with_associated_resources.delete_selective(
+ destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.delete_selective(
droplet_id=3164444,
floating_ips=["6186916"],
reserved_ips=["6186916"],
@@ -161,7 +161,7 @@ def test_method_delete_selective_with_all_params(self, client: GradientAI) -> No
@pytest.mark.skip()
@parametrize
def test_raw_response_delete_selective(self, client: GradientAI) -> None:
- response = client.droplets.destroy_with_associated_resources.with_raw_response.delete_selective(
+ response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_selective(
droplet_id=3164444,
)
@@ -173,7 +173,7 @@ def test_raw_response_delete_selective(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete_selective(self, client: GradientAI) -> None:
- with client.droplets.destroy_with_associated_resources.with_streaming_response.delete_selective(
+ with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_selective(
droplet_id=3164444,
) as response:
assert not response.is_closed
@@ -187,7 +187,7 @@ def test_streaming_response_delete_selective(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_retry(self, client: GradientAI) -> None:
- destroy_with_associated_resource = client.droplets.destroy_with_associated_resources.retry(
+ destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.retry(
1,
)
assert destroy_with_associated_resource is None
@@ -195,7 +195,7 @@ def test_method_retry(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retry(self, client: GradientAI) -> None:
- response = client.droplets.destroy_with_associated_resources.with_raw_response.retry(
+ response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.retry(
1,
)
@@ -207,7 +207,7 @@ def test_raw_response_retry(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retry(self, client: GradientAI) -> None:
- with client.droplets.destroy_with_associated_resources.with_streaming_response.retry(
+ with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.retry(
1,
) as response:
assert not response.is_closed
@@ -227,7 +227,7 @@ class TestAsyncDestroyWithAssociatedResources:
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- destroy_with_associated_resource = await async_client.droplets.destroy_with_associated_resources.list(
+ destroy_with_associated_resource = await async_client.gpu_droplets.destroy_with_associated_resources.list(
1,
)
assert_matches_type(
@@ -237,7 +237,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.destroy_with_associated_resources.with_raw_response.list(
+ response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.list(
1,
)
@@ -251,7 +251,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.destroy_with_associated_resources.with_streaming_response.list(
+ async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.list(
1,
) as response:
assert not response.is_closed
@@ -267,8 +267,10 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N
@pytest.mark.skip()
@parametrize
async def test_method_check_status(self, async_client: AsyncGradientAI) -> None:
- destroy_with_associated_resource = await async_client.droplets.destroy_with_associated_resources.check_status(
- 1,
+ destroy_with_associated_resource = (
+ await async_client.gpu_droplets.destroy_with_associated_resources.check_status(
+ 1,
+ )
)
assert_matches_type(
DestroyWithAssociatedResourceCheckStatusResponse, destroy_with_associated_resource, path=["response"]
@@ -277,7 +279,7 @@ async def test_method_check_status(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_check_status(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.destroy_with_associated_resources.with_raw_response.check_status(
+ response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.check_status(
1,
)
@@ -291,7 +293,7 @@ async def test_raw_response_check_status(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_streaming_response_check_status(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.destroy_with_associated_resources.with_streaming_response.check_status(
+ async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.check_status(
1,
) as response:
assert not response.is_closed
@@ -308,7 +310,7 @@ async def test_streaming_response_check_status(self, async_client: AsyncGradient
@parametrize
async def test_method_delete_dangerous(self, async_client: AsyncGradientAI) -> None:
destroy_with_associated_resource = (
- await async_client.droplets.destroy_with_associated_resources.delete_dangerous(
+ await async_client.gpu_droplets.destroy_with_associated_resources.delete_dangerous(
droplet_id=3164444,
x_dangerous=True,
)
@@ -318,7 +320,7 @@ async def test_method_delete_dangerous(self, async_client: AsyncGradientAI) -> N
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete_dangerous(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.destroy_with_associated_resources.with_raw_response.delete_dangerous(
+ response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_dangerous(
droplet_id=3164444,
x_dangerous=True,
)
@@ -331,7 +333,7 @@ async def test_raw_response_delete_dangerous(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete_dangerous(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.destroy_with_associated_resources.with_streaming_response.delete_dangerous(
+ async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_dangerous(
droplet_id=3164444,
x_dangerous=True,
) as response:
@@ -347,7 +349,7 @@ async def test_streaming_response_delete_dangerous(self, async_client: AsyncGrad
@parametrize
async def test_method_delete_selective(self, async_client: AsyncGradientAI) -> None:
destroy_with_associated_resource = (
- await async_client.droplets.destroy_with_associated_resources.delete_selective(
+ await async_client.gpu_droplets.destroy_with_associated_resources.delete_selective(
droplet_id=3164444,
)
)
@@ -357,7 +359,7 @@ async def test_method_delete_selective(self, async_client: AsyncGradientAI) -> N
@parametrize
async def test_method_delete_selective_with_all_params(self, async_client: AsyncGradientAI) -> None:
destroy_with_associated_resource = (
- await async_client.droplets.destroy_with_associated_resources.delete_selective(
+ await async_client.gpu_droplets.destroy_with_associated_resources.delete_selective(
droplet_id=3164444,
floating_ips=["6186916"],
reserved_ips=["6186916"],
@@ -371,7 +373,7 @@ async def test_method_delete_selective_with_all_params(self, async_client: Async
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete_selective(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.destroy_with_associated_resources.with_raw_response.delete_selective(
+ response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_selective(
droplet_id=3164444,
)
@@ -383,7 +385,7 @@ async def test_raw_response_delete_selective(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete_selective(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.destroy_with_associated_resources.with_streaming_response.delete_selective(
+ async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_selective(
droplet_id=3164444,
) as response:
assert not response.is_closed
@@ -397,7 +399,7 @@ async def test_streaming_response_delete_selective(self, async_client: AsyncGrad
@pytest.mark.skip()
@parametrize
async def test_method_retry(self, async_client: AsyncGradientAI) -> None:
- destroy_with_associated_resource = await async_client.droplets.destroy_with_associated_resources.retry(
+ destroy_with_associated_resource = await async_client.gpu_droplets.destroy_with_associated_resources.retry(
1,
)
assert destroy_with_associated_resource is None
@@ -405,7 +407,7 @@ async def test_method_retry(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_retry(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.destroy_with_associated_resources.with_raw_response.retry(
+ response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.retry(
1,
)
@@ -417,7 +419,7 @@ async def test_raw_response_retry(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retry(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.destroy_with_associated_resources.with_streaming_response.retry(
+ async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.retry(
1,
) as response:
assert not response.is_closed
diff --git a/tests/api_resources/test_firewalls.py b/tests/api_resources/gpu_droplets/test_firewalls.py
similarity index 87%
rename from tests/api_resources/test_firewalls.py
rename to tests/api_resources/gpu_droplets/test_firewalls.py
index fb41eb37..6d98ebe8 100644
--- a/tests/api_resources/test_firewalls.py
+++ b/tests/api_resources/gpu_droplets/test_firewalls.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types import (
+from do_gradientai.types.gpu_droplets import (
FirewallListResponse,
FirewallCreateResponse,
FirewallUpdateResponse,
@@ -25,13 +25,13 @@ class TestFirewalls:
@pytest.mark.skip()
@parametrize
def test_method_create(self, client: GradientAI) -> None:
- firewall = client.firewalls.create()
+ firewall = client.gpu_droplets.firewalls.create()
assert_matches_type(FirewallCreateResponse, firewall, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_create_with_all_params(self, client: GradientAI) -> None:
- firewall = client.firewalls.create(
+ firewall = client.gpu_droplets.firewalls.create(
body={
"droplet_ids": [8043964],
"inbound_rules": [
@@ -80,7 +80,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_create(self, client: GradientAI) -> None:
- response = client.firewalls.with_raw_response.create()
+ response = client.gpu_droplets.firewalls.with_raw_response.create()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -90,7 +90,7 @@ def test_raw_response_create(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create(self, client: GradientAI) -> None:
- with client.firewalls.with_streaming_response.create() as response:
+ with client.gpu_droplets.firewalls.with_streaming_response.create() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -102,7 +102,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- firewall = client.firewalls.retrieve(
+ firewall = client.gpu_droplets.firewalls.retrieve(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
assert_matches_type(FirewallRetrieveResponse, firewall, path=["response"])
@@ -110,7 +110,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.firewalls.with_raw_response.retrieve(
+ response = client.gpu_droplets.firewalls.with_raw_response.retrieve(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
@@ -122,7 +122,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.firewalls.with_streaming_response.retrieve(
+ with client.gpu_droplets.firewalls.with_streaming_response.retrieve(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
) as response:
assert not response.is_closed
@@ -137,14 +137,14 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@parametrize
def test_path_params_retrieve(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- client.firewalls.with_raw_response.retrieve(
+ client.gpu_droplets.firewalls.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
def test_method_update(self, client: GradientAI) -> None:
- firewall = client.firewalls.update(
+ firewall = client.gpu_droplets.firewalls.update(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
firewall={"name": "frontend-firewall"},
)
@@ -153,7 +153,7 @@ def test_method_update(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_update_with_all_params(self, client: GradientAI) -> None:
- firewall = client.firewalls.update(
+ firewall = client.gpu_droplets.firewalls.update(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
firewall={
"droplet_ids": [8043964],
@@ -203,7 +203,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_update(self, client: GradientAI) -> None:
- response = client.firewalls.with_raw_response.update(
+ response = client.gpu_droplets.firewalls.with_raw_response.update(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
firewall={"name": "frontend-firewall"},
)
@@ -216,7 +216,7 @@ def test_raw_response_update(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_update(self, client: GradientAI) -> None:
- with client.firewalls.with_streaming_response.update(
+ with client.gpu_droplets.firewalls.with_streaming_response.update(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
firewall={"name": "frontend-firewall"},
) as response:
@@ -232,7 +232,7 @@ def test_streaming_response_update(self, client: GradientAI) -> None:
@parametrize
def test_path_params_update(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- client.firewalls.with_raw_response.update(
+ client.gpu_droplets.firewalls.with_raw_response.update(
firewall_id="",
firewall={"name": "frontend-firewall"},
)
@@ -240,13 +240,13 @@ def test_path_params_update(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- firewall = client.firewalls.list()
+ firewall = client.gpu_droplets.firewalls.list()
assert_matches_type(FirewallListResponse, firewall, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- firewall = client.firewalls.list(
+ firewall = client.gpu_droplets.firewalls.list(
page=1,
per_page=1,
)
@@ -255,7 +255,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.firewalls.with_raw_response.list()
+ response = client.gpu_droplets.firewalls.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -265,7 +265,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.firewalls.with_streaming_response.list() as response:
+ with client.gpu_droplets.firewalls.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -277,7 +277,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_delete(self, client: GradientAI) -> None:
- firewall = client.firewalls.delete(
+ firewall = client.gpu_droplets.firewalls.delete(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
assert firewall is None
@@ -285,7 +285,7 @@ def test_method_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_delete(self, client: GradientAI) -> None:
- response = client.firewalls.with_raw_response.delete(
+ response = client.gpu_droplets.firewalls.with_raw_response.delete(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
@@ -297,7 +297,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete(self, client: GradientAI) -> None:
- with client.firewalls.with_streaming_response.delete(
+ with client.gpu_droplets.firewalls.with_streaming_response.delete(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
) as response:
assert not response.is_closed
@@ -312,7 +312,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None:
@parametrize
def test_path_params_delete(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- client.firewalls.with_raw_response.delete(
+ client.gpu_droplets.firewalls.with_raw_response.delete(
"",
)
@@ -325,13 +325,13 @@ class TestAsyncFirewalls:
@pytest.mark.skip()
@parametrize
async def test_method_create(self, async_client: AsyncGradientAI) -> None:
- firewall = await async_client.firewalls.create()
+ firewall = await async_client.gpu_droplets.firewalls.create()
assert_matches_type(FirewallCreateResponse, firewall, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None:
- firewall = await async_client.firewalls.create(
+ firewall = await async_client.gpu_droplets.firewalls.create(
body={
"droplet_ids": [8043964],
"inbound_rules": [
@@ -380,7 +380,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.firewalls.with_raw_response.create()
+ response = await async_client.gpu_droplets.firewalls.with_raw_response.create()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -390,7 +390,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None:
- async with async_client.firewalls.with_streaming_response.create() as response:
+ async with async_client.gpu_droplets.firewalls.with_streaming_response.create() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -402,7 +402,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- firewall = await async_client.firewalls.retrieve(
+ firewall = await async_client.gpu_droplets.firewalls.retrieve(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
assert_matches_type(FirewallRetrieveResponse, firewall, path=["response"])
@@ -410,7 +410,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.firewalls.with_raw_response.retrieve(
+ response = await async_client.gpu_droplets.firewalls.with_raw_response.retrieve(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
@@ -422,7 +422,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.firewalls.with_streaming_response.retrieve(
+ async with async_client.gpu_droplets.firewalls.with_streaming_response.retrieve(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
) as response:
assert not response.is_closed
@@ -437,14 +437,14 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- await async_client.firewalls.with_raw_response.retrieve(
+ await async_client.gpu_droplets.firewalls.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
async def test_method_update(self, async_client: AsyncGradientAI) -> None:
- firewall = await async_client.firewalls.update(
+ firewall = await async_client.gpu_droplets.firewalls.update(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
firewall={"name": "frontend-firewall"},
)
@@ -453,7 +453,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None:
- firewall = await async_client.firewalls.update(
+ firewall = await async_client.gpu_droplets.firewalls.update(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
firewall={
"droplet_ids": [8043964],
@@ -503,7 +503,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.firewalls.with_raw_response.update(
+ response = await async_client.gpu_droplets.firewalls.with_raw_response.update(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
firewall={"name": "frontend-firewall"},
)
@@ -516,7 +516,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None:
- async with async_client.firewalls.with_streaming_response.update(
+ async with async_client.gpu_droplets.firewalls.with_streaming_response.update(
firewall_id="bb4b2611-3d72-467b-8602-280330ecd65c",
firewall={"name": "frontend-firewall"},
) as response:
@@ -532,7 +532,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_update(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- await async_client.firewalls.with_raw_response.update(
+ await async_client.gpu_droplets.firewalls.with_raw_response.update(
firewall_id="",
firewall={"name": "frontend-firewall"},
)
@@ -540,13 +540,13 @@ async def test_path_params_update(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- firewall = await async_client.firewalls.list()
+ firewall = await async_client.gpu_droplets.firewalls.list()
assert_matches_type(FirewallListResponse, firewall, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- firewall = await async_client.firewalls.list(
+ firewall = await async_client.gpu_droplets.firewalls.list(
page=1,
per_page=1,
)
@@ -555,7 +555,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.firewalls.with_raw_response.list()
+ response = await async_client.gpu_droplets.firewalls.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -565,7 +565,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.firewalls.with_streaming_response.list() as response:
+ async with async_client.gpu_droplets.firewalls.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -577,7 +577,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N
@pytest.mark.skip()
@parametrize
async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
- firewall = await async_client.firewalls.delete(
+ firewall = await async_client.gpu_droplets.firewalls.delete(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
assert firewall is None
@@ -585,7 +585,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.firewalls.with_raw_response.delete(
+ response = await async_client.gpu_droplets.firewalls.with_raw_response.delete(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
@@ -597,7 +597,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None:
- async with async_client.firewalls.with_streaming_response.delete(
+ async with async_client.gpu_droplets.firewalls.with_streaming_response.delete(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
) as response:
assert not response.is_closed
@@ -612,6 +612,6 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"):
- await async_client.firewalls.with_raw_response.delete(
+ await async_client.gpu_droplets.firewalls.with_raw_response.delete(
"",
)
diff --git a/tests/api_resources/test_floating_ips.py b/tests/api_resources/gpu_droplets/test_floating_ips.py
similarity index 82%
rename from tests/api_resources/test_floating_ips.py
rename to tests/api_resources/gpu_droplets/test_floating_ips.py
index 40904ab8..9b8b3183 100644
--- a/tests/api_resources/test_floating_ips.py
+++ b/tests/api_resources/gpu_droplets/test_floating_ips.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types import (
+from do_gradientai.types.gpu_droplets import (
FloatingIPListResponse,
FloatingIPCreateResponse,
FloatingIPRetrieveResponse,
@@ -24,7 +24,7 @@ class TestFloatingIPs:
@pytest.mark.skip()
@parametrize
def test_method_create_overload_1(self, client: GradientAI) -> None:
- floating_ip = client.floating_ips.create(
+ floating_ip = client.gpu_droplets.floating_ips.create(
droplet_id=2457247,
)
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"])
@@ -32,7 +32,7 @@ def test_method_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_create_overload_1(self, client: GradientAI) -> None:
- response = client.floating_ips.with_raw_response.create(
+ response = client.gpu_droplets.floating_ips.with_raw_response.create(
droplet_id=2457247,
)
@@ -44,7 +44,7 @@ def test_raw_response_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create_overload_1(self, client: GradientAI) -> None:
- with client.floating_ips.with_streaming_response.create(
+ with client.gpu_droplets.floating_ips.with_streaming_response.create(
droplet_id=2457247,
) as response:
assert not response.is_closed
@@ -58,7 +58,7 @@ def test_streaming_response_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_create_overload_2(self, client: GradientAI) -> None:
- floating_ip = client.floating_ips.create(
+ floating_ip = client.gpu_droplets.floating_ips.create(
region="nyc3",
)
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"])
@@ -66,7 +66,7 @@ def test_method_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> None:
- floating_ip = client.floating_ips.create(
+ floating_ip = client.gpu_droplets.floating_ips.create(
region="nyc3",
project_id="746c6152-2fa2-11ed-92d3-27aaa54e4988",
)
@@ -75,7 +75,7 @@ def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> N
@pytest.mark.skip()
@parametrize
def test_raw_response_create_overload_2(self, client: GradientAI) -> None:
- response = client.floating_ips.with_raw_response.create(
+ response = client.gpu_droplets.floating_ips.with_raw_response.create(
region="nyc3",
)
@@ -87,7 +87,7 @@ def test_raw_response_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create_overload_2(self, client: GradientAI) -> None:
- with client.floating_ips.with_streaming_response.create(
+ with client.gpu_droplets.floating_ips.with_streaming_response.create(
region="nyc3",
) as response:
assert not response.is_closed
@@ -101,7 +101,7 @@ def test_streaming_response_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- floating_ip = client.floating_ips.retrieve(
+ floating_ip = client.gpu_droplets.floating_ips.retrieve(
"192.168.1.1",
)
assert_matches_type(FloatingIPRetrieveResponse, floating_ip, path=["response"])
@@ -109,7 +109,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.floating_ips.with_raw_response.retrieve(
+ response = client.gpu_droplets.floating_ips.with_raw_response.retrieve(
"192.168.1.1",
)
@@ -121,7 +121,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.floating_ips.with_streaming_response.retrieve(
+ with client.gpu_droplets.floating_ips.with_streaming_response.retrieve(
"192.168.1.1",
) as response:
assert not response.is_closed
@@ -136,20 +136,20 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@parametrize
def test_path_params_retrieve(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
- client.floating_ips.with_raw_response.retrieve(
+ client.gpu_droplets.floating_ips.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- floating_ip = client.floating_ips.list()
+ floating_ip = client.gpu_droplets.floating_ips.list()
assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- floating_ip = client.floating_ips.list(
+ floating_ip = client.gpu_droplets.floating_ips.list(
page=1,
per_page=1,
)
@@ -158,7 +158,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.floating_ips.with_raw_response.list()
+ response = client.gpu_droplets.floating_ips.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -168,7 +168,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.floating_ips.with_streaming_response.list() as response:
+ with client.gpu_droplets.floating_ips.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -180,7 +180,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_delete(self, client: GradientAI) -> None:
- floating_ip = client.floating_ips.delete(
+ floating_ip = client.gpu_droplets.floating_ips.delete(
"192.168.1.1",
)
assert floating_ip is None
@@ -188,7 +188,7 @@ def test_method_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_delete(self, client: GradientAI) -> None:
- response = client.floating_ips.with_raw_response.delete(
+ response = client.gpu_droplets.floating_ips.with_raw_response.delete(
"192.168.1.1",
)
@@ -200,7 +200,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete(self, client: GradientAI) -> None:
- with client.floating_ips.with_streaming_response.delete(
+ with client.gpu_droplets.floating_ips.with_streaming_response.delete(
"192.168.1.1",
) as response:
assert not response.is_closed
@@ -215,7 +215,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None:
@parametrize
def test_path_params_delete(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
- client.floating_ips.with_raw_response.delete(
+ client.gpu_droplets.floating_ips.with_raw_response.delete(
"",
)
@@ -228,7 +228,7 @@ class TestAsyncFloatingIPs:
@pytest.mark.skip()
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- floating_ip = await async_client.floating_ips.create(
+ floating_ip = await async_client.gpu_droplets.floating_ips.create(
droplet_id=2457247,
)
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"])
@@ -236,7 +236,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.floating_ips.with_raw_response.create(
+ response = await async_client.gpu_droplets.floating_ips.with_raw_response.create(
droplet_id=2457247,
)
@@ -248,7 +248,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientA
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- async with async_client.floating_ips.with_streaming_response.create(
+ async with async_client.gpu_droplets.floating_ips.with_streaming_response.create(
droplet_id=2457247,
) as response:
assert not response.is_closed
@@ -262,7 +262,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
@pytest.mark.skip()
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- floating_ip = await async_client.floating_ips.create(
+ floating_ip = await async_client.gpu_droplets.floating_ips.create(
region="nyc3",
)
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"])
@@ -270,7 +270,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None:
- floating_ip = await async_client.floating_ips.create(
+ floating_ip = await async_client.gpu_droplets.floating_ips.create(
region="nyc3",
project_id="746c6152-2fa2-11ed-92d3-27aaa54e4988",
)
@@ -279,7 +279,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
@pytest.mark.skip()
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.floating_ips.with_raw_response.create(
+ response = await async_client.gpu_droplets.floating_ips.with_raw_response.create(
region="nyc3",
)
@@ -291,7 +291,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientA
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- async with async_client.floating_ips.with_streaming_response.create(
+ async with async_client.gpu_droplets.floating_ips.with_streaming_response.create(
region="nyc3",
) as response:
assert not response.is_closed
@@ -305,7 +305,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- floating_ip = await async_client.floating_ips.retrieve(
+ floating_ip = await async_client.gpu_droplets.floating_ips.retrieve(
"192.168.1.1",
)
assert_matches_type(FloatingIPRetrieveResponse, floating_ip, path=["response"])
@@ -313,7 +313,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.floating_ips.with_raw_response.retrieve(
+ response = await async_client.gpu_droplets.floating_ips.with_raw_response.retrieve(
"192.168.1.1",
)
@@ -325,7 +325,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.floating_ips.with_streaming_response.retrieve(
+ async with async_client.gpu_droplets.floating_ips.with_streaming_response.retrieve(
"192.168.1.1",
) as response:
assert not response.is_closed
@@ -340,20 +340,20 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
- await async_client.floating_ips.with_raw_response.retrieve(
+ await async_client.gpu_droplets.floating_ips.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- floating_ip = await async_client.floating_ips.list()
+ floating_ip = await async_client.gpu_droplets.floating_ips.list()
assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- floating_ip = await async_client.floating_ips.list(
+ floating_ip = await async_client.gpu_droplets.floating_ips.list(
page=1,
per_page=1,
)
@@ -362,7 +362,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.floating_ips.with_raw_response.list()
+ response = await async_client.gpu_droplets.floating_ips.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -372,7 +372,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.floating_ips.with_streaming_response.list() as response:
+ async with async_client.gpu_droplets.floating_ips.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -384,7 +384,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N
@pytest.mark.skip()
@parametrize
async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
- floating_ip = await async_client.floating_ips.delete(
+ floating_ip = await async_client.gpu_droplets.floating_ips.delete(
"192.168.1.1",
)
assert floating_ip is None
@@ -392,7 +392,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.floating_ips.with_raw_response.delete(
+ response = await async_client.gpu_droplets.floating_ips.with_raw_response.delete(
"192.168.1.1",
)
@@ -404,7 +404,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None:
- async with async_client.floating_ips.with_streaming_response.delete(
+ async with async_client.gpu_droplets.floating_ips.with_streaming_response.delete(
"192.168.1.1",
) as response:
assert not response.is_closed
@@ -419,6 +419,6 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"):
- await async_client.floating_ips.with_raw_response.delete(
+ await async_client.gpu_droplets.floating_ips.with_raw_response.delete(
"",
)
diff --git a/tests/api_resources/test_images.py b/tests/api_resources/gpu_droplets/test_images.py
similarity index 83%
rename from tests/api_resources/test_images.py
rename to tests/api_resources/gpu_droplets/test_images.py
index 4ca6ee2d..5a2a7c0c 100644
--- a/tests/api_resources/test_images.py
+++ b/tests/api_resources/gpu_droplets/test_images.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types import (
+from do_gradientai.types.gpu_droplets import (
ImageListResponse,
ImageCreateResponse,
ImageUpdateResponse,
@@ -25,13 +25,13 @@ class TestImages:
@pytest.mark.skip()
@parametrize
def test_method_create(self, client: GradientAI) -> None:
- image = client.images.create()
+ image = client.gpu_droplets.images.create()
assert_matches_type(ImageCreateResponse, image, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_create_with_all_params(self, client: GradientAI) -> None:
- image = client.images.create(
+ image = client.gpu_droplets.images.create(
description=" ",
distribution="Ubuntu",
name="Nifty New Snapshot",
@@ -44,7 +44,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_create(self, client: GradientAI) -> None:
- response = client.images.with_raw_response.create()
+ response = client.gpu_droplets.images.with_raw_response.create()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -54,7 +54,7 @@ def test_raw_response_create(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create(self, client: GradientAI) -> None:
- with client.images.with_streaming_response.create() as response:
+ with client.gpu_droplets.images.with_streaming_response.create() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -66,7 +66,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- image = client.images.retrieve(
+ image = client.gpu_droplets.images.retrieve(
0,
)
assert_matches_type(ImageRetrieveResponse, image, path=["response"])
@@ -74,7 +74,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.images.with_raw_response.retrieve(
+ response = client.gpu_droplets.images.with_raw_response.retrieve(
0,
)
@@ -86,7 +86,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.images.with_streaming_response.retrieve(
+ with client.gpu_droplets.images.with_streaming_response.retrieve(
0,
) as response:
assert not response.is_closed
@@ -100,7 +100,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_update(self, client: GradientAI) -> None:
- image = client.images.update(
+ image = client.gpu_droplets.images.update(
image_id=62137902,
)
assert_matches_type(ImageUpdateResponse, image, path=["response"])
@@ -108,7 +108,7 @@ def test_method_update(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_update_with_all_params(self, client: GradientAI) -> None:
- image = client.images.update(
+ image = client.gpu_droplets.images.update(
image_id=62137902,
description=" ",
distribution="Ubuntu",
@@ -119,7 +119,7 @@ def test_method_update_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_update(self, client: GradientAI) -> None:
- response = client.images.with_raw_response.update(
+ response = client.gpu_droplets.images.with_raw_response.update(
image_id=62137902,
)
@@ -131,7 +131,7 @@ def test_raw_response_update(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_update(self, client: GradientAI) -> None:
- with client.images.with_streaming_response.update(
+ with client.gpu_droplets.images.with_streaming_response.update(
image_id=62137902,
) as response:
assert not response.is_closed
@@ -145,13 +145,13 @@ def test_streaming_response_update(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- image = client.images.list()
+ image = client.gpu_droplets.images.list()
assert_matches_type(ImageListResponse, image, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- image = client.images.list(
+ image = client.gpu_droplets.images.list(
page=1,
per_page=1,
private=True,
@@ -163,7 +163,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.images.with_raw_response.list()
+ response = client.gpu_droplets.images.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -173,7 +173,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.images.with_streaming_response.list() as response:
+ with client.gpu_droplets.images.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -185,7 +185,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_delete(self, client: GradientAI) -> None:
- image = client.images.delete(
+ image = client.gpu_droplets.images.delete(
0,
)
assert image is None
@@ -193,7 +193,7 @@ def test_method_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_delete(self, client: GradientAI) -> None:
- response = client.images.with_raw_response.delete(
+ response = client.gpu_droplets.images.with_raw_response.delete(
0,
)
@@ -205,7 +205,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete(self, client: GradientAI) -> None:
- with client.images.with_streaming_response.delete(
+ with client.gpu_droplets.images.with_streaming_response.delete(
0,
) as response:
assert not response.is_closed
@@ -225,13 +225,13 @@ class TestAsyncImages:
@pytest.mark.skip()
@parametrize
async def test_method_create(self, async_client: AsyncGradientAI) -> None:
- image = await async_client.images.create()
+ image = await async_client.gpu_droplets.images.create()
assert_matches_type(ImageCreateResponse, image, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None:
- image = await async_client.images.create(
+ image = await async_client.gpu_droplets.images.create(
description=" ",
distribution="Ubuntu",
name="Nifty New Snapshot",
@@ -244,7 +244,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.images.with_raw_response.create()
+ response = await async_client.gpu_droplets.images.with_raw_response.create()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -254,7 +254,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None:
- async with async_client.images.with_streaming_response.create() as response:
+ async with async_client.gpu_droplets.images.with_streaming_response.create() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -266,7 +266,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- image = await async_client.images.retrieve(
+ image = await async_client.gpu_droplets.images.retrieve(
0,
)
assert_matches_type(ImageRetrieveResponse, image, path=["response"])
@@ -274,7 +274,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.images.with_raw_response.retrieve(
+ response = await async_client.gpu_droplets.images.with_raw_response.retrieve(
0,
)
@@ -286,7 +286,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.images.with_streaming_response.retrieve(
+ async with async_client.gpu_droplets.images.with_streaming_response.retrieve(
0,
) as response:
assert not response.is_closed
@@ -300,7 +300,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_method_update(self, async_client: AsyncGradientAI) -> None:
- image = await async_client.images.update(
+ image = await async_client.gpu_droplets.images.update(
image_id=62137902,
)
assert_matches_type(ImageUpdateResponse, image, path=["response"])
@@ -308,7 +308,7 @@ async def test_method_update(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_update_with_all_params(self, async_client: AsyncGradientAI) -> None:
- image = await async_client.images.update(
+ image = await async_client.gpu_droplets.images.update(
image_id=62137902,
description=" ",
distribution="Ubuntu",
@@ -319,7 +319,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.images.with_raw_response.update(
+ response = await async_client.gpu_droplets.images.with_raw_response.update(
image_id=62137902,
)
@@ -331,7 +331,7 @@ async def test_raw_response_update(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_update(self, async_client: AsyncGradientAI) -> None:
- async with async_client.images.with_streaming_response.update(
+ async with async_client.gpu_droplets.images.with_streaming_response.update(
image_id=62137902,
) as response:
assert not response.is_closed
@@ -345,13 +345,13 @@ async def test_streaming_response_update(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- image = await async_client.images.list()
+ image = await async_client.gpu_droplets.images.list()
assert_matches_type(ImageListResponse, image, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- image = await async_client.images.list(
+ image = await async_client.gpu_droplets.images.list(
page=1,
per_page=1,
private=True,
@@ -363,7 +363,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.images.with_raw_response.list()
+ response = await async_client.gpu_droplets.images.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -373,7 +373,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.images.with_streaming_response.list() as response:
+ async with async_client.gpu_droplets.images.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -385,7 +385,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N
@pytest.mark.skip()
@parametrize
async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
- image = await async_client.images.delete(
+ image = await async_client.gpu_droplets.images.delete(
0,
)
assert image is None
@@ -393,7 +393,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.images.with_raw_response.delete(
+ response = await async_client.gpu_droplets.images.with_raw_response.delete(
0,
)
@@ -405,7 +405,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None:
- async with async_client.images.with_streaming_response.delete(
+ async with async_client.gpu_droplets.images.with_streaming_response.delete(
0,
) as response:
assert not response.is_closed
diff --git a/tests/api_resources/test_load_balancers.py b/tests/api_resources/gpu_droplets/test_load_balancers.py
similarity index 90%
rename from tests/api_resources/test_load_balancers.py
rename to tests/api_resources/gpu_droplets/test_load_balancers.py
index 257636b8..b96c6d52 100644
--- a/tests/api_resources/test_load_balancers.py
+++ b/tests/api_resources/gpu_droplets/test_load_balancers.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types import (
+from do_gradientai.types.gpu_droplets import (
LoadBalancerListResponse,
LoadBalancerCreateResponse,
LoadBalancerUpdateResponse,
@@ -25,7 +25,7 @@ class TestLoadBalancers:
@pytest.mark.skip()
@parametrize
def test_method_create_overload_1(self, client: GradientAI) -> None:
- load_balancer = client.load_balancers.create(
+ load_balancer = client.gpu_droplets.load_balancers.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -40,7 +40,7 @@ def test_method_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> None:
- load_balancer = client.load_balancers.create(
+ load_balancer = client.gpu_droplets.load_balancers.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -111,7 +111,7 @@ def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> N
@pytest.mark.skip()
@parametrize
def test_raw_response_create_overload_1(self, client: GradientAI) -> None:
- response = client.load_balancers.with_raw_response.create(
+ response = client.gpu_droplets.load_balancers.with_raw_response.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -130,7 +130,7 @@ def test_raw_response_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create_overload_1(self, client: GradientAI) -> None:
- with client.load_balancers.with_streaming_response.create(
+ with client.gpu_droplets.load_balancers.with_streaming_response.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -151,7 +151,7 @@ def test_streaming_response_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_create_overload_2(self, client: GradientAI) -> None:
- load_balancer = client.load_balancers.create(
+ load_balancer = client.gpu_droplets.load_balancers.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -166,7 +166,7 @@ def test_method_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> None:
- load_balancer = client.load_balancers.create(
+ load_balancer = client.gpu_droplets.load_balancers.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -237,7 +237,7 @@ def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> N
@pytest.mark.skip()
@parametrize
def test_raw_response_create_overload_2(self, client: GradientAI) -> None:
- response = client.load_balancers.with_raw_response.create(
+ response = client.gpu_droplets.load_balancers.with_raw_response.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -256,7 +256,7 @@ def test_raw_response_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create_overload_2(self, client: GradientAI) -> None:
- with client.load_balancers.with_streaming_response.create(
+ with client.gpu_droplets.load_balancers.with_streaming_response.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -277,7 +277,7 @@ def test_streaming_response_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- load_balancer = client.load_balancers.retrieve(
+ load_balancer = client.gpu_droplets.load_balancers.retrieve(
"lb_id",
)
assert_matches_type(LoadBalancerRetrieveResponse, load_balancer, path=["response"])
@@ -285,7 +285,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.load_balancers.with_raw_response.retrieve(
+ response = client.gpu_droplets.load_balancers.with_raw_response.retrieve(
"lb_id",
)
@@ -297,7 +297,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.load_balancers.with_streaming_response.retrieve(
+ with client.gpu_droplets.load_balancers.with_streaming_response.retrieve(
"lb_id",
) as response:
assert not response.is_closed
@@ -312,14 +312,14 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@parametrize
def test_path_params_retrieve(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- client.load_balancers.with_raw_response.retrieve(
+ client.gpu_droplets.load_balancers.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
def test_method_update_overload_1(self, client: GradientAI) -> None:
- load_balancer = client.load_balancers.update(
+ load_balancer = client.gpu_droplets.load_balancers.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -335,7 +335,7 @@ def test_method_update_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_update_with_all_params_overload_1(self, client: GradientAI) -> None:
- load_balancer = client.load_balancers.update(
+ load_balancer = client.gpu_droplets.load_balancers.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -407,7 +407,7 @@ def test_method_update_with_all_params_overload_1(self, client: GradientAI) -> N
@pytest.mark.skip()
@parametrize
def test_raw_response_update_overload_1(self, client: GradientAI) -> None:
- response = client.load_balancers.with_raw_response.update(
+ response = client.gpu_droplets.load_balancers.with_raw_response.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -427,7 +427,7 @@ def test_raw_response_update_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_update_overload_1(self, client: GradientAI) -> None:
- with client.load_balancers.with_streaming_response.update(
+ with client.gpu_droplets.load_balancers.with_streaming_response.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -450,7 +450,7 @@ def test_streaming_response_update_overload_1(self, client: GradientAI) -> None:
@parametrize
def test_path_params_update_overload_1(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- client.load_balancers.with_raw_response.update(
+ client.gpu_droplets.load_balancers.with_raw_response.update(
lb_id="",
forwarding_rules=[
{
@@ -465,7 +465,7 @@ def test_path_params_update_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_update_overload_2(self, client: GradientAI) -> None:
- load_balancer = client.load_balancers.update(
+ load_balancer = client.gpu_droplets.load_balancers.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -481,7 +481,7 @@ def test_method_update_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_update_with_all_params_overload_2(self, client: GradientAI) -> None:
- load_balancer = client.load_balancers.update(
+ load_balancer = client.gpu_droplets.load_balancers.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -553,7 +553,7 @@ def test_method_update_with_all_params_overload_2(self, client: GradientAI) -> N
@pytest.mark.skip()
@parametrize
def test_raw_response_update_overload_2(self, client: GradientAI) -> None:
- response = client.load_balancers.with_raw_response.update(
+ response = client.gpu_droplets.load_balancers.with_raw_response.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -573,7 +573,7 @@ def test_raw_response_update_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_update_overload_2(self, client: GradientAI) -> None:
- with client.load_balancers.with_streaming_response.update(
+ with client.gpu_droplets.load_balancers.with_streaming_response.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -596,7 +596,7 @@ def test_streaming_response_update_overload_2(self, client: GradientAI) -> None:
@parametrize
def test_path_params_update_overload_2(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- client.load_balancers.with_raw_response.update(
+ client.gpu_droplets.load_balancers.with_raw_response.update(
lb_id="",
forwarding_rules=[
{
@@ -611,13 +611,13 @@ def test_path_params_update_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- load_balancer = client.load_balancers.list()
+ load_balancer = client.gpu_droplets.load_balancers.list()
assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- load_balancer = client.load_balancers.list(
+ load_balancer = client.gpu_droplets.load_balancers.list(
page=1,
per_page=1,
)
@@ -626,7 +626,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.load_balancers.with_raw_response.list()
+ response = client.gpu_droplets.load_balancers.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -636,7 +636,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.load_balancers.with_streaming_response.list() as response:
+ with client.gpu_droplets.load_balancers.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -648,7 +648,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_delete(self, client: GradientAI) -> None:
- load_balancer = client.load_balancers.delete(
+ load_balancer = client.gpu_droplets.load_balancers.delete(
"lb_id",
)
assert load_balancer is None
@@ -656,7 +656,7 @@ def test_method_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_delete(self, client: GradientAI) -> None:
- response = client.load_balancers.with_raw_response.delete(
+ response = client.gpu_droplets.load_balancers.with_raw_response.delete(
"lb_id",
)
@@ -668,7 +668,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete(self, client: GradientAI) -> None:
- with client.load_balancers.with_streaming_response.delete(
+ with client.gpu_droplets.load_balancers.with_streaming_response.delete(
"lb_id",
) as response:
assert not response.is_closed
@@ -683,14 +683,14 @@ def test_streaming_response_delete(self, client: GradientAI) -> None:
@parametrize
def test_path_params_delete(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- client.load_balancers.with_raw_response.delete(
+ client.gpu_droplets.load_balancers.with_raw_response.delete(
"",
)
@pytest.mark.skip()
@parametrize
def test_method_delete_cache(self, client: GradientAI) -> None:
- load_balancer = client.load_balancers.delete_cache(
+ load_balancer = client.gpu_droplets.load_balancers.delete_cache(
"lb_id",
)
assert load_balancer is None
@@ -698,7 +698,7 @@ def test_method_delete_cache(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_delete_cache(self, client: GradientAI) -> None:
- response = client.load_balancers.with_raw_response.delete_cache(
+ response = client.gpu_droplets.load_balancers.with_raw_response.delete_cache(
"lb_id",
)
@@ -710,7 +710,7 @@ def test_raw_response_delete_cache(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete_cache(self, client: GradientAI) -> None:
- with client.load_balancers.with_streaming_response.delete_cache(
+ with client.gpu_droplets.load_balancers.with_streaming_response.delete_cache(
"lb_id",
) as response:
assert not response.is_closed
@@ -725,7 +725,7 @@ def test_streaming_response_delete_cache(self, client: GradientAI) -> None:
@parametrize
def test_path_params_delete_cache(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- client.load_balancers.with_raw_response.delete_cache(
+ client.gpu_droplets.load_balancers.with_raw_response.delete_cache(
"",
)
@@ -738,7 +738,7 @@ class TestAsyncLoadBalancers:
@pytest.mark.skip()
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- load_balancer = await async_client.load_balancers.create(
+ load_balancer = await async_client.gpu_droplets.load_balancers.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -753,7 +753,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None:
- load_balancer = await async_client.load_balancers.create(
+ load_balancer = await async_client.gpu_droplets.load_balancers.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -824,7 +824,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
@pytest.mark.skip()
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.load_balancers.with_raw_response.create(
+ response = await async_client.gpu_droplets.load_balancers.with_raw_response.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -843,7 +843,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientA
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- async with async_client.load_balancers.with_streaming_response.create(
+ async with async_client.gpu_droplets.load_balancers.with_streaming_response.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -864,7 +864,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
@pytest.mark.skip()
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- load_balancer = await async_client.load_balancers.create(
+ load_balancer = await async_client.gpu_droplets.load_balancers.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -879,7 +879,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None:
- load_balancer = await async_client.load_balancers.create(
+ load_balancer = await async_client.gpu_droplets.load_balancers.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -950,7 +950,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
@pytest.mark.skip()
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.load_balancers.with_raw_response.create(
+ response = await async_client.gpu_droplets.load_balancers.with_raw_response.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -969,7 +969,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientA
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- async with async_client.load_balancers.with_streaming_response.create(
+ async with async_client.gpu_droplets.load_balancers.with_streaming_response.create(
forwarding_rules=[
{
"entry_port": 443,
@@ -990,7 +990,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- load_balancer = await async_client.load_balancers.retrieve(
+ load_balancer = await async_client.gpu_droplets.load_balancers.retrieve(
"lb_id",
)
assert_matches_type(LoadBalancerRetrieveResponse, load_balancer, path=["response"])
@@ -998,7 +998,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.load_balancers.with_raw_response.retrieve(
+ response = await async_client.gpu_droplets.load_balancers.with_raw_response.retrieve(
"lb_id",
)
@@ -1010,7 +1010,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.load_balancers.with_streaming_response.retrieve(
+ async with async_client.gpu_droplets.load_balancers.with_streaming_response.retrieve(
"lb_id",
) as response:
assert not response.is_closed
@@ -1025,14 +1025,14 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- await async_client.load_balancers.with_raw_response.retrieve(
+ await async_client.gpu_droplets.load_balancers.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
async def test_method_update_overload_1(self, async_client: AsyncGradientAI) -> None:
- load_balancer = await async_client.load_balancers.update(
+ load_balancer = await async_client.gpu_droplets.load_balancers.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -1048,7 +1048,7 @@ async def test_method_update_overload_1(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_method_update_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None:
- load_balancer = await async_client.load_balancers.update(
+ load_balancer = await async_client.gpu_droplets.load_balancers.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -1120,7 +1120,7 @@ async def test_method_update_with_all_params_overload_1(self, async_client: Asyn
@pytest.mark.skip()
@parametrize
async def test_raw_response_update_overload_1(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.load_balancers.with_raw_response.update(
+ response = await async_client.gpu_droplets.load_balancers.with_raw_response.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -1140,7 +1140,7 @@ async def test_raw_response_update_overload_1(self, async_client: AsyncGradientA
@pytest.mark.skip()
@parametrize
async def test_streaming_response_update_overload_1(self, async_client: AsyncGradientAI) -> None:
- async with async_client.load_balancers.with_streaming_response.update(
+ async with async_client.gpu_droplets.load_balancers.with_streaming_response.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -1163,7 +1163,7 @@ async def test_streaming_response_update_overload_1(self, async_client: AsyncGra
@parametrize
async def test_path_params_update_overload_1(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- await async_client.load_balancers.with_raw_response.update(
+ await async_client.gpu_droplets.load_balancers.with_raw_response.update(
lb_id="",
forwarding_rules=[
{
@@ -1178,7 +1178,7 @@ async def test_path_params_update_overload_1(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_method_update_overload_2(self, async_client: AsyncGradientAI) -> None:
- load_balancer = await async_client.load_balancers.update(
+ load_balancer = await async_client.gpu_droplets.load_balancers.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -1194,7 +1194,7 @@ async def test_method_update_overload_2(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_method_update_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None:
- load_balancer = await async_client.load_balancers.update(
+ load_balancer = await async_client.gpu_droplets.load_balancers.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -1266,7 +1266,7 @@ async def test_method_update_with_all_params_overload_2(self, async_client: Asyn
@pytest.mark.skip()
@parametrize
async def test_raw_response_update_overload_2(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.load_balancers.with_raw_response.update(
+ response = await async_client.gpu_droplets.load_balancers.with_raw_response.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -1286,7 +1286,7 @@ async def test_raw_response_update_overload_2(self, async_client: AsyncGradientA
@pytest.mark.skip()
@parametrize
async def test_streaming_response_update_overload_2(self, async_client: AsyncGradientAI) -> None:
- async with async_client.load_balancers.with_streaming_response.update(
+ async with async_client.gpu_droplets.load_balancers.with_streaming_response.update(
lb_id="4de7ac8b-495b-4884-9a69-1050c6793cd6",
forwarding_rules=[
{
@@ -1309,7 +1309,7 @@ async def test_streaming_response_update_overload_2(self, async_client: AsyncGra
@parametrize
async def test_path_params_update_overload_2(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- await async_client.load_balancers.with_raw_response.update(
+ await async_client.gpu_droplets.load_balancers.with_raw_response.update(
lb_id="",
forwarding_rules=[
{
@@ -1324,13 +1324,13 @@ async def test_path_params_update_overload_2(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- load_balancer = await async_client.load_balancers.list()
+ load_balancer = await async_client.gpu_droplets.load_balancers.list()
assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- load_balancer = await async_client.load_balancers.list(
+ load_balancer = await async_client.gpu_droplets.load_balancers.list(
page=1,
per_page=1,
)
@@ -1339,7 +1339,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.load_balancers.with_raw_response.list()
+ response = await async_client.gpu_droplets.load_balancers.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -1349,7 +1349,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.load_balancers.with_streaming_response.list() as response:
+ async with async_client.gpu_droplets.load_balancers.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -1361,7 +1361,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N
@pytest.mark.skip()
@parametrize
async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
- load_balancer = await async_client.load_balancers.delete(
+ load_balancer = await async_client.gpu_droplets.load_balancers.delete(
"lb_id",
)
assert load_balancer is None
@@ -1369,7 +1369,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.load_balancers.with_raw_response.delete(
+ response = await async_client.gpu_droplets.load_balancers.with_raw_response.delete(
"lb_id",
)
@@ -1381,7 +1381,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None:
- async with async_client.load_balancers.with_streaming_response.delete(
+ async with async_client.gpu_droplets.load_balancers.with_streaming_response.delete(
"lb_id",
) as response:
assert not response.is_closed
@@ -1396,14 +1396,14 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- await async_client.load_balancers.with_raw_response.delete(
+ await async_client.gpu_droplets.load_balancers.with_raw_response.delete(
"",
)
@pytest.mark.skip()
@parametrize
async def test_method_delete_cache(self, async_client: AsyncGradientAI) -> None:
- load_balancer = await async_client.load_balancers.delete_cache(
+ load_balancer = await async_client.gpu_droplets.load_balancers.delete_cache(
"lb_id",
)
assert load_balancer is None
@@ -1411,7 +1411,7 @@ async def test_method_delete_cache(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete_cache(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.load_balancers.with_raw_response.delete_cache(
+ response = await async_client.gpu_droplets.load_balancers.with_raw_response.delete_cache(
"lb_id",
)
@@ -1423,7 +1423,7 @@ async def test_raw_response_delete_cache(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete_cache(self, async_client: AsyncGradientAI) -> None:
- async with async_client.load_balancers.with_streaming_response.delete_cache(
+ async with async_client.gpu_droplets.load_balancers.with_streaming_response.delete_cache(
"lb_id",
) as response:
assert not response.is_closed
@@ -1438,6 +1438,6 @@ async def test_streaming_response_delete_cache(self, async_client: AsyncGradient
@parametrize
async def test_path_params_delete_cache(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"):
- await async_client.load_balancers.with_raw_response.delete_cache(
+ await async_client.gpu_droplets.load_balancers.with_raw_response.delete_cache(
"",
)
diff --git a/tests/api_resources/test_sizes.py b/tests/api_resources/gpu_droplets/test_sizes.py
similarity index 83%
rename from tests/api_resources/test_sizes.py
rename to tests/api_resources/gpu_droplets/test_sizes.py
index 4e73485e..1ff11cd7 100644
--- a/tests/api_resources/test_sizes.py
+++ b/tests/api_resources/gpu_droplets/test_sizes.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types import SizeListResponse
+from do_gradientai.types.gpu_droplets import SizeListResponse
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -20,13 +20,13 @@ class TestSizes:
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- size = client.sizes.list()
+ size = client.gpu_droplets.sizes.list()
assert_matches_type(SizeListResponse, size, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- size = client.sizes.list(
+ size = client.gpu_droplets.sizes.list(
page=1,
per_page=1,
)
@@ -35,7 +35,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.sizes.with_raw_response.list()
+ response = client.gpu_droplets.sizes.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -45,7 +45,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.sizes.with_streaming_response.list() as response:
+ with client.gpu_droplets.sizes.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -63,13 +63,13 @@ class TestAsyncSizes:
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- size = await async_client.sizes.list()
+ size = await async_client.gpu_droplets.sizes.list()
assert_matches_type(SizeListResponse, size, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- size = await async_client.sizes.list(
+ size = await async_client.gpu_droplets.sizes.list(
page=1,
per_page=1,
)
@@ -78,7 +78,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.sizes.with_raw_response.list()
+ response = await async_client.gpu_droplets.sizes.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -88,7 +88,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.sizes.with_streaming_response.list() as response:
+ async with async_client.gpu_droplets.sizes.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
diff --git a/tests/api_resources/test_snapshots.py b/tests/api_resources/gpu_droplets/test_snapshots.py
similarity index 82%
rename from tests/api_resources/test_snapshots.py
rename to tests/api_resources/gpu_droplets/test_snapshots.py
index 9910158a..413dd993 100644
--- a/tests/api_resources/test_snapshots.py
+++ b/tests/api_resources/gpu_droplets/test_snapshots.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types import SnapshotListResponse, SnapshotRetrieveResponse
+from do_gradientai.types.gpu_droplets import SnapshotListResponse, SnapshotRetrieveResponse
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -20,7 +20,7 @@ class TestSnapshots:
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- snapshot = client.snapshots.retrieve(
+ snapshot = client.gpu_droplets.snapshots.retrieve(
6372321,
)
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
@@ -28,7 +28,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.snapshots.with_raw_response.retrieve(
+ response = client.gpu_droplets.snapshots.with_raw_response.retrieve(
6372321,
)
@@ -40,7 +40,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.snapshots.with_streaming_response.retrieve(
+ with client.gpu_droplets.snapshots.with_streaming_response.retrieve(
6372321,
) as response:
assert not response.is_closed
@@ -54,13 +54,13 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- snapshot = client.snapshots.list()
+ snapshot = client.gpu_droplets.snapshots.list()
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- snapshot = client.snapshots.list(
+ snapshot = client.gpu_droplets.snapshots.list(
page=1,
per_page=1,
resource_type="droplet",
@@ -70,7 +70,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.snapshots.with_raw_response.list()
+ response = client.gpu_droplets.snapshots.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -80,7 +80,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.snapshots.with_streaming_response.list() as response:
+ with client.gpu_droplets.snapshots.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -92,7 +92,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_delete(self, client: GradientAI) -> None:
- snapshot = client.snapshots.delete(
+ snapshot = client.gpu_droplets.snapshots.delete(
6372321,
)
assert snapshot is None
@@ -100,7 +100,7 @@ def test_method_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_delete(self, client: GradientAI) -> None:
- response = client.snapshots.with_raw_response.delete(
+ response = client.gpu_droplets.snapshots.with_raw_response.delete(
6372321,
)
@@ -112,7 +112,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete(self, client: GradientAI) -> None:
- with client.snapshots.with_streaming_response.delete(
+ with client.gpu_droplets.snapshots.with_streaming_response.delete(
6372321,
) as response:
assert not response.is_closed
@@ -132,7 +132,7 @@ class TestAsyncSnapshots:
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- snapshot = await async_client.snapshots.retrieve(
+ snapshot = await async_client.gpu_droplets.snapshots.retrieve(
6372321,
)
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
@@ -140,7 +140,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.snapshots.with_raw_response.retrieve(
+ response = await async_client.gpu_droplets.snapshots.with_raw_response.retrieve(
6372321,
)
@@ -152,7 +152,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.snapshots.with_streaming_response.retrieve(
+ async with async_client.gpu_droplets.snapshots.with_streaming_response.retrieve(
6372321,
) as response:
assert not response.is_closed
@@ -166,13 +166,13 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- snapshot = await async_client.snapshots.list()
+ snapshot = await async_client.gpu_droplets.snapshots.list()
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- snapshot = await async_client.snapshots.list(
+ snapshot = await async_client.gpu_droplets.snapshots.list(
page=1,
per_page=1,
resource_type="droplet",
@@ -182,7 +182,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.snapshots.with_raw_response.list()
+ response = await async_client.gpu_droplets.snapshots.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -192,7 +192,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.snapshots.with_streaming_response.list() as response:
+ async with async_client.gpu_droplets.snapshots.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -204,7 +204,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N
@pytest.mark.skip()
@parametrize
async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
- snapshot = await async_client.snapshots.delete(
+ snapshot = await async_client.gpu_droplets.snapshots.delete(
6372321,
)
assert snapshot is None
@@ -212,7 +212,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.snapshots.with_raw_response.delete(
+ response = await async_client.gpu_droplets.snapshots.with_raw_response.delete(
6372321,
)
@@ -224,7 +224,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None:
- async with async_client.snapshots.with_streaming_response.delete(
+ async with async_client.gpu_droplets.snapshots.with_streaming_response.delete(
6372321,
) as response:
assert not response.is_closed
diff --git a/tests/api_resources/test_volumes.py b/tests/api_resources/gpu_droplets/test_volumes.py
similarity index 83%
rename from tests/api_resources/test_volumes.py
rename to tests/api_resources/gpu_droplets/test_volumes.py
index 38d9cd91..baf6b430 100644
--- a/tests/api_resources/test_volumes.py
+++ b/tests/api_resources/gpu_droplets/test_volumes.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types import (
+from do_gradientai.types.gpu_droplets import (
VolumeListResponse,
VolumeCreateResponse,
VolumeRetrieveResponse,
@@ -24,7 +24,7 @@ class TestVolumes:
@pytest.mark.skip()
@parametrize
def test_method_create_overload_1(self, client: GradientAI) -> None:
- volume = client.volumes.create(
+ volume = client.gpu_droplets.volumes.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -34,7 +34,7 @@ def test_method_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> None:
- volume = client.volumes.create(
+ volume = client.gpu_droplets.volumes.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -49,7 +49,7 @@ def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> N
@pytest.mark.skip()
@parametrize
def test_raw_response_create_overload_1(self, client: GradientAI) -> None:
- response = client.volumes.with_raw_response.create(
+ response = client.gpu_droplets.volumes.with_raw_response.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -63,7 +63,7 @@ def test_raw_response_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create_overload_1(self, client: GradientAI) -> None:
- with client.volumes.with_streaming_response.create(
+ with client.gpu_droplets.volumes.with_streaming_response.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -79,7 +79,7 @@ def test_streaming_response_create_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_create_overload_2(self, client: GradientAI) -> None:
- volume = client.volumes.create(
+ volume = client.gpu_droplets.volumes.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -89,7 +89,7 @@ def test_method_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> None:
- volume = client.volumes.create(
+ volume = client.gpu_droplets.volumes.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -104,7 +104,7 @@ def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> N
@pytest.mark.skip()
@parametrize
def test_raw_response_create_overload_2(self, client: GradientAI) -> None:
- response = client.volumes.with_raw_response.create(
+ response = client.gpu_droplets.volumes.with_raw_response.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -118,7 +118,7 @@ def test_raw_response_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create_overload_2(self, client: GradientAI) -> None:
- with client.volumes.with_streaming_response.create(
+ with client.gpu_droplets.volumes.with_streaming_response.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -134,7 +134,7 @@ def test_streaming_response_create_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- volume = client.volumes.retrieve(
+ volume = client.gpu_droplets.volumes.retrieve(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
assert_matches_type(VolumeRetrieveResponse, volume, path=["response"])
@@ -142,7 +142,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.volumes.with_raw_response.retrieve(
+ response = client.gpu_droplets.volumes.with_raw_response.retrieve(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
@@ -154,7 +154,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.volumes.with_streaming_response.retrieve(
+ with client.gpu_droplets.volumes.with_streaming_response.retrieve(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
) as response:
assert not response.is_closed
@@ -169,20 +169,20 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@parametrize
def test_path_params_retrieve(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- client.volumes.with_raw_response.retrieve(
+ client.gpu_droplets.volumes.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- volume = client.volumes.list()
+ volume = client.gpu_droplets.volumes.list()
assert_matches_type(VolumeListResponse, volume, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- volume = client.volumes.list(
+ volume = client.gpu_droplets.volumes.list(
name="name",
page=1,
per_page=1,
@@ -193,7 +193,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.volumes.with_raw_response.list()
+ response = client.gpu_droplets.volumes.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -203,7 +203,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.volumes.with_streaming_response.list() as response:
+ with client.gpu_droplets.volumes.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -215,7 +215,7 @@ def test_streaming_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_delete(self, client: GradientAI) -> None:
- volume = client.volumes.delete(
+ volume = client.gpu_droplets.volumes.delete(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
assert volume is None
@@ -223,7 +223,7 @@ def test_method_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_delete(self, client: GradientAI) -> None:
- response = client.volumes.with_raw_response.delete(
+ response = client.gpu_droplets.volumes.with_raw_response.delete(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
@@ -235,7 +235,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete(self, client: GradientAI) -> None:
- with client.volumes.with_streaming_response.delete(
+ with client.gpu_droplets.volumes.with_streaming_response.delete(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
) as response:
assert not response.is_closed
@@ -250,20 +250,20 @@ def test_streaming_response_delete(self, client: GradientAI) -> None:
@parametrize
def test_path_params_delete(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- client.volumes.with_raw_response.delete(
+ client.gpu_droplets.volumes.with_raw_response.delete(
"",
)
@pytest.mark.skip()
@parametrize
def test_method_delete_by_name(self, client: GradientAI) -> None:
- volume = client.volumes.delete_by_name()
+ volume = client.gpu_droplets.volumes.delete_by_name()
assert volume is None
@pytest.mark.skip()
@parametrize
def test_method_delete_by_name_with_all_params(self, client: GradientAI) -> None:
- volume = client.volumes.delete_by_name(
+ volume = client.gpu_droplets.volumes.delete_by_name(
name="name",
region="nyc3",
)
@@ -272,7 +272,7 @@ def test_method_delete_by_name_with_all_params(self, client: GradientAI) -> None
@pytest.mark.skip()
@parametrize
def test_raw_response_delete_by_name(self, client: GradientAI) -> None:
- response = client.volumes.with_raw_response.delete_by_name()
+ response = client.gpu_droplets.volumes.with_raw_response.delete_by_name()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -282,7 +282,7 @@ def test_raw_response_delete_by_name(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete_by_name(self, client: GradientAI) -> None:
- with client.volumes.with_streaming_response.delete_by_name() as response:
+ with client.gpu_droplets.volumes.with_streaming_response.delete_by_name() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -300,7 +300,7 @@ class TestAsyncVolumes:
@pytest.mark.skip()
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- volume = await async_client.volumes.create(
+ volume = await async_client.gpu_droplets.volumes.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -310,7 +310,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None:
- volume = await async_client.volumes.create(
+ volume = await async_client.gpu_droplets.volumes.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -325,7 +325,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
@pytest.mark.skip()
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.with_raw_response.create(
+ response = await async_client.gpu_droplets.volumes.with_raw_response.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -339,7 +339,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientA
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.with_streaming_response.create(
+ async with async_client.gpu_droplets.volumes.with_streaming_response.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -355,7 +355,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
@pytest.mark.skip()
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- volume = await async_client.volumes.create(
+ volume = await async_client.gpu_droplets.volumes.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -365,7 +365,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradientAI) ->
@pytest.mark.skip()
@parametrize
async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None:
- volume = await async_client.volumes.create(
+ volume = await async_client.gpu_droplets.volumes.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -380,7 +380,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
@pytest.mark.skip()
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.with_raw_response.create(
+ response = await async_client.gpu_droplets.volumes.with_raw_response.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -394,7 +394,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientA
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.with_streaming_response.create(
+ async with async_client.gpu_droplets.volumes.with_streaming_response.create(
name="example",
region="nyc3",
size_gigabytes=10,
@@ -410,7 +410,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- volume = await async_client.volumes.retrieve(
+ volume = await async_client.gpu_droplets.volumes.retrieve(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
assert_matches_type(VolumeRetrieveResponse, volume, path=["response"])
@@ -418,7 +418,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.with_raw_response.retrieve(
+ response = await async_client.gpu_droplets.volumes.with_raw_response.retrieve(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
@@ -430,7 +430,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.with_streaming_response.retrieve(
+ async with async_client.gpu_droplets.volumes.with_streaming_response.retrieve(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
) as response:
assert not response.is_closed
@@ -445,20 +445,20 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- await async_client.volumes.with_raw_response.retrieve(
+ await async_client.gpu_droplets.volumes.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- volume = await async_client.volumes.list()
+ volume = await async_client.gpu_droplets.volumes.list()
assert_matches_type(VolumeListResponse, volume, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- volume = await async_client.volumes.list(
+ volume = await async_client.gpu_droplets.volumes.list(
name="name",
page=1,
per_page=1,
@@ -469,7 +469,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.with_raw_response.list()
+ response = await async_client.gpu_droplets.volumes.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -479,7 +479,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.with_streaming_response.list() as response:
+ async with async_client.gpu_droplets.volumes.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -491,7 +491,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N
@pytest.mark.skip()
@parametrize
async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
- volume = await async_client.volumes.delete(
+ volume = await async_client.gpu_droplets.volumes.delete(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
assert volume is None
@@ -499,7 +499,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.with_raw_response.delete(
+ response = await async_client.gpu_droplets.volumes.with_raw_response.delete(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
)
@@ -511,7 +511,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.with_streaming_response.delete(
+ async with async_client.gpu_droplets.volumes.with_streaming_response.delete(
"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
) as response:
assert not response.is_closed
@@ -526,20 +526,20 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- await async_client.volumes.with_raw_response.delete(
+ await async_client.gpu_droplets.volumes.with_raw_response.delete(
"",
)
@pytest.mark.skip()
@parametrize
async def test_method_delete_by_name(self, async_client: AsyncGradientAI) -> None:
- volume = await async_client.volumes.delete_by_name()
+ volume = await async_client.gpu_droplets.volumes.delete_by_name()
assert volume is None
@pytest.mark.skip()
@parametrize
async def test_method_delete_by_name_with_all_params(self, async_client: AsyncGradientAI) -> None:
- volume = await async_client.volumes.delete_by_name(
+ volume = await async_client.gpu_droplets.volumes.delete_by_name(
name="name",
region="nyc3",
)
@@ -548,7 +548,7 @@ async def test_method_delete_by_name_with_all_params(self, async_client: AsyncGr
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete_by_name(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.with_raw_response.delete_by_name()
+ response = await async_client.gpu_droplets.volumes.with_raw_response.delete_by_name()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -558,7 +558,7 @@ async def test_raw_response_delete_by_name(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete_by_name(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.with_streaming_response.delete_by_name() as response:
+ async with async_client.gpu_droplets.volumes.with_streaming_response.delete_by_name() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
diff --git a/tests/api_resources/volumes/__init__.py b/tests/api_resources/gpu_droplets/volumes/__init__.py
similarity index 100%
rename from tests/api_resources/volumes/__init__.py
rename to tests/api_resources/gpu_droplets/volumes/__init__.py
diff --git a/tests/api_resources/volumes/test_actions.py b/tests/api_resources/gpu_droplets/volumes/test_actions.py
similarity index 83%
rename from tests/api_resources/volumes/test_actions.py
rename to tests/api_resources/gpu_droplets/volumes/test_actions.py
index 5b9c2786..40d9b4eb 100644
--- a/tests/api_resources/volumes/test_actions.py
+++ b/tests/api_resources/gpu_droplets/volumes/test_actions.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types.volumes import (
+from do_gradientai.types.gpu_droplets.volumes import (
ActionListResponse,
ActionRetrieveResponse,
ActionInitiateByIDResponse,
@@ -25,7 +25,7 @@ class TestActions:
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- action = client.volumes.actions.retrieve(
+ action = client.gpu_droplets.volumes.actions.retrieve(
action_id=36804636,
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
)
@@ -34,7 +34,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_retrieve_with_all_params(self, client: GradientAI) -> None:
- action = client.volumes.actions.retrieve(
+ action = client.gpu_droplets.volumes.actions.retrieve(
action_id=36804636,
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
page=1,
@@ -45,7 +45,7 @@ def test_method_retrieve_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.volumes.actions.with_raw_response.retrieve(
+ response = client.gpu_droplets.volumes.actions.with_raw_response.retrieve(
action_id=36804636,
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
)
@@ -58,7 +58,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.volumes.actions.with_streaming_response.retrieve(
+ with client.gpu_droplets.volumes.actions.with_streaming_response.retrieve(
action_id=36804636,
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
) as response:
@@ -74,7 +74,7 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@parametrize
def test_path_params_retrieve(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- client.volumes.actions.with_raw_response.retrieve(
+ client.gpu_droplets.volumes.actions.with_raw_response.retrieve(
action_id=36804636,
volume_id="",
)
@@ -82,7 +82,7 @@ def test_path_params_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- action = client.volumes.actions.list(
+ action = client.gpu_droplets.volumes.actions.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
)
assert_matches_type(ActionListResponse, action, path=["response"])
@@ -90,7 +90,7 @@ def test_method_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- action = client.volumes.actions.list(
+ action = client.gpu_droplets.volumes.actions.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
page=1,
per_page=1,
@@ -100,7 +100,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.volumes.actions.with_raw_response.list(
+ response = client.gpu_droplets.volumes.actions.with_raw_response.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
)
@@ -112,7 +112,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.volumes.actions.with_streaming_response.list(
+ with client.gpu_droplets.volumes.actions.with_streaming_response.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
) as response:
assert not response.is_closed
@@ -127,14 +127,14 @@ def test_streaming_response_list(self, client: GradientAI) -> None:
@parametrize
def test_path_params_list(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- client.volumes.actions.with_raw_response.list(
+ client.gpu_droplets.volumes.actions.with_raw_response.list(
volume_id="",
)
@pytest.mark.skip()
@parametrize
def test_method_initiate_by_id_overload_1(self, client: GradientAI) -> None:
- action = client.volumes.actions.initiate_by_id(
+ action = client.gpu_droplets.volumes.actions.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -144,7 +144,7 @@ def test_method_initiate_by_id_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_initiate_by_id_with_all_params_overload_1(self, client: GradientAI) -> None:
- action = client.volumes.actions.initiate_by_id(
+ action = client.gpu_droplets.volumes.actions.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -158,7 +158,7 @@ def test_method_initiate_by_id_with_all_params_overload_1(self, client: Gradient
@pytest.mark.skip()
@parametrize
def test_raw_response_initiate_by_id_overload_1(self, client: GradientAI) -> None:
- response = client.volumes.actions.with_raw_response.initiate_by_id(
+ response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -172,7 +172,7 @@ def test_raw_response_initiate_by_id_overload_1(self, client: GradientAI) -> Non
@pytest.mark.skip()
@parametrize
def test_streaming_response_initiate_by_id_overload_1(self, client: GradientAI) -> None:
- with client.volumes.actions.with_streaming_response.initiate_by_id(
+ with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -189,7 +189,7 @@ def test_streaming_response_initiate_by_id_overload_1(self, client: GradientAI)
@parametrize
def test_path_params_initiate_by_id_overload_1(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- client.volumes.actions.with_raw_response.initiate_by_id(
+ client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
volume_id="",
droplet_id=11612190,
type="attach",
@@ -198,7 +198,7 @@ def test_path_params_initiate_by_id_overload_1(self, client: GradientAI) -> None
@pytest.mark.skip()
@parametrize
def test_method_initiate_by_id_overload_2(self, client: GradientAI) -> None:
- action = client.volumes.actions.initiate_by_id(
+ action = client.gpu_droplets.volumes.actions.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -208,7 +208,7 @@ def test_method_initiate_by_id_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_initiate_by_id_with_all_params_overload_2(self, client: GradientAI) -> None:
- action = client.volumes.actions.initiate_by_id(
+ action = client.gpu_droplets.volumes.actions.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -221,7 +221,7 @@ def test_method_initiate_by_id_with_all_params_overload_2(self, client: Gradient
@pytest.mark.skip()
@parametrize
def test_raw_response_initiate_by_id_overload_2(self, client: GradientAI) -> None:
- response = client.volumes.actions.with_raw_response.initiate_by_id(
+ response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -235,7 +235,7 @@ def test_raw_response_initiate_by_id_overload_2(self, client: GradientAI) -> Non
@pytest.mark.skip()
@parametrize
def test_streaming_response_initiate_by_id_overload_2(self, client: GradientAI) -> None:
- with client.volumes.actions.with_streaming_response.initiate_by_id(
+ with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -252,7 +252,7 @@ def test_streaming_response_initiate_by_id_overload_2(self, client: GradientAI)
@parametrize
def test_path_params_initiate_by_id_overload_2(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- client.volumes.actions.with_raw_response.initiate_by_id(
+ client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
volume_id="",
droplet_id=11612190,
type="attach",
@@ -261,7 +261,7 @@ def test_path_params_initiate_by_id_overload_2(self, client: GradientAI) -> None
@pytest.mark.skip()
@parametrize
def test_method_initiate_by_id_overload_3(self, client: GradientAI) -> None:
- action = client.volumes.actions.initiate_by_id(
+ action = client.gpu_droplets.volumes.actions.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
size_gigabytes=16384,
type="attach",
@@ -271,7 +271,7 @@ def test_method_initiate_by_id_overload_3(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_initiate_by_id_with_all_params_overload_3(self, client: GradientAI) -> None:
- action = client.volumes.actions.initiate_by_id(
+ action = client.gpu_droplets.volumes.actions.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
size_gigabytes=16384,
type="attach",
@@ -284,7 +284,7 @@ def test_method_initiate_by_id_with_all_params_overload_3(self, client: Gradient
@pytest.mark.skip()
@parametrize
def test_raw_response_initiate_by_id_overload_3(self, client: GradientAI) -> None:
- response = client.volumes.actions.with_raw_response.initiate_by_id(
+ response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
size_gigabytes=16384,
type="attach",
@@ -298,7 +298,7 @@ def test_raw_response_initiate_by_id_overload_3(self, client: GradientAI) -> Non
@pytest.mark.skip()
@parametrize
def test_streaming_response_initiate_by_id_overload_3(self, client: GradientAI) -> None:
- with client.volumes.actions.with_streaming_response.initiate_by_id(
+ with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
size_gigabytes=16384,
type="attach",
@@ -315,7 +315,7 @@ def test_streaming_response_initiate_by_id_overload_3(self, client: GradientAI)
@parametrize
def test_path_params_initiate_by_id_overload_3(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- client.volumes.actions.with_raw_response.initiate_by_id(
+ client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
volume_id="",
size_gigabytes=16384,
type="attach",
@@ -324,7 +324,7 @@ def test_path_params_initiate_by_id_overload_3(self, client: GradientAI) -> None
@pytest.mark.skip()
@parametrize
def test_method_initiate_by_name_overload_1(self, client: GradientAI) -> None:
- action = client.volumes.actions.initiate_by_name(
+ action = client.gpu_droplets.volumes.actions.initiate_by_name(
droplet_id=11612190,
type="attach",
)
@@ -333,7 +333,7 @@ def test_method_initiate_by_name_overload_1(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_initiate_by_name_with_all_params_overload_1(self, client: GradientAI) -> None:
- action = client.volumes.actions.initiate_by_name(
+ action = client.gpu_droplets.volumes.actions.initiate_by_name(
droplet_id=11612190,
type="attach",
page=1,
@@ -346,7 +346,7 @@ def test_method_initiate_by_name_with_all_params_overload_1(self, client: Gradie
@pytest.mark.skip()
@parametrize
def test_raw_response_initiate_by_name_overload_1(self, client: GradientAI) -> None:
- response = client.volumes.actions.with_raw_response.initiate_by_name(
+ response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name(
droplet_id=11612190,
type="attach",
)
@@ -359,7 +359,7 @@ def test_raw_response_initiate_by_name_overload_1(self, client: GradientAI) -> N
@pytest.mark.skip()
@parametrize
def test_streaming_response_initiate_by_name_overload_1(self, client: GradientAI) -> None:
- with client.volumes.actions.with_streaming_response.initiate_by_name(
+ with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name(
droplet_id=11612190,
type="attach",
) as response:
@@ -374,7 +374,7 @@ def test_streaming_response_initiate_by_name_overload_1(self, client: GradientAI
@pytest.mark.skip()
@parametrize
def test_method_initiate_by_name_overload_2(self, client: GradientAI) -> None:
- action = client.volumes.actions.initiate_by_name(
+ action = client.gpu_droplets.volumes.actions.initiate_by_name(
droplet_id=11612190,
type="attach",
)
@@ -383,7 +383,7 @@ def test_method_initiate_by_name_overload_2(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_initiate_by_name_with_all_params_overload_2(self, client: GradientAI) -> None:
- action = client.volumes.actions.initiate_by_name(
+ action = client.gpu_droplets.volumes.actions.initiate_by_name(
droplet_id=11612190,
type="attach",
page=1,
@@ -395,7 +395,7 @@ def test_method_initiate_by_name_with_all_params_overload_2(self, client: Gradie
@pytest.mark.skip()
@parametrize
def test_raw_response_initiate_by_name_overload_2(self, client: GradientAI) -> None:
- response = client.volumes.actions.with_raw_response.initiate_by_name(
+ response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name(
droplet_id=11612190,
type="attach",
)
@@ -408,7 +408,7 @@ def test_raw_response_initiate_by_name_overload_2(self, client: GradientAI) -> N
@pytest.mark.skip()
@parametrize
def test_streaming_response_initiate_by_name_overload_2(self, client: GradientAI) -> None:
- with client.volumes.actions.with_streaming_response.initiate_by_name(
+ with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name(
droplet_id=11612190,
type="attach",
) as response:
@@ -429,7 +429,7 @@ class TestAsyncActions:
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.volumes.actions.retrieve(
+ action = await async_client.gpu_droplets.volumes.actions.retrieve(
action_id=36804636,
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
)
@@ -438,7 +438,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_retrieve_with_all_params(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.volumes.actions.retrieve(
+ action = await async_client.gpu_droplets.volumes.actions.retrieve(
action_id=36804636,
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
page=1,
@@ -449,7 +449,7 @@ async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.actions.with_raw_response.retrieve(
+ response = await async_client.gpu_droplets.volumes.actions.with_raw_response.retrieve(
action_id=36804636,
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
)
@@ -462,7 +462,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.actions.with_streaming_response.retrieve(
+ async with async_client.gpu_droplets.volumes.actions.with_streaming_response.retrieve(
action_id=36804636,
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
) as response:
@@ -478,7 +478,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- await async_client.volumes.actions.with_raw_response.retrieve(
+ await async_client.gpu_droplets.volumes.actions.with_raw_response.retrieve(
action_id=36804636,
volume_id="",
)
@@ -486,7 +486,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.volumes.actions.list(
+ action = await async_client.gpu_droplets.volumes.actions.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
)
assert_matches_type(ActionListResponse, action, path=["response"])
@@ -494,7 +494,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.volumes.actions.list(
+ action = await async_client.gpu_droplets.volumes.actions.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
page=1,
per_page=1,
@@ -504,7 +504,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.actions.with_raw_response.list(
+ response = await async_client.gpu_droplets.volumes.actions.with_raw_response.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
)
@@ -516,7 +516,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.actions.with_streaming_response.list(
+ async with async_client.gpu_droplets.volumes.actions.with_streaming_response.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
) as response:
assert not response.is_closed
@@ -531,14 +531,14 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N
@parametrize
async def test_path_params_list(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- await async_client.volumes.actions.with_raw_response.list(
+ await async_client.gpu_droplets.volumes.actions.with_raw_response.list(
volume_id="",
)
@pytest.mark.skip()
@parametrize
async def test_method_initiate_by_id_overload_1(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.volumes.actions.initiate_by_id(
+ action = await async_client.gpu_droplets.volumes.actions.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -548,7 +548,7 @@ async def test_method_initiate_by_id_overload_1(self, async_client: AsyncGradien
@pytest.mark.skip()
@parametrize
async def test_method_initiate_by_id_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.volumes.actions.initiate_by_id(
+ action = await async_client.gpu_droplets.volumes.actions.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -562,7 +562,7 @@ async def test_method_initiate_by_id_with_all_params_overload_1(self, async_clie
@pytest.mark.skip()
@parametrize
async def test_raw_response_initiate_by_id_overload_1(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.actions.with_raw_response.initiate_by_id(
+ response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -576,7 +576,7 @@ async def test_raw_response_initiate_by_id_overload_1(self, async_client: AsyncG
@pytest.mark.skip()
@parametrize
async def test_streaming_response_initiate_by_id_overload_1(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.actions.with_streaming_response.initiate_by_id(
+ async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -593,7 +593,7 @@ async def test_streaming_response_initiate_by_id_overload_1(self, async_client:
@parametrize
async def test_path_params_initiate_by_id_overload_1(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- await async_client.volumes.actions.with_raw_response.initiate_by_id(
+ await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
volume_id="",
droplet_id=11612190,
type="attach",
@@ -602,7 +602,7 @@ async def test_path_params_initiate_by_id_overload_1(self, async_client: AsyncGr
@pytest.mark.skip()
@parametrize
async def test_method_initiate_by_id_overload_2(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.volumes.actions.initiate_by_id(
+ action = await async_client.gpu_droplets.volumes.actions.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -612,7 +612,7 @@ async def test_method_initiate_by_id_overload_2(self, async_client: AsyncGradien
@pytest.mark.skip()
@parametrize
async def test_method_initiate_by_id_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.volumes.actions.initiate_by_id(
+ action = await async_client.gpu_droplets.volumes.actions.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -625,7 +625,7 @@ async def test_method_initiate_by_id_with_all_params_overload_2(self, async_clie
@pytest.mark.skip()
@parametrize
async def test_raw_response_initiate_by_id_overload_2(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.actions.with_raw_response.initiate_by_id(
+ response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -639,7 +639,7 @@ async def test_raw_response_initiate_by_id_overload_2(self, async_client: AsyncG
@pytest.mark.skip()
@parametrize
async def test_streaming_response_initiate_by_id_overload_2(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.actions.with_streaming_response.initiate_by_id(
+ async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
droplet_id=11612190,
type="attach",
@@ -656,7 +656,7 @@ async def test_streaming_response_initiate_by_id_overload_2(self, async_client:
@parametrize
async def test_path_params_initiate_by_id_overload_2(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- await async_client.volumes.actions.with_raw_response.initiate_by_id(
+ await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
volume_id="",
droplet_id=11612190,
type="attach",
@@ -665,7 +665,7 @@ async def test_path_params_initiate_by_id_overload_2(self, async_client: AsyncGr
@pytest.mark.skip()
@parametrize
async def test_method_initiate_by_id_overload_3(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.volumes.actions.initiate_by_id(
+ action = await async_client.gpu_droplets.volumes.actions.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
size_gigabytes=16384,
type="attach",
@@ -675,7 +675,7 @@ async def test_method_initiate_by_id_overload_3(self, async_client: AsyncGradien
@pytest.mark.skip()
@parametrize
async def test_method_initiate_by_id_with_all_params_overload_3(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.volumes.actions.initiate_by_id(
+ action = await async_client.gpu_droplets.volumes.actions.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
size_gigabytes=16384,
type="attach",
@@ -688,7 +688,7 @@ async def test_method_initiate_by_id_with_all_params_overload_3(self, async_clie
@pytest.mark.skip()
@parametrize
async def test_raw_response_initiate_by_id_overload_3(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.actions.with_raw_response.initiate_by_id(
+ response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
size_gigabytes=16384,
type="attach",
@@ -702,7 +702,7 @@ async def test_raw_response_initiate_by_id_overload_3(self, async_client: AsyncG
@pytest.mark.skip()
@parametrize
async def test_streaming_response_initiate_by_id_overload_3(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.actions.with_streaming_response.initiate_by_id(
+ async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
size_gigabytes=16384,
type="attach",
@@ -719,7 +719,7 @@ async def test_streaming_response_initiate_by_id_overload_3(self, async_client:
@parametrize
async def test_path_params_initiate_by_id_overload_3(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- await async_client.volumes.actions.with_raw_response.initiate_by_id(
+ await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id(
volume_id="",
size_gigabytes=16384,
type="attach",
@@ -728,7 +728,7 @@ async def test_path_params_initiate_by_id_overload_3(self, async_client: AsyncGr
@pytest.mark.skip()
@parametrize
async def test_method_initiate_by_name_overload_1(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.volumes.actions.initiate_by_name(
+ action = await async_client.gpu_droplets.volumes.actions.initiate_by_name(
droplet_id=11612190,
type="attach",
)
@@ -737,7 +737,7 @@ async def test_method_initiate_by_name_overload_1(self, async_client: AsyncGradi
@pytest.mark.skip()
@parametrize
async def test_method_initiate_by_name_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.volumes.actions.initiate_by_name(
+ action = await async_client.gpu_droplets.volumes.actions.initiate_by_name(
droplet_id=11612190,
type="attach",
page=1,
@@ -750,7 +750,7 @@ async def test_method_initiate_by_name_with_all_params_overload_1(self, async_cl
@pytest.mark.skip()
@parametrize
async def test_raw_response_initiate_by_name_overload_1(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.actions.with_raw_response.initiate_by_name(
+ response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name(
droplet_id=11612190,
type="attach",
)
@@ -763,7 +763,7 @@ async def test_raw_response_initiate_by_name_overload_1(self, async_client: Asyn
@pytest.mark.skip()
@parametrize
async def test_streaming_response_initiate_by_name_overload_1(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.actions.with_streaming_response.initiate_by_name(
+ async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name(
droplet_id=11612190,
type="attach",
) as response:
@@ -778,7 +778,7 @@ async def test_streaming_response_initiate_by_name_overload_1(self, async_client
@pytest.mark.skip()
@parametrize
async def test_method_initiate_by_name_overload_2(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.volumes.actions.initiate_by_name(
+ action = await async_client.gpu_droplets.volumes.actions.initiate_by_name(
droplet_id=11612190,
type="attach",
)
@@ -787,7 +787,7 @@ async def test_method_initiate_by_name_overload_2(self, async_client: AsyncGradi
@pytest.mark.skip()
@parametrize
async def test_method_initiate_by_name_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None:
- action = await async_client.volumes.actions.initiate_by_name(
+ action = await async_client.gpu_droplets.volumes.actions.initiate_by_name(
droplet_id=11612190,
type="attach",
page=1,
@@ -799,7 +799,7 @@ async def test_method_initiate_by_name_with_all_params_overload_2(self, async_cl
@pytest.mark.skip()
@parametrize
async def test_raw_response_initiate_by_name_overload_2(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.actions.with_raw_response.initiate_by_name(
+ response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name(
droplet_id=11612190,
type="attach",
)
@@ -812,7 +812,7 @@ async def test_raw_response_initiate_by_name_overload_2(self, async_client: Asyn
@pytest.mark.skip()
@parametrize
async def test_streaming_response_initiate_by_name_overload_2(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.actions.with_streaming_response.initiate_by_name(
+ async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name(
droplet_id=11612190,
type="attach",
) as response:
diff --git a/tests/api_resources/volumes/test_snapshots.py b/tests/api_resources/gpu_droplets/volumes/test_snapshots.py
similarity index 82%
rename from tests/api_resources/volumes/test_snapshots.py
rename to tests/api_resources/gpu_droplets/volumes/test_snapshots.py
index dccca462..4884d372 100644
--- a/tests/api_resources/volumes/test_snapshots.py
+++ b/tests/api_resources/gpu_droplets/volumes/test_snapshots.py
@@ -9,7 +9,7 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types.volumes import (
+from do_gradientai.types.gpu_droplets.volumes import (
SnapshotListResponse,
SnapshotCreateResponse,
SnapshotRetrieveResponse,
@@ -24,7 +24,7 @@ class TestSnapshots:
@pytest.mark.skip()
@parametrize
def test_method_create(self, client: GradientAI) -> None:
- snapshot = client.volumes.snapshots.create(
+ snapshot = client.gpu_droplets.volumes.snapshots.create(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
name="big-data-snapshot1475261774",
)
@@ -33,7 +33,7 @@ def test_method_create(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_create_with_all_params(self, client: GradientAI) -> None:
- snapshot = client.volumes.snapshots.create(
+ snapshot = client.gpu_droplets.volumes.snapshots.create(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
name="big-data-snapshot1475261774",
tags=["base-image", "prod"],
@@ -43,7 +43,7 @@ def test_method_create_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_create(self, client: GradientAI) -> None:
- response = client.volumes.snapshots.with_raw_response.create(
+ response = client.gpu_droplets.volumes.snapshots.with_raw_response.create(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
name="big-data-snapshot1475261774",
)
@@ -56,7 +56,7 @@ def test_raw_response_create(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_create(self, client: GradientAI) -> None:
- with client.volumes.snapshots.with_streaming_response.create(
+ with client.gpu_droplets.volumes.snapshots.with_streaming_response.create(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
name="big-data-snapshot1475261774",
) as response:
@@ -72,7 +72,7 @@ def test_streaming_response_create(self, client: GradientAI) -> None:
@parametrize
def test_path_params_create(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- client.volumes.snapshots.with_raw_response.create(
+ client.gpu_droplets.volumes.snapshots.with_raw_response.create(
volume_id="",
name="big-data-snapshot1475261774",
)
@@ -80,7 +80,7 @@ def test_path_params_create(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- snapshot = client.volumes.snapshots.retrieve(
+ snapshot = client.gpu_droplets.volumes.snapshots.retrieve(
"snapshot_id",
)
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
@@ -88,7 +88,7 @@ def test_method_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.volumes.snapshots.with_raw_response.retrieve(
+ response = client.gpu_droplets.volumes.snapshots.with_raw_response.retrieve(
"snapshot_id",
)
@@ -100,7 +100,7 @@ def test_raw_response_retrieve(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.volumes.snapshots.with_streaming_response.retrieve(
+ with client.gpu_droplets.volumes.snapshots.with_streaming_response.retrieve(
"snapshot_id",
) as response:
assert not response.is_closed
@@ -115,14 +115,14 @@ def test_streaming_response_retrieve(self, client: GradientAI) -> None:
@parametrize
def test_path_params_retrieve(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"):
- client.volumes.snapshots.with_raw_response.retrieve(
+ client.gpu_droplets.volumes.snapshots.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- snapshot = client.volumes.snapshots.list(
+ snapshot = client.gpu_droplets.volumes.snapshots.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
)
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
@@ -130,7 +130,7 @@ def test_method_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- snapshot = client.volumes.snapshots.list(
+ snapshot = client.gpu_droplets.volumes.snapshots.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
page=1,
per_page=1,
@@ -140,7 +140,7 @@ def test_method_list_with_all_params(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.volumes.snapshots.with_raw_response.list(
+ response = client.gpu_droplets.volumes.snapshots.with_raw_response.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
)
@@ -152,7 +152,7 @@ def test_raw_response_list(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.volumes.snapshots.with_streaming_response.list(
+ with client.gpu_droplets.volumes.snapshots.with_streaming_response.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
) as response:
assert not response.is_closed
@@ -167,14 +167,14 @@ def test_streaming_response_list(self, client: GradientAI) -> None:
@parametrize
def test_path_params_list(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- client.volumes.snapshots.with_raw_response.list(
+ client.gpu_droplets.volumes.snapshots.with_raw_response.list(
volume_id="",
)
@pytest.mark.skip()
@parametrize
def test_method_delete(self, client: GradientAI) -> None:
- snapshot = client.volumes.snapshots.delete(
+ snapshot = client.gpu_droplets.volumes.snapshots.delete(
"snapshot_id",
)
assert snapshot is None
@@ -182,7 +182,7 @@ def test_method_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_raw_response_delete(self, client: GradientAI) -> None:
- response = client.volumes.snapshots.with_raw_response.delete(
+ response = client.gpu_droplets.volumes.snapshots.with_raw_response.delete(
"snapshot_id",
)
@@ -194,7 +194,7 @@ def test_raw_response_delete(self, client: GradientAI) -> None:
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete(self, client: GradientAI) -> None:
- with client.volumes.snapshots.with_streaming_response.delete(
+ with client.gpu_droplets.volumes.snapshots.with_streaming_response.delete(
"snapshot_id",
) as response:
assert not response.is_closed
@@ -209,7 +209,7 @@ def test_streaming_response_delete(self, client: GradientAI) -> None:
@parametrize
def test_path_params_delete(self, client: GradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"):
- client.volumes.snapshots.with_raw_response.delete(
+ client.gpu_droplets.volumes.snapshots.with_raw_response.delete(
"",
)
@@ -222,7 +222,7 @@ class TestAsyncSnapshots:
@pytest.mark.skip()
@parametrize
async def test_method_create(self, async_client: AsyncGradientAI) -> None:
- snapshot = await async_client.volumes.snapshots.create(
+ snapshot = await async_client.gpu_droplets.volumes.snapshots.create(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
name="big-data-snapshot1475261774",
)
@@ -231,7 +231,7 @@ async def test_method_create(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_create_with_all_params(self, async_client: AsyncGradientAI) -> None:
- snapshot = await async_client.volumes.snapshots.create(
+ snapshot = await async_client.gpu_droplets.volumes.snapshots.create(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
name="big-data-snapshot1475261774",
tags=["base-image", "prod"],
@@ -241,7 +241,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradientAI
@pytest.mark.skip()
@parametrize
async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.snapshots.with_raw_response.create(
+ response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.create(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
name="big-data-snapshot1475261774",
)
@@ -254,7 +254,7 @@ async def test_raw_response_create(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.snapshots.with_streaming_response.create(
+ async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.create(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
name="big-data-snapshot1475261774",
) as response:
@@ -270,7 +270,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_create(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- await async_client.volumes.snapshots.with_raw_response.create(
+ await async_client.gpu_droplets.volumes.snapshots.with_raw_response.create(
volume_id="",
name="big-data-snapshot1475261774",
)
@@ -278,7 +278,7 @@ async def test_path_params_create(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- snapshot = await async_client.volumes.snapshots.retrieve(
+ snapshot = await async_client.gpu_droplets.volumes.snapshots.retrieve(
"snapshot_id",
)
assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"])
@@ -286,7 +286,7 @@ async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.snapshots.with_raw_response.retrieve(
+ response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.retrieve(
"snapshot_id",
)
@@ -298,7 +298,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> Non
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.snapshots.with_streaming_response.retrieve(
+ async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.retrieve(
"snapshot_id",
) as response:
assert not response.is_closed
@@ -313,14 +313,14 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI)
@parametrize
async def test_path_params_retrieve(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"):
- await async_client.volumes.snapshots.with_raw_response.retrieve(
+ await async_client.gpu_droplets.volumes.snapshots.with_raw_response.retrieve(
"",
)
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- snapshot = await async_client.volumes.snapshots.list(
+ snapshot = await async_client.gpu_droplets.volumes.snapshots.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
)
assert_matches_type(SnapshotListResponse, snapshot, path=["response"])
@@ -328,7 +328,7 @@ async def test_method_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- snapshot = await async_client.volumes.snapshots.list(
+ snapshot = await async_client.gpu_droplets.volumes.snapshots.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
page=1,
per_page=1,
@@ -338,7 +338,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradientAI)
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.snapshots.with_raw_response.list(
+ response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
)
@@ -350,7 +350,7 @@ async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.snapshots.with_streaming_response.list(
+ async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.list(
volume_id="7724db7c-e098-11e5-b522-000f53304e51",
) as response:
assert not response.is_closed
@@ -365,14 +365,14 @@ async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> N
@parametrize
async def test_path_params_list(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"):
- await async_client.volumes.snapshots.with_raw_response.list(
+ await async_client.gpu_droplets.volumes.snapshots.with_raw_response.list(
volume_id="",
)
@pytest.mark.skip()
@parametrize
async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
- snapshot = await async_client.volumes.snapshots.delete(
+ snapshot = await async_client.gpu_droplets.volumes.snapshots.delete(
"snapshot_id",
)
assert snapshot is None
@@ -380,7 +380,7 @@ async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.volumes.snapshots.with_raw_response.delete(
+ response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.delete(
"snapshot_id",
)
@@ -392,7 +392,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None:
- async with async_client.volumes.snapshots.with_streaming_response.delete(
+ async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.delete(
"snapshot_id",
) as response:
assert not response.is_closed
@@ -407,6 +407,6 @@ async def test_streaming_response_delete(self, async_client: AsyncGradientAI) ->
@parametrize
async def test_path_params_delete(self, async_client: AsyncGradientAI) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"):
- await async_client.volumes.snapshots.with_raw_response.delete(
+ await async_client.gpu_droplets.volumes.snapshots.with_raw_response.delete(
"",
)
diff --git a/tests/api_resources/test_account.py b/tests/api_resources/test_account.py
deleted file mode 100644
index f2dd39bf..00000000
--- a/tests/api_resources/test_account.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-import os
-from typing import Any, cast
-
-import pytest
-
-from tests.utils import assert_matches_type
-from do_gradientai import GradientAI, AsyncGradientAI
-from do_gradientai.types import AccountRetrieveResponse
-
-base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-
-
-class TestAccount:
- parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
-
- @pytest.mark.skip()
- @parametrize
- def test_method_retrieve(self, client: GradientAI) -> None:
- account = client.account.retrieve()
- assert_matches_type(AccountRetrieveResponse, account, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.account.with_raw_response.retrieve()
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- account = response.parse()
- assert_matches_type(AccountRetrieveResponse, account, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.account.with_streaming_response.retrieve() as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- account = response.parse()
- assert_matches_type(AccountRetrieveResponse, account, path=["response"])
-
- assert cast(Any, response.is_closed) is True
-
-
-class TestAsyncAccount:
- parametrize = pytest.mark.parametrize(
- "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
- )
-
- @pytest.mark.skip()
- @parametrize
- async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- account = await async_client.account.retrieve()
- assert_matches_type(AccountRetrieveResponse, account, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.account.with_raw_response.retrieve()
-
- assert response.is_closed is True
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- account = await response.parse()
- assert_matches_type(AccountRetrieveResponse, account, path=["response"])
-
- @pytest.mark.skip()
- @parametrize
- async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.account.with_streaming_response.retrieve() as response:
- assert not response.is_closed
- assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
- account = await response.parse()
- assert_matches_type(AccountRetrieveResponse, account, path=["response"])
-
- assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_droplets.py b/tests/api_resources/test_gpu_droplets.py
similarity index 65%
rename from tests/api_resources/test_droplets.py
rename to tests/api_resources/test_gpu_droplets.py
index e77cded1..cbc7e63b 100644
--- a/tests/api_resources/test_droplets.py
+++ b/tests/api_resources/test_gpu_droplets.py
@@ -10,35 +10,35 @@
from tests.utils import assert_matches_type
from do_gradientai import GradientAI, AsyncGradientAI
from do_gradientai.types import (
- DropletListResponse,
- DropletCreateResponse,
- DropletRetrieveResponse,
- DropletListKernelsResponse,
- DropletListFirewallsResponse,
- DropletListNeighborsResponse,
- DropletListSnapshotsResponse,
+ GPUDropletListResponse,
+ GPUDropletCreateResponse,
+ GPUDropletRetrieveResponse,
+ GPUDropletListKernelsResponse,
+ GPUDropletListFirewallsResponse,
+ GPUDropletListNeighborsResponse,
+ GPUDropletListSnapshotsResponse,
)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-class TestDroplets:
+class TestGPUDroplets:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
@pytest.mark.skip()
@parametrize
def test_method_create_overload_1(self, client: GradientAI) -> None:
- droplet = client.droplets.create(
+ gpu_droplet = client.gpu_droplets.create(
image="ubuntu-20-04-x64",
name="example.com",
size="s-1vcpu-1gb",
)
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> None:
- droplet = client.droplets.create(
+ gpu_droplet = client.gpu_droplets.create(
image="ubuntu-20-04-x64",
name="example.com",
size="s-1vcpu-1gb",
@@ -59,12 +59,12 @@ def test_method_create_with_all_params_overload_1(self, client: GradientAI) -> N
vpc_uuid="760e09ef-dc84-11e8-981e-3cfdfeaae000",
with_droplet_agent=True,
)
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_create_overload_1(self, client: GradientAI) -> None:
- response = client.droplets.with_raw_response.create(
+ response = client.gpu_droplets.with_raw_response.create(
image="ubuntu-20-04-x64",
name="example.com",
size="s-1vcpu-1gb",
@@ -72,13 +72,13 @@ def test_raw_response_create_overload_1(self, client: GradientAI) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_create_overload_1(self, client: GradientAI) -> None:
- with client.droplets.with_streaming_response.create(
+ with client.gpu_droplets.with_streaming_response.create(
image="ubuntu-20-04-x64",
name="example.com",
size="s-1vcpu-1gb",
@@ -86,25 +86,25 @@ def test_streaming_response_create_overload_1(self, client: GradientAI) -> None:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
def test_method_create_overload_2(self, client: GradientAI) -> None:
- droplet = client.droplets.create(
+ gpu_droplet = client.gpu_droplets.create(
image="ubuntu-20-04-x64",
names=["sub-01.example.com", "sub-02.example.com"],
size="s-1vcpu-1gb",
)
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> None:
- droplet = client.droplets.create(
+ gpu_droplet = client.gpu_droplets.create(
image="ubuntu-20-04-x64",
names=["sub-01.example.com", "sub-02.example.com"],
size="s-1vcpu-1gb",
@@ -125,12 +125,12 @@ def test_method_create_with_all_params_overload_2(self, client: GradientAI) -> N
vpc_uuid="760e09ef-dc84-11e8-981e-3cfdfeaae000",
with_droplet_agent=True,
)
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_create_overload_2(self, client: GradientAI) -> None:
- response = client.droplets.with_raw_response.create(
+ response = client.gpu_droplets.with_raw_response.create(
image="ubuntu-20-04-x64",
names=["sub-01.example.com", "sub-02.example.com"],
size="s-1vcpu-1gb",
@@ -138,13 +138,13 @@ def test_raw_response_create_overload_2(self, client: GradientAI) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_create_overload_2(self, client: GradientAI) -> None:
- with client.droplets.with_streaming_response.create(
+ with client.gpu_droplets.with_streaming_response.create(
image="ubuntu-20-04-x64",
names=["sub-01.example.com", "sub-02.example.com"],
size="s-1vcpu-1gb",
@@ -152,321 +152,321 @@ def test_streaming_response_create_overload_2(self, client: GradientAI) -> None:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
def test_method_retrieve(self, client: GradientAI) -> None:
- droplet = client.droplets.retrieve(
+ gpu_droplet = client.gpu_droplets.retrieve(
1,
)
- assert_matches_type(DropletRetrieveResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletRetrieveResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_retrieve(self, client: GradientAI) -> None:
- response = client.droplets.with_raw_response.retrieve(
+ response = client.gpu_droplets.with_raw_response.retrieve(
1,
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletRetrieveResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletRetrieveResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_retrieve(self, client: GradientAI) -> None:
- with client.droplets.with_streaming_response.retrieve(
+ with client.gpu_droplets.with_streaming_response.retrieve(
1,
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletRetrieveResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletRetrieveResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
def test_method_list(self, client: GradientAI) -> None:
- droplet = client.droplets.list()
- assert_matches_type(DropletListResponse, droplet, path=["response"])
+ gpu_droplet = client.gpu_droplets.list()
+ assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_with_all_params(self, client: GradientAI) -> None:
- droplet = client.droplets.list(
+ gpu_droplet = client.gpu_droplets.list(
name="name",
page=1,
per_page=1,
tag_name="tag_name",
type="droplets",
)
- assert_matches_type(DropletListResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_list(self, client: GradientAI) -> None:
- response = client.droplets.with_raw_response.list()
+ response = client.gpu_droplets.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletListResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_list(self, client: GradientAI) -> None:
- with client.droplets.with_streaming_response.list() as response:
+ with client.gpu_droplets.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletListResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
def test_method_delete(self, client: GradientAI) -> None:
- droplet = client.droplets.delete(
+ gpu_droplet = client.gpu_droplets.delete(
1,
)
- assert droplet is None
+ assert gpu_droplet is None
@pytest.mark.skip()
@parametrize
def test_raw_response_delete(self, client: GradientAI) -> None:
- response = client.droplets.with_raw_response.delete(
+ response = client.gpu_droplets.with_raw_response.delete(
1,
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert droplet is None
+ gpu_droplet = response.parse()
+ assert gpu_droplet is None
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete(self, client: GradientAI) -> None:
- with client.droplets.with_streaming_response.delete(
+ with client.gpu_droplets.with_streaming_response.delete(
1,
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert droplet is None
+ gpu_droplet = response.parse()
+ assert gpu_droplet is None
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
def test_method_delete_by_tag(self, client: GradientAI) -> None:
- droplet = client.droplets.delete_by_tag(
+ gpu_droplet = client.gpu_droplets.delete_by_tag(
tag_name="tag_name",
)
- assert droplet is None
+ assert gpu_droplet is None
@pytest.mark.skip()
@parametrize
def test_raw_response_delete_by_tag(self, client: GradientAI) -> None:
- response = client.droplets.with_raw_response.delete_by_tag(
+ response = client.gpu_droplets.with_raw_response.delete_by_tag(
tag_name="tag_name",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert droplet is None
+ gpu_droplet = response.parse()
+ assert gpu_droplet is None
@pytest.mark.skip()
@parametrize
def test_streaming_response_delete_by_tag(self, client: GradientAI) -> None:
- with client.droplets.with_streaming_response.delete_by_tag(
+ with client.gpu_droplets.with_streaming_response.delete_by_tag(
tag_name="tag_name",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert droplet is None
+ gpu_droplet = response.parse()
+ assert gpu_droplet is None
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
def test_method_list_firewalls(self, client: GradientAI) -> None:
- droplet = client.droplets.list_firewalls(
+ gpu_droplet = client.gpu_droplets.list_firewalls(
droplet_id=3164444,
)
- assert_matches_type(DropletListFirewallsResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_firewalls_with_all_params(self, client: GradientAI) -> None:
- droplet = client.droplets.list_firewalls(
+ gpu_droplet = client.gpu_droplets.list_firewalls(
droplet_id=3164444,
page=1,
per_page=1,
)
- assert_matches_type(DropletListFirewallsResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_list_firewalls(self, client: GradientAI) -> None:
- response = client.droplets.with_raw_response.list_firewalls(
+ response = client.gpu_droplets.with_raw_response.list_firewalls(
droplet_id=3164444,
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletListFirewallsResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_list_firewalls(self, client: GradientAI) -> None:
- with client.droplets.with_streaming_response.list_firewalls(
+ with client.gpu_droplets.with_streaming_response.list_firewalls(
droplet_id=3164444,
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletListFirewallsResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
def test_method_list_kernels(self, client: GradientAI) -> None:
- droplet = client.droplets.list_kernels(
+ gpu_droplet = client.gpu_droplets.list_kernels(
droplet_id=3164444,
)
- assert_matches_type(DropletListKernelsResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_kernels_with_all_params(self, client: GradientAI) -> None:
- droplet = client.droplets.list_kernels(
+ gpu_droplet = client.gpu_droplets.list_kernels(
droplet_id=3164444,
page=1,
per_page=1,
)
- assert_matches_type(DropletListKernelsResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_list_kernels(self, client: GradientAI) -> None:
- response = client.droplets.with_raw_response.list_kernels(
+ response = client.gpu_droplets.with_raw_response.list_kernels(
droplet_id=3164444,
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletListKernelsResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_list_kernels(self, client: GradientAI) -> None:
- with client.droplets.with_streaming_response.list_kernels(
+ with client.gpu_droplets.with_streaming_response.list_kernels(
droplet_id=3164444,
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletListKernelsResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
def test_method_list_neighbors(self, client: GradientAI) -> None:
- droplet = client.droplets.list_neighbors(
+ gpu_droplet = client.gpu_droplets.list_neighbors(
1,
)
- assert_matches_type(DropletListNeighborsResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListNeighborsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_list_neighbors(self, client: GradientAI) -> None:
- response = client.droplets.with_raw_response.list_neighbors(
+ response = client.gpu_droplets.with_raw_response.list_neighbors(
1,
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletListNeighborsResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletListNeighborsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_list_neighbors(self, client: GradientAI) -> None:
- with client.droplets.with_streaming_response.list_neighbors(
+ with client.gpu_droplets.with_streaming_response.list_neighbors(
1,
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletListNeighborsResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletListNeighborsResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
def test_method_list_snapshots(self, client: GradientAI) -> None:
- droplet = client.droplets.list_snapshots(
+ gpu_droplet = client.gpu_droplets.list_snapshots(
droplet_id=3164444,
)
- assert_matches_type(DropletListSnapshotsResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_method_list_snapshots_with_all_params(self, client: GradientAI) -> None:
- droplet = client.droplets.list_snapshots(
+ gpu_droplet = client.gpu_droplets.list_snapshots(
droplet_id=3164444,
page=1,
per_page=1,
)
- assert_matches_type(DropletListSnapshotsResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_raw_response_list_snapshots(self, client: GradientAI) -> None:
- response = client.droplets.with_raw_response.list_snapshots(
+ response = client.gpu_droplets.with_raw_response.list_snapshots(
droplet_id=3164444,
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletListSnapshotsResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
def test_streaming_response_list_snapshots(self, client: GradientAI) -> None:
- with client.droplets.with_streaming_response.list_snapshots(
+ with client.gpu_droplets.with_streaming_response.list_snapshots(
droplet_id=3164444,
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = response.parse()
- assert_matches_type(DropletListSnapshotsResponse, droplet, path=["response"])
+ gpu_droplet = response.parse()
+ assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
-class TestAsyncDroplets:
+class TestAsyncGPUDroplets:
parametrize = pytest.mark.parametrize(
"async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
)
@@ -474,17 +474,17 @@ class TestAsyncDroplets:
@pytest.mark.skip()
@parametrize
async def test_method_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.create(
+ gpu_droplet = await async_client.gpu_droplets.create(
image="ubuntu-20-04-x64",
name="example.com",
size="s-1vcpu-1gb",
)
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.create(
+ gpu_droplet = await async_client.gpu_droplets.create(
image="ubuntu-20-04-x64",
name="example.com",
size="s-1vcpu-1gb",
@@ -505,12 +505,12 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
vpc_uuid="760e09ef-dc84-11e8-981e-3cfdfeaae000",
with_droplet_agent=True,
)
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.with_raw_response.create(
+ response = await async_client.gpu_droplets.with_raw_response.create(
image="ubuntu-20-04-x64",
name="example.com",
size="s-1vcpu-1gb",
@@ -518,13 +518,13 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradientA
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create_overload_1(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.with_streaming_response.create(
+ async with async_client.gpu_droplets.with_streaming_response.create(
image="ubuntu-20-04-x64",
name="example.com",
size="s-1vcpu-1gb",
@@ -532,25 +532,25 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
async def test_method_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.create(
+ gpu_droplet = await async_client.gpu_droplets.create(
image="ubuntu-20-04-x64",
names=["sub-01.example.com", "sub-02.example.com"],
size="s-1vcpu-1gb",
)
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.create(
+ gpu_droplet = await async_client.gpu_droplets.create(
image="ubuntu-20-04-x64",
names=["sub-01.example.com", "sub-02.example.com"],
size="s-1vcpu-1gb",
@@ -571,12 +571,12 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
vpc_uuid="760e09ef-dc84-11e8-981e-3cfdfeaae000",
with_droplet_agent=True,
)
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.with_raw_response.create(
+ response = await async_client.gpu_droplets.with_raw_response.create(
image="ubuntu-20-04-x64",
names=["sub-01.example.com", "sub-02.example.com"],
size="s-1vcpu-1gb",
@@ -584,13 +584,13 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradientA
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_create_overload_2(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.with_streaming_response.create(
+ async with async_client.gpu_droplets.with_streaming_response.create(
image="ubuntu-20-04-x64",
names=["sub-01.example.com", "sub-02.example.com"],
size="s-1vcpu-1gb",
@@ -598,315 +598,315 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletCreateResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
async def test_method_retrieve(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.retrieve(
+ gpu_droplet = await async_client.gpu_droplets.retrieve(
1,
)
- assert_matches_type(DropletRetrieveResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletRetrieveResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.with_raw_response.retrieve(
+ response = await async_client.gpu_droplets.with_raw_response.retrieve(
1,
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletRetrieveResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletRetrieveResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_retrieve(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.with_streaming_response.retrieve(
+ async with async_client.gpu_droplets.with_streaming_response.retrieve(
1,
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletRetrieveResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletRetrieveResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
async def test_method_list(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.list()
- assert_matches_type(DropletListResponse, droplet, path=["response"])
+ gpu_droplet = await async_client.gpu_droplets.list()
+ assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.list(
+ gpu_droplet = await async_client.gpu_droplets.list(
name="name",
page=1,
per_page=1,
tag_name="tag_name",
type="droplets",
)
- assert_matches_type(DropletListResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_list(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.with_raw_response.list()
+ response = await async_client.gpu_droplets.with_raw_response.list()
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletListResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.with_streaming_response.list() as response:
+ async with async_client.gpu_droplets.with_streaming_response.list() as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletListResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
async def test_method_delete(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.delete(
+ gpu_droplet = await async_client.gpu_droplets.delete(
1,
)
- assert droplet is None
+ assert gpu_droplet is None
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.with_raw_response.delete(
+ response = await async_client.gpu_droplets.with_raw_response.delete(
1,
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert droplet is None
+ gpu_droplet = await response.parse()
+ assert gpu_droplet is None
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.with_streaming_response.delete(
+ async with async_client.gpu_droplets.with_streaming_response.delete(
1,
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert droplet is None
+ gpu_droplet = await response.parse()
+ assert gpu_droplet is None
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
async def test_method_delete_by_tag(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.delete_by_tag(
+ gpu_droplet = await async_client.gpu_droplets.delete_by_tag(
tag_name="tag_name",
)
- assert droplet is None
+ assert gpu_droplet is None
@pytest.mark.skip()
@parametrize
async def test_raw_response_delete_by_tag(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.with_raw_response.delete_by_tag(
+ response = await async_client.gpu_droplets.with_raw_response.delete_by_tag(
tag_name="tag_name",
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert droplet is None
+ gpu_droplet = await response.parse()
+ assert gpu_droplet is None
@pytest.mark.skip()
@parametrize
async def test_streaming_response_delete_by_tag(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.with_streaming_response.delete_by_tag(
+ async with async_client.gpu_droplets.with_streaming_response.delete_by_tag(
tag_name="tag_name",
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert droplet is None
+ gpu_droplet = await response.parse()
+ assert gpu_droplet is None
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
async def test_method_list_firewalls(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.list_firewalls(
+ gpu_droplet = await async_client.gpu_droplets.list_firewalls(
droplet_id=3164444,
)
- assert_matches_type(DropletListFirewallsResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_firewalls_with_all_params(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.list_firewalls(
+ gpu_droplet = await async_client.gpu_droplets.list_firewalls(
droplet_id=3164444,
page=1,
per_page=1,
)
- assert_matches_type(DropletListFirewallsResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_list_firewalls(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.with_raw_response.list_firewalls(
+ response = await async_client.gpu_droplets.with_raw_response.list_firewalls(
droplet_id=3164444,
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletListFirewallsResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list_firewalls(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.with_streaming_response.list_firewalls(
+ async with async_client.gpu_droplets.with_streaming_response.list_firewalls(
droplet_id=3164444,
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletListFirewallsResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
async def test_method_list_kernels(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.list_kernels(
+ gpu_droplet = await async_client.gpu_droplets.list_kernels(
droplet_id=3164444,
)
- assert_matches_type(DropletListKernelsResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_kernels_with_all_params(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.list_kernels(
+ gpu_droplet = await async_client.gpu_droplets.list_kernels(
droplet_id=3164444,
page=1,
per_page=1,
)
- assert_matches_type(DropletListKernelsResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_list_kernels(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.with_raw_response.list_kernels(
+ response = await async_client.gpu_droplets.with_raw_response.list_kernels(
droplet_id=3164444,
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletListKernelsResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list_kernels(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.with_streaming_response.list_kernels(
+ async with async_client.gpu_droplets.with_streaming_response.list_kernels(
droplet_id=3164444,
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletListKernelsResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
async def test_method_list_neighbors(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.list_neighbors(
+ gpu_droplet = await async_client.gpu_droplets.list_neighbors(
1,
)
- assert_matches_type(DropletListNeighborsResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListNeighborsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_list_neighbors(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.with_raw_response.list_neighbors(
+ response = await async_client.gpu_droplets.with_raw_response.list_neighbors(
1,
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletListNeighborsResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletListNeighborsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list_neighbors(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.with_streaming_response.list_neighbors(
+ async with async_client.gpu_droplets.with_streaming_response.list_neighbors(
1,
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletListNeighborsResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletListNeighborsResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True
@pytest.mark.skip()
@parametrize
async def test_method_list_snapshots(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.list_snapshots(
+ gpu_droplet = await async_client.gpu_droplets.list_snapshots(
droplet_id=3164444,
)
- assert_matches_type(DropletListSnapshotsResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_method_list_snapshots_with_all_params(self, async_client: AsyncGradientAI) -> None:
- droplet = await async_client.droplets.list_snapshots(
+ gpu_droplet = await async_client.gpu_droplets.list_snapshots(
droplet_id=3164444,
page=1,
per_page=1,
)
- assert_matches_type(DropletListSnapshotsResponse, droplet, path=["response"])
+ assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_raw_response_list_snapshots(self, async_client: AsyncGradientAI) -> None:
- response = await async_client.droplets.with_raw_response.list_snapshots(
+ response = await async_client.gpu_droplets.with_raw_response.list_snapshots(
droplet_id=3164444,
)
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletListSnapshotsResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"])
@pytest.mark.skip()
@parametrize
async def test_streaming_response_list_snapshots(self, async_client: AsyncGradientAI) -> None:
- async with async_client.droplets.with_streaming_response.list_snapshots(
+ async with async_client.gpu_droplets.with_streaming_response.list_snapshots(
droplet_id=3164444,
) as response:
assert not response.is_closed
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- droplet = await response.parse()
- assert_matches_type(DropletListSnapshotsResponse, droplet, path=["response"])
+ gpu_droplet = await response.parse()
+ assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"])
assert cast(Any, response.is_closed) is True