From 835aa7c204f5def64cdcd8b863581fd6a1ea37b6 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Sat, 9 Aug 2025 05:13:51 +0000
Subject: [PATCH 1/4] chore: update @stainless-api/prism-cli to v5.15.0

---
 scripts/mock | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/scripts/mock b/scripts/mock
index d2814ae6..0b28f6ea 100755
--- a/scripts/mock
+++ b/scripts/mock
@@ -21,7 +21,7 @@ echo "==> Starting mock server with URL ${URL}"
 
 # Run prism mock on the given spec
 if [ "$1" == "--daemon" ]; then
-  npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" &> .prism.log &
+  npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL" &> .prism.log &
 
   # Wait for server to come online
   echo -n "Waiting for server"
@@ -37,5 +37,5 @@ if [ "$1" == "--daemon" ]; then
 
   echo
 else
-  npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL"
+  npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL"
 fi

From c32441201c3156cc4fe5b400a4f396eaf19ecaad Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Sat, 9 Aug 2025 05:20:15 +0000
Subject: [PATCH 2/4] chore(internal): update comment in script

---
 scripts/test | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/test b/scripts/test
index 2b878456..dbeda2d2 100755
--- a/scripts/test
+++ b/scripts/test
@@ -43,7 +43,7 @@ elif ! prism_is_running ; then
   echo -e "To run the server, pass in the path or url of your OpenAPI"
   echo -e "spec to the prism command:"
   echo
-  echo -e " \$ ${YELLOW}npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml${NC}"
+  echo -e " \$ ${YELLOW}npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock path/to/your.openapi.yml${NC}"
   echo
 
   exit 1

From 4757cc594565cf8500b4087205e6eb5fd8c5d5c5 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 12 Aug 2025 02:21:33 +0000
Subject: [PATCH 3/4] chore(internal): codegen related update

---
 .../agents/chat/test_completions.py | 32 +--
 .../evaluation_metrics/anthropic/test_keys.py | 104 ++++-----
 .../evaluation_metrics/openai/test_keys.py | 104 ++++-----
 .../agents/evaluation_metrics/test_models.py | 16 +-
 .../evaluation_metrics/test_workspaces.py | 96 ++++-----
 .../workspaces/test_agents.py | 40 ++--
 tests/api_resources/agents/test_api_keys.py | 92 ++++----
 .../agents/test_evaluation_datasets.py | 32 +--
 .../agents/test_evaluation_metrics.py | 28 +--
 .../agents/test_evaluation_runs.py | 68 +++---
 .../agents/test_evaluation_test_cases.py | 88 ++++----
 tests/api_resources/agents/test_functions.py | 56 ++---
 .../agents/test_knowledge_bases.py | 48 ++---
 tests/api_resources/agents/test_routes.py | 72 +++----
 tests/api_resources/agents/test_versions.py | 40 ++--
 tests/api_resources/chat/test_completions.py | 32 +--
 .../databases/schema_registry/test_config.py | 64 +++---
 .../gpu_droplets/account/test_keys.py | 68 +++---
 .../gpu_droplets/firewalls/test_droplets.py | 32 +--
 .../gpu_droplets/firewalls/test_rules.py | 40 ++--
 .../gpu_droplets/firewalls/test_tags.py | 32 +--
 .../gpu_droplets/floating_ips/test_actions.py | 64 +++---
 .../gpu_droplets/images/test_actions.py | 48 ++---
 .../load_balancers/test_droplets.py | 32 +--
 .../load_balancers/test_forwarding_rules.py | 32 +--
 .../gpu_droplets/test_actions.py | 200 +++++++++---------
 .../gpu_droplets/test_autoscale.py | 140 ++++++------
.../gpu_droplets/test_backups.py | 56 ++--- .../test_destroy_with_associated_resources.py | 64 +++--- .../gpu_droplets/test_firewalls.py | 84 ++++---- .../gpu_droplets/test_floating_ips.py | 76 +++---- .../api_resources/gpu_droplets/test_images.py | 72 +++---- .../gpu_droplets/test_load_balancers.py | 136 ++++++------ .../api_resources/gpu_droplets/test_sizes.py | 16 +- .../gpu_droplets/test_snapshots.py | 40 ++-- .../gpu_droplets/test_volumes.py | 96 ++++----- .../gpu_droplets/volumes/test_actions.py | 132 ++++++------ .../gpu_droplets/volumes/test_snapshots.py | 72 +++---- .../api_resources/inference/test_api_keys.py | 84 ++++---- .../knowledge_bases/test_data_sources.py | 56 ++--- .../knowledge_bases/test_indexing_jobs.py | 84 ++++---- .../models/providers/test_anthropic.py | 104 ++++----- .../models/providers/test_openai.py | 104 ++++----- tests/api_resources/test_agents.py | 104 ++++----- tests/api_resources/test_gpu_droplets.py | 144 ++++++------- tests/api_resources/test_knowledge_bases.py | 84 ++++---- tests/api_resources/test_models.py | 16 +- tests/api_resources/test_regions.py | 16 +- 48 files changed, 1670 insertions(+), 1670 deletions(-) diff --git a/tests/api_resources/agents/chat/test_completions.py b/tests/api_resources/agents/chat/test_completions.py index 474c11c9..a0df0e6f 100644 --- a/tests/api_resources/agents/chat/test_completions.py +++ b/tests/api_resources/agents/chat/test_completions.py @@ -17,7 +17,7 @@ class TestCompletions: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_1(self, client: Gradient) -> None: completion = client.agents.chat.completions.create( @@ -31,7 +31,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None: ) assert_matches_type(CompletionCreateResponse, completion, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None: completion = client.agents.chat.completions.create( @@ -71,7 +71,7 @@ def test_method_create_with_all_params_overload_1(self, client: Gradient) -> Non ) assert_matches_type(CompletionCreateResponse, completion, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.agents.chat.completions.with_raw_response.create( @@ -89,7 +89,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None: completion = response.parse() assert_matches_type(CompletionCreateResponse, completion, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.agents.chat.completions.with_streaming_response.create( @@ -109,7 +109,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_2(self, client: Gradient) -> None: completion_stream = client.agents.chat.completions.create( @@ -124,7 +124,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None: ) completion_stream.response.close() - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None: completion_stream = client.agents.chat.completions.create( @@ -164,7 +164,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non ) completion_stream.response.close() - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.agents.chat.completions.with_raw_response.create( @@ -182,7 +182,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None: stream = response.parse() stream.close() - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.agents.chat.completions.with_streaming_response.create( @@ -212,7 +212,7 @@ class TestAsyncCompletions: ids=["loose", "strict", "aiohttp"], ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: completion = await async_client.agents.chat.completions.create( @@ -226,7 +226,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No ) assert_matches_type(CompletionCreateResponse, completion, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: completion = await async_client.agents.chat.completions.create( @@ -266,7 +266,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn ) assert_matches_type(CompletionCreateResponse, completion, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.agents.chat.completions.with_raw_response.create( @@ -284,7 +284,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) completion = await response.parse() assert_matches_type(CompletionCreateResponse, completion, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.agents.chat.completions.with_streaming_response.create( @@ -304,7 +304,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: completion_stream = await async_client.agents.chat.completions.create( @@ -319,7 +319,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No ) await completion_stream.response.aclose() - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: completion_stream = await async_client.agents.chat.completions.create( @@ -359,7 +359,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn ) await 
completion_stream.response.aclose() - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.agents.chat.completions.with_raw_response.create( @@ -377,7 +377,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) stream = await response.parse() await stream.close() - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.agents.chat.completions.with_streaming_response.create( diff --git a/tests/api_resources/agents/evaluation_metrics/anthropic/test_keys.py b/tests/api_resources/agents/evaluation_metrics/anthropic/test_keys.py index a8ca5724..b6b461e6 100644 --- a/tests/api_resources/agents/evaluation_metrics/anthropic/test_keys.py +++ b/tests/api_resources/agents/evaluation_metrics/anthropic/test_keys.py @@ -24,13 +24,13 @@ class TestKeys: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.create() assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.create( @@ -39,7 +39,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.create() @@ -49,7 +49,7 @@ def test_raw_response_create(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.create() as response: @@ -61,7 +61,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.retrieve( @@ -69,7 +69,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(KeyRetrieveResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.retrieve( @@ -81,7 +81,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyRetrieveResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> 
None: with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.retrieve( @@ -95,7 +95,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -103,7 +103,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.update( @@ -111,7 +111,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.update( @@ -122,7 +122,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.update( @@ -134,7 +134,7 @@ def test_raw_response_update(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.update( @@ -148,7 +148,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): @@ -156,13 +156,13 @@ def test_path_params_update(self, client: Gradient) -> None: path_api_key_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.list( @@ -171,7 +171,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.list() @@ -181,7 +181,7 @@ def test_raw_response_list(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are 
disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.list() as response: @@ -193,7 +193,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.delete( @@ -201,7 +201,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert_matches_type(KeyDeleteResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.delete( @@ -213,7 +213,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyDeleteResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.delete( @@ -227,7 +227,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -235,7 +235,7 @@ def test_path_params_delete(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_agents(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.list_agents( @@ -243,7 +243,7 @@ def test_method_list_agents(self, client: Gradient) -> None: ) assert_matches_type(KeyListAgentsResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_agents_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.anthropic.keys.list_agents( @@ -253,7 +253,7 @@ def test_method_list_agents_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(KeyListAgentsResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_agents(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.anthropic.keys.with_raw_response.list_agents( @@ -265,7 +265,7 @@ def test_raw_response_list_agents(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyListAgentsResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list_agents(self, client: Gradient) -> None: with client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.list_agents( @@ -279,7 +279,7 @@ def test_streaming_response_list_agents(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_list_agents(self, client: Gradient) -> None: with 
pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): @@ -293,13 +293,13 @@ class TestAsyncKeys: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.create() assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.create( @@ -308,7 +308,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.create() @@ -318,7 +318,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: key = await response.parse() assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.create() as response: @@ -330,7 +330,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.retrieve( @@ -338,7 +338,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(KeyRetrieveResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.retrieve( @@ -350,7 +350,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: key = await response.parse() assert_matches_type(KeyRetrieveResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.retrieve( @@ -364,7 +364,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -372,7 +372,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.update( @@ -380,7 +380,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.update( @@ -391,7 +391,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.update( @@ -403,7 +403,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: key = await response.parse() assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.update( @@ -417,7 +417,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): @@ -425,13 +425,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: path_api_key_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.list( @@ -440,7 +440,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.list() @@ -450,7 +450,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: key = await response.parse() assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.list() as response: @@ -462,7 +462,7 @@ async def 
test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.delete( @@ -470,7 +470,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert_matches_type(KeyDeleteResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.delete( @@ -482,7 +482,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: key = await response.parse() assert_matches_type(KeyDeleteResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.delete( @@ -496,7 +496,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -504,7 +504,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_agents(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.list_agents( @@ -512,7 +512,7 @@ async def test_method_list_agents(self, async_client: AsyncGradient) -> None: ) assert_matches_type(KeyListAgentsResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_agents_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.anthropic.keys.list_agents( @@ -522,7 +522,7 @@ async def test_method_list_agents_with_all_params(self, async_client: AsyncGradi ) assert_matches_type(KeyListAgentsResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.anthropic.keys.with_raw_response.list_agents( @@ -534,7 +534,7 @@ async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> No key = await response.parse() assert_matches_type(KeyListAgentsResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_agents(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.anthropic.keys.with_streaming_response.list_agents( @@ -548,7 +548,7 @@ async def test_streaming_response_list_agents(self, async_client: AsyncGradient) assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list_agents(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): diff --git a/tests/api_resources/agents/evaluation_metrics/openai/test_keys.py b/tests/api_resources/agents/evaluation_metrics/openai/test_keys.py index 5a22b1bc..da5cf8e1 100644 --- a/tests/api_resources/agents/evaluation_metrics/openai/test_keys.py +++ b/tests/api_resources/agents/evaluation_metrics/openai/test_keys.py @@ -24,13 +24,13 @@ class TestKeys: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.create() assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.create( @@ -39,7 +39,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.openai.keys.with_raw_response.create() @@ -49,7 +49,7 @@ def test_raw_response_create(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.evaluation_metrics.openai.keys.with_streaming_response.create() as response: @@ -61,7 +61,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.retrieve( @@ -69,7 +69,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(KeyRetrieveResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.openai.keys.with_raw_response.retrieve( @@ -81,7 +81,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyRetrieveResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.agents.evaluation_metrics.openai.keys.with_streaming_response.retrieve( @@ -95,7 +95,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -103,7 +103,7 @@ 
def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.update( @@ -111,7 +111,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.update( @@ -122,7 +122,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.openai.keys.with_raw_response.update( @@ -134,7 +134,7 @@ def test_raw_response_update(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.evaluation_metrics.openai.keys.with_streaming_response.update( @@ -148,7 +148,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): @@ -156,13 +156,13 @@ def test_path_params_update(self, client: Gradient) -> None: path_api_key_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.list( @@ -171,7 +171,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.openai.keys.with_raw_response.list() @@ -181,7 +181,7 @@ def test_raw_response_list(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.evaluation_metrics.openai.keys.with_streaming_response.list() as response: @@ -193,7 +193,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.delete( @@ 
-201,7 +201,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert_matches_type(KeyDeleteResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.openai.keys.with_raw_response.delete( @@ -213,7 +213,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyDeleteResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.agents.evaluation_metrics.openai.keys.with_streaming_response.delete( @@ -227,7 +227,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -235,7 +235,7 @@ def test_path_params_delete(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_agents(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.list_agents( @@ -243,7 +243,7 @@ def test_method_list_agents(self, client: Gradient) -> None: ) assert_matches_type(KeyListAgentsResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_agents_with_all_params(self, client: Gradient) -> None: key = client.agents.evaluation_metrics.openai.keys.list_agents( @@ -253,7 +253,7 @@ def test_method_list_agents_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(KeyListAgentsResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_agents(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.openai.keys.with_raw_response.list_agents( @@ -265,7 +265,7 @@ def test_raw_response_list_agents(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyListAgentsResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list_agents(self, client: Gradient) -> None: with client.agents.evaluation_metrics.openai.keys.with_streaming_response.list_agents( @@ -279,7 +279,7 @@ def test_streaming_response_list_agents(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_list_agents(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): @@ -293,13 +293,13 @@ class TestAsyncKeys: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.create() assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.create( @@ -308,7 +308,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.create() @@ -318,7 +318,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: key = await response.parse() assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.create() as response: @@ -330,7 +330,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.retrieve( @@ -338,7 +338,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(KeyRetrieveResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.retrieve( @@ -350,7 +350,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: key = await response.parse() assert_matches_type(KeyRetrieveResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.retrieve( @@ -364,7 +364,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -372,7 +372,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.update( @@ -380,7 +380,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: key = await 
async_client.agents.evaluation_metrics.openai.keys.update( @@ -391,7 +391,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.update( @@ -403,7 +403,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: key = await response.parse() assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.update( @@ -417,7 +417,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): @@ -425,13 +425,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: path_api_key_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.list( @@ -440,7 +440,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.list() @@ -450,7 +450,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: key = await response.parse() assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.list() as response: @@ -462,7 +462,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.delete( @@ -470,7 +470,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert_matches_type(KeyDeleteResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize 
async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.delete( @@ -482,7 +482,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: key = await response.parse() assert_matches_type(KeyDeleteResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.delete( @@ -496,7 +496,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -504,7 +504,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_agents(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.list_agents( @@ -512,7 +512,7 @@ async def test_method_list_agents(self, async_client: AsyncGradient) -> None: ) assert_matches_type(KeyListAgentsResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_agents_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.agents.evaluation_metrics.openai.keys.list_agents( @@ -522,7 +522,7 @@ async def test_method_list_agents_with_all_params(self, async_client: AsyncGradi ) assert_matches_type(KeyListAgentsResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.openai.keys.with_raw_response.list_agents( @@ -534,7 +534,7 @@ async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> No key = await response.parse() assert_matches_type(KeyListAgentsResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_agents(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.openai.keys.with_streaming_response.list_agents( @@ -548,7 +548,7 @@ async def test_streaming_response_list_agents(self, async_client: AsyncGradient) assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list_agents(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): diff --git a/tests/api_resources/agents/evaluation_metrics/test_models.py b/tests/api_resources/agents/evaluation_metrics/test_models.py index 624e5288..677b3383 100644 --- a/tests/api_resources/agents/evaluation_metrics/test_models.py +++ b/tests/api_resources/agents/evaluation_metrics/test_models.py @@ -17,13 +17,13 @@ class TestModels: parametrize = pytest.mark.parametrize("client", 
[False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: model = client.agents.evaluation_metrics.models.list() assert_matches_type(ModelListResponse, model, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: model = client.agents.evaluation_metrics.models.list( @@ -34,7 +34,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(ModelListResponse, model, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.models.with_raw_response.list() @@ -44,7 +44,7 @@ def test_raw_response_list(self, client: Gradient) -> None: model = response.parse() assert_matches_type(ModelListResponse, model, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.evaluation_metrics.models.with_streaming_response.list() as response: @@ -62,13 +62,13 @@ class TestAsyncModels: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: model = await async_client.agents.evaluation_metrics.models.list() assert_matches_type(ModelListResponse, model, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: model = await async_client.agents.evaluation_metrics.models.list( @@ -79,7 +79,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(ModelListResponse, model, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.models.with_raw_response.list() @@ -89,7 +89,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: model = await response.parse() assert_matches_type(ModelListResponse, model, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.models.with_streaming_response.list() as response: diff --git a/tests/api_resources/agents/evaluation_metrics/test_workspaces.py b/tests/api_resources/agents/evaluation_metrics/test_workspaces.py index f326c1e3..3acede09 100644 --- a/tests/api_resources/agents/evaluation_metrics/test_workspaces.py +++ b/tests/api_resources/agents/evaluation_metrics/test_workspaces.py @@ -24,13 +24,13 @@ class TestWorkspaces: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: workspace = 
client.agents.evaluation_metrics.workspaces.create() assert_matches_type(WorkspaceCreateResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.create( @@ -40,7 +40,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(WorkspaceCreateResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.with_raw_response.create() @@ -50,7 +50,7 @@ def test_raw_response_create(self, client: Gradient) -> None: workspace = response.parse() assert_matches_type(WorkspaceCreateResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.with_streaming_response.create() as response: @@ -62,7 +62,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.retrieve( @@ -70,7 +70,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(WorkspaceRetrieveResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.with_raw_response.retrieve( @@ -82,7 +82,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: workspace = response.parse() assert_matches_type(WorkspaceRetrieveResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.with_streaming_response.retrieve( @@ -96,7 +96,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): @@ -104,7 +104,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.update( @@ -112,7 +112,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(WorkspaceUpdateResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.update( @@ -123,7 +123,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) 
assert_matches_type(WorkspaceUpdateResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.with_raw_response.update( @@ -135,7 +135,7 @@ def test_raw_response_update(self, client: Gradient) -> None: workspace = response.parse() assert_matches_type(WorkspaceUpdateResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.with_streaming_response.update( @@ -149,7 +149,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_workspace_uuid` but received ''"): @@ -157,13 +157,13 @@ def test_path_params_update(self, client: Gradient) -> None: path_workspace_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.list() assert_matches_type(WorkspaceListResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.with_raw_response.list() @@ -173,7 +173,7 @@ def test_raw_response_list(self, client: Gradient) -> None: workspace = response.parse() assert_matches_type(WorkspaceListResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.with_streaming_response.list() as response: @@ -185,7 +185,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.delete( @@ -193,7 +193,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert_matches_type(WorkspaceDeleteResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.with_raw_response.delete( @@ -205,7 +205,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: workspace = response.parse() assert_matches_type(WorkspaceDeleteResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.with_streaming_response.delete( @@ -219,7 +219,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize 
def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): @@ -227,7 +227,7 @@ def test_path_params_delete(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_evaluation_test_cases(self, client: Gradient) -> None: workspace = client.agents.evaluation_metrics.workspaces.list_evaluation_test_cases( @@ -235,7 +235,7 @@ def test_method_list_evaluation_test_cases(self, client: Gradient) -> None: ) assert_matches_type(WorkspaceListEvaluationTestCasesResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_evaluation_test_cases(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.with_raw_response.list_evaluation_test_cases( @@ -247,7 +247,7 @@ def test_raw_response_list_evaluation_test_cases(self, client: Gradient) -> None workspace = response.parse() assert_matches_type(WorkspaceListEvaluationTestCasesResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list_evaluation_test_cases(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.with_streaming_response.list_evaluation_test_cases( @@ -261,7 +261,7 @@ def test_streaming_response_list_evaluation_test_cases(self, client: Gradient) - assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_list_evaluation_test_cases(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): @@ -275,13 +275,13 @@ class TestAsyncWorkspaces: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: workspace = await async_client.agents.evaluation_metrics.workspaces.create() assert_matches_type(WorkspaceCreateResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: workspace = await async_client.agents.evaluation_metrics.workspaces.create( @@ -291,7 +291,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(WorkspaceCreateResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.create() @@ -301,7 +301,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: workspace = await response.parse() assert_matches_type(WorkspaceCreateResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.create() as response: @@ 
-313,7 +313,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: workspace = await async_client.agents.evaluation_metrics.workspaces.retrieve( @@ -321,7 +321,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(WorkspaceRetrieveResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.retrieve( @@ -333,7 +333,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: workspace = await response.parse() assert_matches_type(WorkspaceRetrieveResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.retrieve( @@ -347,7 +347,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): @@ -355,7 +355,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: workspace = await async_client.agents.evaluation_metrics.workspaces.update( @@ -363,7 +363,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(WorkspaceUpdateResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: workspace = await async_client.agents.evaluation_metrics.workspaces.update( @@ -374,7 +374,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(WorkspaceUpdateResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.update( @@ -386,7 +386,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: workspace = await response.parse() assert_matches_type(WorkspaceUpdateResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.update( @@ -400,7 +400,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, 
response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_workspace_uuid` but received ''"): @@ -408,13 +408,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: path_workspace_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: workspace = await async_client.agents.evaluation_metrics.workspaces.list() assert_matches_type(WorkspaceListResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.list() @@ -424,7 +424,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: workspace = await response.parse() assert_matches_type(WorkspaceListResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.list() as response: @@ -436,7 +436,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: workspace = await async_client.agents.evaluation_metrics.workspaces.delete( @@ -444,7 +444,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert_matches_type(WorkspaceDeleteResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.delete( @@ -456,7 +456,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: workspace = await response.parse() assert_matches_type(WorkspaceDeleteResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.delete( @@ -470,7 +470,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): @@ -478,7 +478,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_evaluation_test_cases(self, async_client: AsyncGradient) -> None: workspace = await 
async_client.agents.evaluation_metrics.workspaces.list_evaluation_test_cases( @@ -486,7 +486,7 @@ async def test_method_list_evaluation_test_cases(self, async_client: AsyncGradie ) assert_matches_type(WorkspaceListEvaluationTestCasesResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_evaluation_test_cases(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.with_raw_response.list_evaluation_test_cases( @@ -498,7 +498,7 @@ async def test_raw_response_list_evaluation_test_cases(self, async_client: Async workspace = await response.parse() assert_matches_type(WorkspaceListEvaluationTestCasesResponse, workspace, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_evaluation_test_cases(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.with_streaming_response.list_evaluation_test_cases( @@ -512,7 +512,7 @@ async def test_streaming_response_list_evaluation_test_cases(self, async_client: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list_evaluation_test_cases(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): diff --git a/tests/api_resources/agents/evaluation_metrics/workspaces/test_agents.py b/tests/api_resources/agents/evaluation_metrics/workspaces/test_agents.py index 2d63ff65..4154843c 100644 --- a/tests/api_resources/agents/evaluation_metrics/workspaces/test_agents.py +++ b/tests/api_resources/agents/evaluation_metrics/workspaces/test_agents.py @@ -20,7 +20,7 @@ class TestAgents: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: agent = client.agents.evaluation_metrics.workspaces.agents.list( @@ -28,7 +28,7 @@ def test_method_list(self, client: Gradient) -> None: ) assert_matches_type(AgentListResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: agent = client.agents.evaluation_metrics.workspaces.agents.list( @@ -39,7 +39,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(AgentListResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.agents.with_raw_response.list( @@ -51,7 +51,7 @@ def test_raw_response_list(self, client: Gradient) -> None: agent = response.parse() assert_matches_type(AgentListResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.agents.with_streaming_response.list( @@ -65,7 +65,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - 
@pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_list(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): @@ -73,7 +73,7 @@ def test_path_params_list(self, client: Gradient) -> None: workspace_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_move(self, client: Gradient) -> None: agent = client.agents.evaluation_metrics.workspaces.agents.move( @@ -81,7 +81,7 @@ def test_method_move(self, client: Gradient) -> None: ) assert_matches_type(AgentMoveResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_move_with_all_params(self, client: Gradient) -> None: agent = client.agents.evaluation_metrics.workspaces.agents.move( @@ -91,7 +91,7 @@ def test_method_move_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(AgentMoveResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_move(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.workspaces.agents.with_raw_response.move( @@ -103,7 +103,7 @@ def test_raw_response_move(self, client: Gradient) -> None: agent = response.parse() assert_matches_type(AgentMoveResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_move(self, client: Gradient) -> None: with client.agents.evaluation_metrics.workspaces.agents.with_streaming_response.move( @@ -117,7 +117,7 @@ def test_streaming_response_move(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_move(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_workspace_uuid` but received ''"): @@ -131,7 +131,7 @@ class TestAsyncAgents: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.evaluation_metrics.workspaces.agents.list( @@ -139,7 +139,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AgentListResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.evaluation_metrics.workspaces.agents.list( @@ -150,7 +150,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(AgentListResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.agents.with_raw_response.list( @@ -162,7 +162,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: agent = await response.parse() assert_matches_type(AgentListResponse, agent, path=["response"]) - 
@pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.agents.with_streaming_response.list( @@ -176,7 +176,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `workspace_uuid` but received ''"): @@ -184,7 +184,7 @@ async def test_path_params_list(self, async_client: AsyncGradient) -> None: workspace_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_move(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.evaluation_metrics.workspaces.agents.move( @@ -192,7 +192,7 @@ async def test_method_move(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AgentMoveResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_move_with_all_params(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.evaluation_metrics.workspaces.agents.move( @@ -202,7 +202,7 @@ async def test_method_move_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(AgentMoveResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_move(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.workspaces.agents.with_raw_response.move( @@ -214,7 +214,7 @@ async def test_raw_response_move(self, async_client: AsyncGradient) -> None: agent = await response.parse() assert_matches_type(AgentMoveResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_move(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.workspaces.agents.with_streaming_response.move( @@ -228,7 +228,7 @@ async def test_streaming_response_move(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_move(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_workspace_uuid` but received ''"): diff --git a/tests/api_resources/agents/test_api_keys.py b/tests/api_resources/agents/test_api_keys.py index 4b80fc54..dbb19890 100644 --- a/tests/api_resources/agents/test_api_keys.py +++ b/tests/api_resources/agents/test_api_keys.py @@ -23,7 +23,7 @@ class TestAPIKeys: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: api_key = client.agents.api_keys.create( @@ -31,7 +31,7 @@ def test_method_create(self, client: Gradient) -> None: ) assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are 
disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: api_key = client.agents.api_keys.create( @@ -41,7 +41,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.api_keys.with_raw_response.create( @@ -53,7 +53,7 @@ def test_raw_response_create(self, client: Gradient) -> None: api_key = response.parse() assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.api_keys.with_streaming_response.create( @@ -67,7 +67,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_create(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): @@ -75,7 +75,7 @@ def test_path_params_create(self, client: Gradient) -> None: path_agent_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: api_key = client.agents.api_keys.update( @@ -84,7 +84,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: api_key = client.agents.api_keys.update( @@ -96,7 +96,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.api_keys.with_raw_response.update( @@ -109,7 +109,7 @@ def test_raw_response_update(self, client: Gradient) -> None: api_key = response.parse() assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.api_keys.with_streaming_response.update( @@ -124,7 +124,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): @@ -139,7 +139,7 @@ def test_path_params_update(self, client: Gradient) -> None: path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: api_key = client.agents.api_keys.list( @@ -147,7 +147,7 @@ def test_method_list(self, client: Gradient) -> None: ) assert_matches_type(APIKeyListResponse, api_key, 
path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: api_key = client.agents.api_keys.list( @@ -157,7 +157,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.api_keys.with_raw_response.list( @@ -169,7 +169,7 @@ def test_raw_response_list(self, client: Gradient) -> None: api_key = response.parse() assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.api_keys.with_streaming_response.list( @@ -183,7 +183,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_list(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): @@ -191,7 +191,7 @@ def test_path_params_list(self, client: Gradient) -> None: agent_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: api_key = client.agents.api_keys.delete( @@ -200,7 +200,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.agents.api_keys.with_raw_response.delete( @@ -213,7 +213,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: api_key = response.parse() assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.agents.api_keys.with_streaming_response.delete( @@ -228,7 +228,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): @@ -243,7 +243,7 @@ def test_path_params_delete(self, client: Gradient) -> None: agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_regenerate(self, client: Gradient) -> None: api_key = client.agents.api_keys.regenerate( @@ -252,7 +252,7 @@ def test_method_regenerate(self, client: Gradient) -> None: ) assert_matches_type(APIKeyRegenerateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_regenerate(self, client: Gradient) -> None: response = client.agents.api_keys.with_raw_response.regenerate( @@ -265,7 +265,7 @@ def test_raw_response_regenerate(self, 
client: Gradient) -> None: api_key = response.parse() assert_matches_type(APIKeyRegenerateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_regenerate(self, client: Gradient) -> None: with client.agents.api_keys.with_streaming_response.regenerate( @@ -280,7 +280,7 @@ def test_streaming_response_regenerate(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_regenerate(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): @@ -301,7 +301,7 @@ class TestAsyncAPIKeys: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.create( @@ -309,7 +309,7 @@ async def test_method_create(self, async_client: AsyncGradient) -> None: ) assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.create( @@ -319,7 +319,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.api_keys.with_raw_response.create( @@ -331,7 +331,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: api_key = await response.parse() assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.api_keys.with_streaming_response.create( @@ -345,7 +345,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_create(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): @@ -353,7 +353,7 @@ async def test_path_params_create(self, async_client: AsyncGradient) -> None: path_agent_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.update( @@ -362,7 +362,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.update( @@ -374,7 +374,7 @@ 
async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.api_keys.with_raw_response.update( @@ -387,7 +387,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: api_key = await response.parse() assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.api_keys.with_streaming_response.update( @@ -402,7 +402,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): @@ -417,7 +417,7 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.list( @@ -425,7 +425,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None: ) assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.list( @@ -435,7 +435,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.api_keys.with_raw_response.list( @@ -447,7 +447,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: api_key = await response.parse() assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.api_keys.with_streaming_response.list( @@ -461,7 +461,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): @@ -469,7 +469,7 @@ async def test_path_params_list(self, async_client: AsyncGradient) -> None: agent_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> 
None: api_key = await async_client.agents.api_keys.delete( @@ -478,7 +478,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.agents.api_keys.with_raw_response.delete( @@ -491,7 +491,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: api_key = await response.parse() assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.agents.api_keys.with_streaming_response.delete( @@ -506,7 +506,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): @@ -521,7 +521,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None: agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_regenerate(self, async_client: AsyncGradient) -> None: api_key = await async_client.agents.api_keys.regenerate( @@ -530,7 +530,7 @@ async def test_method_regenerate(self, async_client: AsyncGradient) -> None: ) assert_matches_type(APIKeyRegenerateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_regenerate(self, async_client: AsyncGradient) -> None: response = await async_client.agents.api_keys.with_raw_response.regenerate( @@ -543,7 +543,7 @@ async def test_raw_response_regenerate(self, async_client: AsyncGradient) -> Non api_key = await response.parse() assert_matches_type(APIKeyRegenerateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_regenerate(self, async_client: AsyncGradient) -> None: async with async_client.agents.api_keys.with_streaming_response.regenerate( @@ -558,7 +558,7 @@ async def test_streaming_response_regenerate(self, async_client: AsyncGradient) assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_regenerate(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): diff --git a/tests/api_resources/agents/test_evaluation_datasets.py b/tests/api_resources/agents/test_evaluation_datasets.py index 3978ebdd..64dceb03 100644 --- a/tests/api_resources/agents/test_evaluation_datasets.py +++ b/tests/api_resources/agents/test_evaluation_datasets.py @@ -20,13 +20,13 @@ class TestEvaluationDatasets: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def 
test_method_create(self, client: Gradient) -> None: evaluation_dataset = client.agents.evaluation_datasets.create() assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: evaluation_dataset = client.agents.evaluation_datasets.create( @@ -39,7 +39,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.evaluation_datasets.with_raw_response.create() @@ -49,7 +49,7 @@ def test_raw_response_create(self, client: Gradient) -> None: evaluation_dataset = response.parse() assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.evaluation_datasets.with_streaming_response.create() as response: @@ -61,7 +61,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_file_upload_presigned_urls(self, client: Gradient) -> None: evaluation_dataset = client.agents.evaluation_datasets.create_file_upload_presigned_urls() @@ -69,7 +69,7 @@ def test_method_create_file_upload_presigned_urls(self, client: Gradient) -> Non EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_file_upload_presigned_urls_with_all_params(self, client: Gradient) -> None: evaluation_dataset = client.agents.evaluation_datasets.create_file_upload_presigned_urls( @@ -84,7 +84,7 @@ def test_method_create_file_upload_presigned_urls_with_all_params(self, client: EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_file_upload_presigned_urls(self, client: Gradient) -> None: response = client.agents.evaluation_datasets.with_raw_response.create_file_upload_presigned_urls() @@ -96,7 +96,7 @@ def test_raw_response_create_file_upload_presigned_urls(self, client: Gradient) EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_file_upload_presigned_urls(self, client: Gradient) -> None: with client.agents.evaluation_datasets.with_streaming_response.create_file_upload_presigned_urls() as response: @@ -116,13 +116,13 @@ class TestAsyncEvaluationDatasets: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: evaluation_dataset = await async_client.agents.evaluation_datasets.create() 
assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_dataset = await async_client.agents.evaluation_datasets.create( @@ -135,7 +135,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_datasets.with_raw_response.create() @@ -145,7 +145,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: evaluation_dataset = await response.parse() assert_matches_type(EvaluationDatasetCreateResponse, evaluation_dataset, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_datasets.with_streaming_response.create() as response: @@ -157,7 +157,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_file_upload_presigned_urls(self, async_client: AsyncGradient) -> None: evaluation_dataset = await async_client.agents.evaluation_datasets.create_file_upload_presigned_urls() @@ -165,7 +165,7 @@ async def test_method_create_file_upload_presigned_urls(self, async_client: Asyn EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_file_upload_presigned_urls_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_dataset = await async_client.agents.evaluation_datasets.create_file_upload_presigned_urls( @@ -180,7 +180,7 @@ async def test_method_create_file_upload_presigned_urls_with_all_params(self, as EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_file_upload_presigned_urls(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_datasets.with_raw_response.create_file_upload_presigned_urls() @@ -192,7 +192,7 @@ async def test_raw_response_create_file_upload_presigned_urls(self, async_client EvaluationDatasetCreateFileUploadPresignedURLsResponse, evaluation_dataset, path=["response"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_file_upload_presigned_urls(self, async_client: AsyncGradient) -> None: async with ( diff --git a/tests/api_resources/agents/test_evaluation_metrics.py b/tests/api_resources/agents/test_evaluation_metrics.py index 612f4228..088353bb 100644 --- a/tests/api_resources/agents/test_evaluation_metrics.py +++ b/tests/api_resources/agents/test_evaluation_metrics.py @@ -20,13 +20,13 @@ class TestEvaluationMetrics: parametrize = pytest.mark.parametrize("client", [False, True], 
indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: evaluation_metric = client.agents.evaluation_metrics.list() assert_matches_type(EvaluationMetricListResponse, evaluation_metric, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.with_raw_response.list() @@ -36,7 +36,7 @@ def test_raw_response_list(self, client: Gradient) -> None: evaluation_metric = response.parse() assert_matches_type(EvaluationMetricListResponse, evaluation_metric, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.evaluation_metrics.with_streaming_response.list() as response: @@ -48,13 +48,13 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_regions(self, client: Gradient) -> None: evaluation_metric = client.agents.evaluation_metrics.list_regions() assert_matches_type(EvaluationMetricListRegionsResponse, evaluation_metric, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_regions_with_all_params(self, client: Gradient) -> None: evaluation_metric = client.agents.evaluation_metrics.list_regions( @@ -63,7 +63,7 @@ def test_method_list_regions_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(EvaluationMetricListRegionsResponse, evaluation_metric, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_regions(self, client: Gradient) -> None: response = client.agents.evaluation_metrics.with_raw_response.list_regions() @@ -73,7 +73,7 @@ def test_raw_response_list_regions(self, client: Gradient) -> None: evaluation_metric = response.parse() assert_matches_type(EvaluationMetricListRegionsResponse, evaluation_metric, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list_regions(self, client: Gradient) -> None: with client.agents.evaluation_metrics.with_streaming_response.list_regions() as response: @@ -91,13 +91,13 @@ class TestAsyncEvaluationMetrics: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: evaluation_metric = await async_client.agents.evaluation_metrics.list() assert_matches_type(EvaluationMetricListResponse, evaluation_metric, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.with_raw_response.list() @@ -107,7 +107,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: evaluation_metric = await response.parse() assert_matches_type(EvaluationMetricListResponse, evaluation_metric, path=["response"]) - 
@pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.with_streaming_response.list() as response: @@ -119,13 +119,13 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_regions(self, async_client: AsyncGradient) -> None: evaluation_metric = await async_client.agents.evaluation_metrics.list_regions() assert_matches_type(EvaluationMetricListRegionsResponse, evaluation_metric, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_regions_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_metric = await async_client.agents.evaluation_metrics.list_regions( @@ -134,7 +134,7 @@ async def test_method_list_regions_with_all_params(self, async_client: AsyncGrad ) assert_matches_type(EvaluationMetricListRegionsResponse, evaluation_metric, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_regions(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_metrics.with_raw_response.list_regions() @@ -144,7 +144,7 @@ async def test_raw_response_list_regions(self, async_client: AsyncGradient) -> N evaluation_metric = await response.parse() assert_matches_type(EvaluationMetricListRegionsResponse, evaluation_metric, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_regions(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_metrics.with_streaming_response.list_regions() as response: diff --git a/tests/api_resources/agents/test_evaluation_runs.py b/tests/api_resources/agents/test_evaluation_runs.py index be842cbc..8d7e1826 100644 --- a/tests/api_resources/agents/test_evaluation_runs.py +++ b/tests/api_resources/agents/test_evaluation_runs.py @@ -22,13 +22,13 @@ class TestEvaluationRuns: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: evaluation_run = client.agents.evaluation_runs.create() assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: evaluation_run = client.agents.evaluation_runs.create( @@ -38,7 +38,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.evaluation_runs.with_raw_response.create() @@ -48,7 +48,7 @@ def test_raw_response_create(self, client: Gradient) -> None: evaluation_run = response.parse() assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.evaluation_runs.with_streaming_response.create() as response: @@ -60,7 +60,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: evaluation_run = client.agents.evaluation_runs.retrieve( @@ -68,7 +68,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(EvaluationRunRetrieveResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.agents.evaluation_runs.with_raw_response.retrieve( @@ -80,7 +80,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: evaluation_run = response.parse() assert_matches_type(EvaluationRunRetrieveResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.agents.evaluation_runs.with_streaming_response.retrieve( @@ -94,7 +94,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): @@ -102,7 +102,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_results(self, client: Gradient) -> None: evaluation_run = client.agents.evaluation_runs.list_results( @@ -110,7 +110,7 @@ def test_method_list_results(self, client: Gradient) -> None: ) assert_matches_type(EvaluationRunListResultsResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_results_with_all_params(self, client: Gradient) -> None: evaluation_run = client.agents.evaluation_runs.list_results( @@ -120,7 +120,7 @@ def test_method_list_results_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(EvaluationRunListResultsResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_results(self, client: Gradient) -> None: response = client.agents.evaluation_runs.with_raw_response.list_results( @@ -132,7 +132,7 @@ def test_raw_response_list_results(self, client: Gradient) -> None: evaluation_run = response.parse() assert_matches_type(EvaluationRunListResultsResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list_results(self, client: Gradient) -> None: with client.agents.evaluation_runs.with_streaming_response.list_results( @@ -146,7 +146,7 @@ def test_streaming_response_list_results(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") 
@parametrize def test_path_params_list_results(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): @@ -154,7 +154,7 @@ def test_path_params_list_results(self, client: Gradient) -> None: evaluation_run_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve_results(self, client: Gradient) -> None: evaluation_run = client.agents.evaluation_runs.retrieve_results( @@ -163,7 +163,7 @@ def test_method_retrieve_results(self, client: Gradient) -> None: ) assert_matches_type(EvaluationRunRetrieveResultsResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve_results(self, client: Gradient) -> None: response = client.agents.evaluation_runs.with_raw_response.retrieve_results( @@ -176,7 +176,7 @@ def test_raw_response_retrieve_results(self, client: Gradient) -> None: evaluation_run = response.parse() assert_matches_type(EvaluationRunRetrieveResultsResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve_results(self, client: Gradient) -> None: with client.agents.evaluation_runs.with_streaming_response.retrieve_results( @@ -191,7 +191,7 @@ def test_streaming_response_retrieve_results(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve_results(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): @@ -206,13 +206,13 @@ class TestAsyncEvaluationRuns: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: evaluation_run = await async_client.agents.evaluation_runs.create() assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_run = await async_client.agents.evaluation_runs.create( @@ -222,7 +222,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_runs.with_raw_response.create() @@ -232,7 +232,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: evaluation_run = await response.parse() assert_matches_type(EvaluationRunCreateResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_runs.with_streaming_response.create() as response: @@ -244,7 +244,7 @@ async def test_streaming_response_create(self, 
async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: evaluation_run = await async_client.agents.evaluation_runs.retrieve( @@ -252,7 +252,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(EvaluationRunRetrieveResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_runs.with_raw_response.retrieve( @@ -264,7 +264,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: evaluation_run = await response.parse() assert_matches_type(EvaluationRunRetrieveResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_runs.with_streaming_response.retrieve( @@ -278,7 +278,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): @@ -286,7 +286,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_results(self, async_client: AsyncGradient) -> None: evaluation_run = await async_client.agents.evaluation_runs.list_results( @@ -294,7 +294,7 @@ async def test_method_list_results(self, async_client: AsyncGradient) -> None: ) assert_matches_type(EvaluationRunListResultsResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_results_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_run = await async_client.agents.evaluation_runs.list_results( @@ -304,7 +304,7 @@ async def test_method_list_results_with_all_params(self, async_client: AsyncGrad ) assert_matches_type(EvaluationRunListResultsResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_results(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_runs.with_raw_response.list_results( @@ -316,7 +316,7 @@ async def test_raw_response_list_results(self, async_client: AsyncGradient) -> N evaluation_run = await response.parse() assert_matches_type(EvaluationRunListResultsResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_results(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_runs.with_streaming_response.list_results( @@ -330,7 +330,7 @@ async def test_streaming_response_list_results(self, async_client: AsyncGradient assert cast(Any, response.is_closed) is True - 
@pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list_results(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): @@ -338,7 +338,7 @@ async def test_path_params_list_results(self, async_client: AsyncGradient) -> No evaluation_run_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve_results(self, async_client: AsyncGradient) -> None: evaluation_run = await async_client.agents.evaluation_runs.retrieve_results( @@ -347,7 +347,7 @@ async def test_method_retrieve_results(self, async_client: AsyncGradient) -> Non ) assert_matches_type(EvaluationRunRetrieveResultsResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve_results(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_runs.with_raw_response.retrieve_results( @@ -360,7 +360,7 @@ async def test_raw_response_retrieve_results(self, async_client: AsyncGradient) evaluation_run = await response.parse() assert_matches_type(EvaluationRunRetrieveResultsResponse, evaluation_run, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve_results(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_runs.with_streaming_response.retrieve_results( @@ -375,7 +375,7 @@ async def test_streaming_response_retrieve_results(self, async_client: AsyncGrad assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve_results(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `evaluation_run_uuid` but received ''"): diff --git a/tests/api_resources/agents/test_evaluation_test_cases.py b/tests/api_resources/agents/test_evaluation_test_cases.py index b1d92580..7cd0a07e 100644 --- a/tests/api_resources/agents/test_evaluation_test_cases.py +++ b/tests/api_resources/agents/test_evaluation_test_cases.py @@ -23,13 +23,13 @@ class TestEvaluationTestCases: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.create() assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.create( @@ -47,7 +47,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.evaluation_test_cases.with_raw_response.create() @@ -57,7 +57,7 @@ def test_raw_response_create(self, client: Gradient) -> None: 
evaluation_test_case = response.parse() assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.evaluation_test_cases.with_streaming_response.create() as response: @@ -69,7 +69,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.retrieve( @@ -77,7 +77,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve_with_all_params(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.retrieve( @@ -86,7 +86,7 @@ def test_method_retrieve_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.agents.evaluation_test_cases.with_raw_response.retrieve( @@ -98,7 +98,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: evaluation_test_case = response.parse() assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.agents.evaluation_test_cases.with_streaming_response.retrieve( @@ -112,7 +112,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `test_case_uuid` but received ''"): @@ -120,7 +120,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: test_case_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.update( @@ -128,7 +128,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.update( @@ -147,7 +147,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.evaluation_test_cases.with_raw_response.update( @@ -159,7 +159,7 
@@ def test_raw_response_update(self, client: Gradient) -> None: evaluation_test_case = response.parse() assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.evaluation_test_cases.with_streaming_response.update( @@ -173,7 +173,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_test_case_uuid` but received ''"): @@ -181,13 +181,13 @@ def test_path_params_update(self, client: Gradient) -> None: path_test_case_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.list() assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.evaluation_test_cases.with_raw_response.list() @@ -197,7 +197,7 @@ def test_raw_response_list(self, client: Gradient) -> None: evaluation_test_case = response.parse() assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.evaluation_test_cases.with_streaming_response.list() as response: @@ -209,7 +209,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_evaluation_runs(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.list_evaluation_runs( @@ -217,7 +217,7 @@ def test_method_list_evaluation_runs(self, client: Gradient) -> None: ) assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_evaluation_runs_with_all_params(self, client: Gradient) -> None: evaluation_test_case = client.agents.evaluation_test_cases.list_evaluation_runs( @@ -226,7 +226,7 @@ def test_method_list_evaluation_runs_with_all_params(self, client: Gradient) -> ) assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_evaluation_runs(self, client: Gradient) -> None: response = client.agents.evaluation_test_cases.with_raw_response.list_evaluation_runs( @@ -238,7 +238,7 @@ def test_raw_response_list_evaluation_runs(self, client: Gradient) -> None: evaluation_test_case = response.parse() assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def 
test_streaming_response_list_evaluation_runs(self, client: Gradient) -> None: with client.agents.evaluation_test_cases.with_streaming_response.list_evaluation_runs( @@ -252,7 +252,7 @@ def test_streaming_response_list_evaluation_runs(self, client: Gradient) -> None assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_list_evaluation_runs(self, client: Gradient) -> None: with pytest.raises( @@ -268,13 +268,13 @@ class TestAsyncEvaluationTestCases: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.create() assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.create( @@ -292,7 +292,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_test_cases.with_raw_response.create() @@ -302,7 +302,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: evaluation_test_case = await response.parse() assert_matches_type(EvaluationTestCaseCreateResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_test_cases.with_streaming_response.create() as response: @@ -314,7 +314,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.retrieve( @@ -322,7 +322,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.retrieve( @@ -331,7 +331,7 @@ async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient ) assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_test_cases.with_raw_response.retrieve( @@ -343,7 +343,7 @@ 
async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: evaluation_test_case = await response.parse() assert_matches_type(EvaluationTestCaseRetrieveResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_test_cases.with_streaming_response.retrieve( @@ -357,7 +357,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `test_case_uuid` but received ''"): @@ -365,7 +365,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: test_case_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.update( @@ -373,7 +373,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.update( @@ -392,7 +392,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_test_cases.with_raw_response.update( @@ -404,7 +404,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: evaluation_test_case = await response.parse() assert_matches_type(EvaluationTestCaseUpdateResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_test_cases.with_streaming_response.update( @@ -418,7 +418,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_test_case_uuid` but received ''"): @@ -426,13 +426,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: path_test_case_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.list() assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"]) 
- @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_test_cases.with_raw_response.list() @@ -442,7 +442,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: evaluation_test_case = await response.parse() assert_matches_type(EvaluationTestCaseListResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_test_cases.with_streaming_response.list() as response: @@ -454,7 +454,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_evaluation_runs(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.list_evaluation_runs( @@ -462,7 +462,7 @@ async def test_method_list_evaluation_runs(self, async_client: AsyncGradient) -> ) assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_evaluation_runs_with_all_params(self, async_client: AsyncGradient) -> None: evaluation_test_case = await async_client.agents.evaluation_test_cases.list_evaluation_runs( @@ -471,7 +471,7 @@ async def test_method_list_evaluation_runs_with_all_params(self, async_client: A ) assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_evaluation_runs(self, async_client: AsyncGradient) -> None: response = await async_client.agents.evaluation_test_cases.with_raw_response.list_evaluation_runs( @@ -483,7 +483,7 @@ async def test_raw_response_list_evaluation_runs(self, async_client: AsyncGradie evaluation_test_case = await response.parse() assert_matches_type(EvaluationTestCaseListEvaluationRunsResponse, evaluation_test_case, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_evaluation_runs(self, async_client: AsyncGradient) -> None: async with async_client.agents.evaluation_test_cases.with_streaming_response.list_evaluation_runs( @@ -497,7 +497,7 @@ async def test_streaming_response_list_evaluation_runs(self, async_client: Async assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list_evaluation_runs(self, async_client: AsyncGradient) -> None: with pytest.raises( diff --git a/tests/api_resources/agents/test_functions.py b/tests/api_resources/agents/test_functions.py index 0ba54432..64d55331 100644 --- a/tests/api_resources/agents/test_functions.py +++ b/tests/api_resources/agents/test_functions.py @@ -21,7 +21,7 @@ class TestFunctions: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, 
client: Gradient) -> None: function = client.agents.functions.create( @@ -29,7 +29,7 @@ def test_method_create(self, client: Gradient) -> None: ) assert_matches_type(FunctionCreateResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: function = client.agents.functions.create( @@ -44,7 +44,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(FunctionCreateResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.agents.functions.with_raw_response.create( @@ -56,7 +56,7 @@ def test_raw_response_create(self, client: Gradient) -> None: function = response.parse() assert_matches_type(FunctionCreateResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.functions.with_streaming_response.create( @@ -70,7 +70,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_create(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): @@ -78,7 +78,7 @@ def test_path_params_create(self, client: Gradient) -> None: path_agent_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: function = client.agents.functions.update( @@ -87,7 +87,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(FunctionUpdateResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: function = client.agents.functions.update( @@ -104,7 +104,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(FunctionUpdateResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.functions.with_raw_response.update( @@ -117,7 +117,7 @@ def test_raw_response_update(self, client: Gradient) -> None: function = response.parse() assert_matches_type(FunctionUpdateResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.functions.with_streaming_response.update( @@ -132,7 +132,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): @@ -147,7 +147,7 @@ def test_path_params_update(self, client: Gradient) -> None: path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) - 
@pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: function = client.agents.functions.delete( @@ -156,7 +156,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert_matches_type(FunctionDeleteResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.agents.functions.with_raw_response.delete( @@ -169,7 +169,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: function = response.parse() assert_matches_type(FunctionDeleteResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.agents.functions.with_streaming_response.delete( @@ -184,7 +184,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): @@ -205,7 +205,7 @@ class TestAsyncFunctions: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: function = await async_client.agents.functions.create( @@ -213,7 +213,7 @@ async def test_method_create(self, async_client: AsyncGradient) -> None: ) assert_matches_type(FunctionCreateResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: function = await async_client.agents.functions.create( @@ -228,7 +228,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(FunctionCreateResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.functions.with_raw_response.create( @@ -240,7 +240,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: function = await response.parse() assert_matches_type(FunctionCreateResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.functions.with_streaming_response.create( @@ -254,7 +254,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_create(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): @@ -262,7 +262,7 @@ async def test_path_params_create(self, async_client: AsyncGradient) -> None: path_agent_uuid="", ) - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: function = await async_client.agents.functions.update( @@ -271,7 +271,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(FunctionUpdateResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: function = await async_client.agents.functions.update( @@ -288,7 +288,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(FunctionUpdateResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.functions.with_raw_response.update( @@ -301,7 +301,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: function = await response.parse() assert_matches_type(FunctionUpdateResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.functions.with_streaming_response.update( @@ -316,7 +316,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_agent_uuid` but received ''"): @@ -331,7 +331,7 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: path_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: function = await async_client.agents.functions.delete( @@ -340,7 +340,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert_matches_type(FunctionDeleteResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.agents.functions.with_raw_response.delete( @@ -353,7 +353,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: function = await response.parse() assert_matches_type(FunctionDeleteResponse, function, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.agents.functions.with_streaming_response.delete( @@ -368,7 +368,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): diff --git 
a/tests/api_resources/agents/test_knowledge_bases.py b/tests/api_resources/agents/test_knowledge_bases.py index dd35e5f4..60dae7d0 100644 --- a/tests/api_resources/agents/test_knowledge_bases.py +++ b/tests/api_resources/agents/test_knowledge_bases.py @@ -17,7 +17,7 @@ class TestKnowledgeBases: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_attach(self, client: Gradient) -> None: knowledge_base = client.agents.knowledge_bases.attach( @@ -25,7 +25,7 @@ def test_method_attach(self, client: Gradient) -> None: ) assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_attach(self, client: Gradient) -> None: response = client.agents.knowledge_bases.with_raw_response.attach( @@ -37,7 +37,7 @@ def test_raw_response_attach(self, client: Gradient) -> None: knowledge_base = response.parse() assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_attach(self, client: Gradient) -> None: with client.agents.knowledge_bases.with_streaming_response.attach( @@ -51,7 +51,7 @@ def test_streaming_response_attach(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_attach(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): @@ -59,7 +59,7 @@ def test_path_params_attach(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_attach_single(self, client: Gradient) -> None: knowledge_base = client.agents.knowledge_bases.attach_single( @@ -68,7 +68,7 @@ def test_method_attach_single(self, client: Gradient) -> None: ) assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_attach_single(self, client: Gradient) -> None: response = client.agents.knowledge_bases.with_raw_response.attach_single( @@ -81,7 +81,7 @@ def test_raw_response_attach_single(self, client: Gradient) -> None: knowledge_base = response.parse() assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_attach_single(self, client: Gradient) -> None: with client.agents.knowledge_bases.with_streaming_response.attach_single( @@ -96,7 +96,7 @@ def test_streaming_response_attach_single(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_attach_single(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): @@ -111,7 +111,7 @@ def test_path_params_attach_single(self, client: Gradient) -> None: agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") 
@parametrize def test_method_detach(self, client: Gradient) -> None: knowledge_base = client.agents.knowledge_bases.detach( @@ -120,7 +120,7 @@ def test_method_detach(self, client: Gradient) -> None: ) assert_matches_type(KnowledgeBaseDetachResponse, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_detach(self, client: Gradient) -> None: response = client.agents.knowledge_bases.with_raw_response.detach( @@ -133,7 +133,7 @@ def test_raw_response_detach(self, client: Gradient) -> None: knowledge_base = response.parse() assert_matches_type(KnowledgeBaseDetachResponse, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_detach(self, client: Gradient) -> None: with client.agents.knowledge_bases.with_streaming_response.detach( @@ -148,7 +148,7 @@ def test_streaming_response_detach(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_detach(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): @@ -169,7 +169,7 @@ class TestAsyncKnowledgeBases: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_attach(self, async_client: AsyncGradient) -> None: knowledge_base = await async_client.agents.knowledge_bases.attach( @@ -177,7 +177,7 @@ async def test_method_attach(self, async_client: AsyncGradient) -> None: ) assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_attach(self, async_client: AsyncGradient) -> None: response = await async_client.agents.knowledge_bases.with_raw_response.attach( @@ -189,7 +189,7 @@ async def test_raw_response_attach(self, async_client: AsyncGradient) -> None: knowledge_base = await response.parse() assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_attach(self, async_client: AsyncGradient) -> None: async with async_client.agents.knowledge_bases.with_streaming_response.attach( @@ -203,7 +203,7 @@ async def test_streaming_response_attach(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_attach(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): @@ -211,7 +211,7 @@ async def test_path_params_attach(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_attach_single(self, async_client: AsyncGradient) -> None: knowledge_base = await async_client.agents.knowledge_bases.attach_single( @@ -220,7 +220,7 @@ async def test_method_attach_single(self, async_client: AsyncGradient) -> None: ) assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, 
path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_attach_single(self, async_client: AsyncGradient) -> None: response = await async_client.agents.knowledge_bases.with_raw_response.attach_single( @@ -233,7 +233,7 @@ async def test_raw_response_attach_single(self, async_client: AsyncGradient) -> knowledge_base = await response.parse() assert_matches_type(APILinkKnowledgeBaseOutput, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_attach_single(self, async_client: AsyncGradient) -> None: async with async_client.agents.knowledge_bases.with_streaming_response.attach_single( @@ -248,7 +248,7 @@ async def test_streaming_response_attach_single(self, async_client: AsyncGradien assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_attach_single(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): @@ -263,7 +263,7 @@ async def test_path_params_attach_single(self, async_client: AsyncGradient) -> N agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_detach(self, async_client: AsyncGradient) -> None: knowledge_base = await async_client.agents.knowledge_bases.detach( @@ -272,7 +272,7 @@ async def test_method_detach(self, async_client: AsyncGradient) -> None: ) assert_matches_type(KnowledgeBaseDetachResponse, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_detach(self, async_client: AsyncGradient) -> None: response = await async_client.agents.knowledge_bases.with_raw_response.detach( @@ -285,7 +285,7 @@ async def test_raw_response_detach(self, async_client: AsyncGradient) -> None: knowledge_base = await response.parse() assert_matches_type(KnowledgeBaseDetachResponse, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_detach(self, async_client: AsyncGradient) -> None: async with async_client.agents.knowledge_bases.with_streaming_response.detach( @@ -300,7 +300,7 @@ async def test_streaming_response_detach(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_detach(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_uuid` but received ''"): diff --git a/tests/api_resources/agents/test_routes.py b/tests/api_resources/agents/test_routes.py index 294fa853..37bc4eac 100644 --- a/tests/api_resources/agents/test_routes.py +++ b/tests/api_resources/agents/test_routes.py @@ -22,7 +22,7 @@ class TestRoutes: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: route = client.agents.routes.update( @@ -31,7 +31,7 @@ def test_method_update(self, client: Gradient) -> None: ) 
assert_matches_type(RouteUpdateResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: route = client.agents.routes.update( @@ -45,7 +45,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(RouteUpdateResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.routes.with_raw_response.update( @@ -58,7 +58,7 @@ def test_raw_response_update(self, client: Gradient) -> None: route = response.parse() assert_matches_type(RouteUpdateResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.routes.with_streaming_response.update( @@ -73,7 +73,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises( @@ -90,7 +90,7 @@ def test_path_params_update(self, client: Gradient) -> None: path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: route = client.agents.routes.delete( @@ -99,7 +99,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert_matches_type(RouteDeleteResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.agents.routes.with_raw_response.delete( @@ -112,7 +112,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: route = response.parse() assert_matches_type(RouteDeleteResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.agents.routes.with_streaming_response.delete( @@ -127,7 +127,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `parent_agent_uuid` but received ''"): @@ -142,7 +142,7 @@ def test_path_params_delete(self, client: Gradient) -> None: parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_add(self, client: Gradient) -> None: route = client.agents.routes.add( @@ -151,7 +151,7 @@ def test_method_add(self, client: Gradient) -> None: ) assert_matches_type(RouteAddResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_add_with_all_params(self, client: Gradient) -> None: route = client.agents.routes.add( @@ -164,7 +164,7 @@ def test_method_add_with_all_params(self, client: Gradient) -> None: ) 
assert_matches_type(RouteAddResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_add(self, client: Gradient) -> None: response = client.agents.routes.with_raw_response.add( @@ -177,7 +177,7 @@ def test_raw_response_add(self, client: Gradient) -> None: route = response.parse() assert_matches_type(RouteAddResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_add(self, client: Gradient) -> None: with client.agents.routes.with_streaming_response.add( @@ -192,7 +192,7 @@ def test_streaming_response_add(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_add(self, client: Gradient) -> None: with pytest.raises( @@ -209,7 +209,7 @@ def test_path_params_add(self, client: Gradient) -> None: path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_view(self, client: Gradient) -> None: route = client.agents.routes.view( @@ -217,7 +217,7 @@ def test_method_view(self, client: Gradient) -> None: ) assert_matches_type(RouteViewResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_view(self, client: Gradient) -> None: response = client.agents.routes.with_raw_response.view( @@ -229,7 +229,7 @@ def test_raw_response_view(self, client: Gradient) -> None: route = response.parse() assert_matches_type(RouteViewResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_view(self, client: Gradient) -> None: with client.agents.routes.with_streaming_response.view( @@ -243,7 +243,7 @@ def test_streaming_response_view(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_view(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): @@ -257,7 +257,7 @@ class TestAsyncRoutes: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: route = await async_client.agents.routes.update( @@ -266,7 +266,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(RouteUpdateResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: route = await async_client.agents.routes.update( @@ -280,7 +280,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(RouteUpdateResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.routes.with_raw_response.update( @@ -293,7 +293,7 
@@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: route = await response.parse() assert_matches_type(RouteUpdateResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.routes.with_streaming_response.update( @@ -308,7 +308,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises( @@ -325,7 +325,7 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: route = await async_client.agents.routes.delete( @@ -334,7 +334,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert_matches_type(RouteDeleteResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.agents.routes.with_raw_response.delete( @@ -347,7 +347,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: route = await response.parse() assert_matches_type(RouteDeleteResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.agents.routes.with_streaming_response.delete( @@ -362,7 +362,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `parent_agent_uuid` but received ''"): @@ -377,7 +377,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None: parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_add(self, async_client: AsyncGradient) -> None: route = await async_client.agents.routes.add( @@ -386,7 +386,7 @@ async def test_method_add(self, async_client: AsyncGradient) -> None: ) assert_matches_type(RouteAddResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_add_with_all_params(self, async_client: AsyncGradient) -> None: route = await async_client.agents.routes.add( @@ -399,7 +399,7 @@ async def test_method_add_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(RouteAddResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_add(self, async_client: AsyncGradient) -> None: response = await async_client.agents.routes.with_raw_response.add( @@ -412,7 
+412,7 @@ async def test_raw_response_add(self, async_client: AsyncGradient) -> None: route = await response.parse() assert_matches_type(RouteAddResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_add(self, async_client: AsyncGradient) -> None: async with async_client.agents.routes.with_streaming_response.add( @@ -427,7 +427,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradient) -> None assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_add(self, async_client: AsyncGradient) -> None: with pytest.raises( @@ -444,7 +444,7 @@ async def test_path_params_add(self, async_client: AsyncGradient) -> None: path_parent_agent_uuid='"123e4567-e89b-12d3-a456-426614174000"', ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_view(self, async_client: AsyncGradient) -> None: route = await async_client.agents.routes.view( @@ -452,7 +452,7 @@ async def test_method_view(self, async_client: AsyncGradient) -> None: ) assert_matches_type(RouteViewResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_view(self, async_client: AsyncGradient) -> None: response = await async_client.agents.routes.with_raw_response.view( @@ -464,7 +464,7 @@ async def test_raw_response_view(self, async_client: AsyncGradient) -> None: route = await response.parse() assert_matches_type(RouteViewResponse, route, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_view(self, async_client: AsyncGradient) -> None: async with async_client.agents.routes.with_streaming_response.view( @@ -478,7 +478,7 @@ async def test_streaming_response_view(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_view(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): diff --git a/tests/api_resources/agents/test_versions.py b/tests/api_resources/agents/test_versions.py index 4b45edf7..d12e362e 100644 --- a/tests/api_resources/agents/test_versions.py +++ b/tests/api_resources/agents/test_versions.py @@ -17,7 +17,7 @@ class TestVersions: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: version = client.agents.versions.update( @@ -25,7 +25,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(VersionUpdateResponse, version, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: version = client.agents.versions.update( @@ -35,7 +35,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(VersionUpdateResponse, version, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def 
test_raw_response_update(self, client: Gradient) -> None: response = client.agents.versions.with_raw_response.update( @@ -47,7 +47,7 @@ def test_raw_response_update(self, client: Gradient) -> None: version = response.parse() assert_matches_type(VersionUpdateResponse, version, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.versions.with_streaming_response.update( @@ -61,7 +61,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): @@ -69,7 +69,7 @@ def test_path_params_update(self, client: Gradient) -> None: path_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: version = client.agents.versions.list( @@ -77,7 +77,7 @@ def test_method_list(self, client: Gradient) -> None: ) assert_matches_type(VersionListResponse, version, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: version = client.agents.versions.list( @@ -87,7 +87,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(VersionListResponse, version, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.versions.with_raw_response.list( @@ -99,7 +99,7 @@ def test_raw_response_list(self, client: Gradient) -> None: version = response.parse() assert_matches_type(VersionListResponse, version, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.versions.with_streaming_response.list( @@ -113,7 +113,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_list(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): @@ -127,7 +127,7 @@ class TestAsyncVersions: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: version = await async_client.agents.versions.update( @@ -135,7 +135,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(VersionUpdateResponse, version, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: version = await async_client.agents.versions.update( @@ -145,7 +145,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) 
assert_matches_type(VersionUpdateResponse, version, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.versions.with_raw_response.update( @@ -157,7 +157,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: version = await response.parse() assert_matches_type(VersionUpdateResponse, version, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.versions.with_streaming_response.update( @@ -171,7 +171,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): @@ -179,7 +179,7 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: path_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: version = await async_client.agents.versions.list( @@ -187,7 +187,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None: ) assert_matches_type(VersionListResponse, version, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: version = await async_client.agents.versions.list( @@ -197,7 +197,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(VersionListResponse, version, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.agents.versions.with_raw_response.list( @@ -209,7 +209,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: version = await response.parse() assert_matches_type(VersionListResponse, version, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.versions.with_streaming_response.list( @@ -223,7 +223,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): diff --git a/tests/api_resources/chat/test_completions.py b/tests/api_resources/chat/test_completions.py index a25fd3c4..fce393fd 100644 --- a/tests/api_resources/chat/test_completions.py +++ b/tests/api_resources/chat/test_completions.py @@ -17,7 +17,7 @@ class TestCompletions: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - 
@pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_1(self, client: Gradient) -> None: completion = client.chat.completions.create( @@ -31,7 +31,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None: ) assert_matches_type(CompletionCreateResponse, completion, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None: completion = client.chat.completions.create( @@ -71,7 +71,7 @@ def test_method_create_with_all_params_overload_1(self, client: Gradient) -> Non ) assert_matches_type(CompletionCreateResponse, completion, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.chat.completions.with_raw_response.create( @@ -89,7 +89,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None: completion = response.parse() assert_matches_type(CompletionCreateResponse, completion, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.chat.completions.with_streaming_response.create( @@ -109,7 +109,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_2(self, client: Gradient) -> None: completion_stream = client.chat.completions.create( @@ -124,7 +124,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None: ) completion_stream.response.close() - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None: completion_stream = client.chat.completions.create( @@ -164,7 +164,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non ) completion_stream.response.close() - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.chat.completions.with_raw_response.create( @@ -182,7 +182,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None: stream = response.parse() stream.close() - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.chat.completions.with_streaming_response.create( @@ -209,7 +209,7 @@ class TestAsyncCompletions: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: completion = await async_client.chat.completions.create( @@ -223,7 +223,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No ) assert_matches_type(CompletionCreateResponse, completion, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def 
test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: completion = await async_client.chat.completions.create( @@ -263,7 +263,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn ) assert_matches_type(CompletionCreateResponse, completion, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.chat.completions.with_raw_response.create( @@ -281,7 +281,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) completion = await response.parse() assert_matches_type(CompletionCreateResponse, completion, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.chat.completions.with_streaming_response.create( @@ -301,7 +301,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: completion_stream = await async_client.chat.completions.create( @@ -316,7 +316,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No ) await completion_stream.response.aclose() - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: completion_stream = await async_client.chat.completions.create( @@ -356,7 +356,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn ) await completion_stream.response.aclose() - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.chat.completions.with_raw_response.create( @@ -374,7 +374,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) stream = await response.parse() await stream.close() - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.chat.completions.with_streaming_response.create( diff --git a/tests/api_resources/databases/schema_registry/test_config.py b/tests/api_resources/databases/schema_registry/test_config.py index 024d8b0a..b1d21f62 100644 --- a/tests/api_resources/databases/schema_registry/test_config.py +++ b/tests/api_resources/databases/schema_registry/test_config.py @@ -22,7 +22,7 @@ class TestConfig: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: config = client.databases.schema_registry.config.retrieve( @@ -30,7 +30,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(ConfigRetrieveResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def 
test_raw_response_retrieve(self, client: Gradient) -> None: response = client.databases.schema_registry.config.with_raw_response.retrieve( @@ -42,7 +42,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: config = response.parse() assert_matches_type(ConfigRetrieveResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.databases.schema_registry.config.with_streaming_response.retrieve( @@ -56,7 +56,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): @@ -64,7 +64,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: config = client.databases.schema_registry.config.update( @@ -73,7 +73,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(ConfigUpdateResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.databases.schema_registry.config.with_raw_response.update( @@ -86,7 +86,7 @@ def test_raw_response_update(self, client: Gradient) -> None: config = response.parse() assert_matches_type(ConfigUpdateResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.databases.schema_registry.config.with_streaming_response.update( @@ -101,7 +101,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): @@ -110,7 +110,7 @@ def test_path_params_update(self, client: Gradient) -> None: compatibility_level="BACKWARD", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve_subject(self, client: Gradient) -> None: config = client.databases.schema_registry.config.retrieve_subject( @@ -119,7 +119,7 @@ def test_method_retrieve_subject(self, client: Gradient) -> None: ) assert_matches_type(ConfigRetrieveSubjectResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve_subject(self, client: Gradient) -> None: response = client.databases.schema_registry.config.with_raw_response.retrieve_subject( @@ -132,7 +132,7 @@ def test_raw_response_retrieve_subject(self, client: Gradient) -> None: config = response.parse() assert_matches_type(ConfigRetrieveSubjectResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve_subject(self, client: Gradient) -> None: with 
client.databases.schema_registry.config.with_streaming_response.retrieve_subject( @@ -147,7 +147,7 @@ def test_streaming_response_retrieve_subject(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve_subject(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): @@ -162,7 +162,7 @@ def test_path_params_retrieve_subject(self, client: Gradient) -> None: database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_subject(self, client: Gradient) -> None: config = client.databases.schema_registry.config.update_subject( @@ -172,7 +172,7 @@ def test_method_update_subject(self, client: Gradient) -> None: ) assert_matches_type(ConfigUpdateSubjectResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update_subject(self, client: Gradient) -> None: response = client.databases.schema_registry.config.with_raw_response.update_subject( @@ -186,7 +186,7 @@ def test_raw_response_update_subject(self, client: Gradient) -> None: config = response.parse() assert_matches_type(ConfigUpdateSubjectResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update_subject(self, client: Gradient) -> None: with client.databases.schema_registry.config.with_streaming_response.update_subject( @@ -202,7 +202,7 @@ def test_streaming_response_update_subject(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update_subject(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): @@ -225,7 +225,7 @@ class TestAsyncConfig: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: config = await async_client.databases.schema_registry.config.retrieve( @@ -233,7 +233,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(ConfigRetrieveResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.databases.schema_registry.config.with_raw_response.retrieve( @@ -245,7 +245,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: config = await response.parse() assert_matches_type(ConfigRetrieveResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.databases.schema_registry.config.with_streaming_response.retrieve( @@ -259,7 +259,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, 
response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): @@ -267,7 +267,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: config = await async_client.databases.schema_registry.config.update( @@ -276,7 +276,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(ConfigUpdateResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.databases.schema_registry.config.with_raw_response.update( @@ -289,7 +289,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: config = await response.parse() assert_matches_type(ConfigUpdateResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.databases.schema_registry.config.with_streaming_response.update( @@ -304,7 +304,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): @@ -313,7 +313,7 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: compatibility_level="BACKWARD", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve_subject(self, async_client: AsyncGradient) -> None: config = await async_client.databases.schema_registry.config.retrieve_subject( @@ -322,7 +322,7 @@ async def test_method_retrieve_subject(self, async_client: AsyncGradient) -> Non ) assert_matches_type(ConfigRetrieveSubjectResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve_subject(self, async_client: AsyncGradient) -> None: response = await async_client.databases.schema_registry.config.with_raw_response.retrieve_subject( @@ -335,7 +335,7 @@ async def test_raw_response_retrieve_subject(self, async_client: AsyncGradient) config = await response.parse() assert_matches_type(ConfigRetrieveSubjectResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve_subject(self, async_client: AsyncGradient) -> None: async with async_client.databases.schema_registry.config.with_streaming_response.retrieve_subject( @@ -350,7 +350,7 @@ async def test_streaming_response_retrieve_subject(self, async_client: AsyncGrad assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def 
test_path_params_retrieve_subject(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): @@ -365,7 +365,7 @@ async def test_path_params_retrieve_subject(self, async_client: AsyncGradient) - database_cluster_uuid="9cc10173-e9ea-4176-9dbc-a4cee4c4ff30", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_subject(self, async_client: AsyncGradient) -> None: config = await async_client.databases.schema_registry.config.update_subject( @@ -375,7 +375,7 @@ async def test_method_update_subject(self, async_client: AsyncGradient) -> None: ) assert_matches_type(ConfigUpdateSubjectResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update_subject(self, async_client: AsyncGradient) -> None: response = await async_client.databases.schema_registry.config.with_raw_response.update_subject( @@ -389,7 +389,7 @@ async def test_raw_response_update_subject(self, async_client: AsyncGradient) -> config = await response.parse() assert_matches_type(ConfigUpdateSubjectResponse, config, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update_subject(self, async_client: AsyncGradient) -> None: async with async_client.databases.schema_registry.config.with_streaming_response.update_subject( @@ -405,7 +405,7 @@ async def test_streaming_response_update_subject(self, async_client: AsyncGradie assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update_subject(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `database_cluster_uuid` but received ''"): diff --git a/tests/api_resources/gpu_droplets/account/test_keys.py b/tests/api_resources/gpu_droplets/account/test_keys.py index 5a63c275..93817d1e 100644 --- a/tests/api_resources/gpu_droplets/account/test_keys.py +++ b/tests/api_resources/gpu_droplets/account/test_keys.py @@ -22,7 +22,7 @@ class TestKeys: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: key = client.gpu_droplets.account.keys.create( @@ -31,7 +31,7 @@ def test_method_create(self, client: Gradient) -> None: ) assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.gpu_droplets.account.keys.with_raw_response.create( @@ -44,7 +44,7 @@ def test_raw_response_create(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.gpu_droplets.account.keys.with_streaming_response.create( @@ -59,7 +59,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") 
@parametrize def test_method_retrieve(self, client: Gradient) -> None: key = client.gpu_droplets.account.keys.retrieve( @@ -67,7 +67,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(KeyRetrieveResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.account.keys.with_raw_response.retrieve( @@ -79,7 +79,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyRetrieveResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.account.keys.with_streaming_response.retrieve( @@ -93,7 +93,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: key = client.gpu_droplets.account.keys.update( @@ -101,7 +101,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: key = client.gpu_droplets.account.keys.update( @@ -110,7 +110,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.gpu_droplets.account.keys.with_raw_response.update( @@ -122,7 +122,7 @@ def test_raw_response_update(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.gpu_droplets.account.keys.with_streaming_response.update( @@ -136,13 +136,13 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: key = client.gpu_droplets.account.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: key = client.gpu_droplets.account.keys.list( @@ -151,7 +151,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.account.keys.with_raw_response.list() @@ -161,7 +161,7 @@ def test_raw_response_list(self, client: Gradient) -> None: key = response.parse() assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") 
@parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.account.keys.with_streaming_response.list() as response: @@ -173,7 +173,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: key = client.gpu_droplets.account.keys.delete( @@ -181,7 +181,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert key is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.account.keys.with_raw_response.delete( @@ -193,7 +193,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: key = response.parse() assert key is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.account.keys.with_streaming_response.delete( @@ -213,7 +213,7 @@ class TestAsyncKeys: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: key = await async_client.gpu_droplets.account.keys.create( @@ -222,7 +222,7 @@ async def test_method_create(self, async_client: AsyncGradient) -> None: ) assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.account.keys.with_raw_response.create( @@ -235,7 +235,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: key = await response.parse() assert_matches_type(KeyCreateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.account.keys.with_streaming_response.create( @@ -250,7 +250,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: key = await async_client.gpu_droplets.account.keys.retrieve( @@ -258,7 +258,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(KeyRetrieveResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.account.keys.with_raw_response.retrieve( @@ -270,7 +270,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: key = await response.parse() assert_matches_type(KeyRetrieveResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async 
with async_client.gpu_droplets.account.keys.with_streaming_response.retrieve( @@ -284,7 +284,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: key = await async_client.gpu_droplets.account.keys.update( @@ -292,7 +292,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.gpu_droplets.account.keys.update( @@ -301,7 +301,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.account.keys.with_raw_response.update( @@ -313,7 +313,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: key = await response.parse() assert_matches_type(KeyUpdateResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.account.keys.with_streaming_response.update( @@ -327,13 +327,13 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: key = await async_client.gpu_droplets.account.keys.list() assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: key = await async_client.gpu_droplets.account.keys.list( @@ -342,7 +342,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.account.keys.with_raw_response.list() @@ -352,7 +352,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: key = await response.parse() assert_matches_type(KeyListResponse, key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.account.keys.with_streaming_response.list() as response: @@ -364,7 +364,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: key = await 
async_client.gpu_droplets.account.keys.delete( @@ -372,7 +372,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert key is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.account.keys.with_raw_response.delete( @@ -384,7 +384,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: key = await response.parse() assert key is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.account.keys.with_streaming_response.delete( diff --git a/tests/api_resources/gpu_droplets/firewalls/test_droplets.py b/tests/api_resources/gpu_droplets/firewalls/test_droplets.py index 8f39a064..693e315d 100644 --- a/tests/api_resources/gpu_droplets/firewalls/test_droplets.py +++ b/tests/api_resources/gpu_droplets/firewalls/test_droplets.py @@ -15,7 +15,7 @@ class TestDroplets: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_add(self, client: Gradient) -> None: droplet = client.gpu_droplets.firewalls.droplets.add( @@ -24,7 +24,7 @@ def test_method_add(self, client: Gradient) -> None: ) assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_add(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.droplets.with_raw_response.add( @@ -37,7 +37,7 @@ def test_raw_response_add(self, client: Gradient) -> None: droplet = response.parse() assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_add(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.droplets.with_streaming_response.add( @@ -52,7 +52,7 @@ def test_streaming_response_add(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_add(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): @@ -61,7 +61,7 @@ def test_path_params_add(self, client: Gradient) -> None: droplet_ids=[49696269], ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_remove(self, client: Gradient) -> None: droplet = client.gpu_droplets.firewalls.droplets.remove( @@ -70,7 +70,7 @@ def test_method_remove(self, client: Gradient) -> None: ) assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_remove(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.droplets.with_raw_response.remove( @@ -83,7 +83,7 @@ def test_raw_response_remove(self, client: Gradient) -> None: droplet = response.parse() assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_remove(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.droplets.with_streaming_response.remove( @@ -98,7 +98,7 @@ def 
test_streaming_response_remove(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_remove(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): @@ -113,7 +113,7 @@ class TestAsyncDroplets: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_add(self, async_client: AsyncGradient) -> None: droplet = await async_client.gpu_droplets.firewalls.droplets.add( @@ -122,7 +122,7 @@ async def test_method_add(self, async_client: AsyncGradient) -> None: ) assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_add(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.droplets.with_raw_response.add( @@ -135,7 +135,7 @@ async def test_raw_response_add(self, async_client: AsyncGradient) -> None: droplet = await response.parse() assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_add(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.droplets.with_streaming_response.add( @@ -150,7 +150,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradient) -> None assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_add(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): @@ -159,7 +159,7 @@ async def test_path_params_add(self, async_client: AsyncGradient) -> None: droplet_ids=[49696269], ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_remove(self, async_client: AsyncGradient) -> None: droplet = await async_client.gpu_droplets.firewalls.droplets.remove( @@ -168,7 +168,7 @@ async def test_method_remove(self, async_client: AsyncGradient) -> None: ) assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.droplets.with_raw_response.remove( @@ -181,7 +181,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: droplet = await response.parse() assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.droplets.with_streaming_response.remove( @@ -196,7 +196,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_remove(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): diff --git 
a/tests/api_resources/gpu_droplets/firewalls/test_rules.py b/tests/api_resources/gpu_droplets/firewalls/test_rules.py index 2bd74228..27694390 100644 --- a/tests/api_resources/gpu_droplets/firewalls/test_rules.py +++ b/tests/api_resources/gpu_droplets/firewalls/test_rules.py @@ -15,7 +15,7 @@ class TestRules: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_add(self, client: Gradient) -> None: rule = client.gpu_droplets.firewalls.rules.add( @@ -23,7 +23,7 @@ def test_method_add(self, client: Gradient) -> None: ) assert rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_add_with_all_params(self, client: Gradient) -> None: rule = client.gpu_droplets.firewalls.rules.add( @@ -57,7 +57,7 @@ def test_method_add_with_all_params(self, client: Gradient) -> None: ) assert rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_add(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.rules.with_raw_response.add( @@ -69,7 +69,7 @@ def test_raw_response_add(self, client: Gradient) -> None: rule = response.parse() assert rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_add(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.rules.with_streaming_response.add( @@ -83,7 +83,7 @@ def test_streaming_response_add(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_add(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): @@ -91,7 +91,7 @@ def test_path_params_add(self, client: Gradient) -> None: firewall_id="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_remove(self, client: Gradient) -> None: rule = client.gpu_droplets.firewalls.rules.remove( @@ -99,7 +99,7 @@ def test_method_remove(self, client: Gradient) -> None: ) assert rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_remove_with_all_params(self, client: Gradient) -> None: rule = client.gpu_droplets.firewalls.rules.remove( @@ -133,7 +133,7 @@ def test_method_remove_with_all_params(self, client: Gradient) -> None: ) assert rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_remove(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.rules.with_raw_response.remove( @@ -145,7 +145,7 @@ def test_raw_response_remove(self, client: Gradient) -> None: rule = response.parse() assert rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_remove(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.rules.with_streaming_response.remove( @@ -159,7 +159,7 @@ def test_streaming_response_remove(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_remove(self, client: Gradient) -> None: with 
pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): @@ -173,7 +173,7 @@ class TestAsyncRules: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_add(self, async_client: AsyncGradient) -> None: rule = await async_client.gpu_droplets.firewalls.rules.add( @@ -181,7 +181,7 @@ async def test_method_add(self, async_client: AsyncGradient) -> None: ) assert rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_add_with_all_params(self, async_client: AsyncGradient) -> None: rule = await async_client.gpu_droplets.firewalls.rules.add( @@ -215,7 +215,7 @@ async def test_method_add_with_all_params(self, async_client: AsyncGradient) -> ) assert rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_add(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.rules.with_raw_response.add( @@ -227,7 +227,7 @@ async def test_raw_response_add(self, async_client: AsyncGradient) -> None: rule = await response.parse() assert rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_add(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.rules.with_streaming_response.add( @@ -241,7 +241,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradient) -> None assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_add(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): @@ -249,7 +249,7 @@ async def test_path_params_add(self, async_client: AsyncGradient) -> None: firewall_id="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_remove(self, async_client: AsyncGradient) -> None: rule = await async_client.gpu_droplets.firewalls.rules.remove( @@ -257,7 +257,7 @@ async def test_method_remove(self, async_client: AsyncGradient) -> None: ) assert rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_remove_with_all_params(self, async_client: AsyncGradient) -> None: rule = await async_client.gpu_droplets.firewalls.rules.remove( @@ -291,7 +291,7 @@ async def test_method_remove_with_all_params(self, async_client: AsyncGradient) ) assert rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.rules.with_raw_response.remove( @@ -303,7 +303,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: rule = await response.parse() assert rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.rules.with_streaming_response.remove( @@ -317,7 +317,7 @@ async def test_streaming_response_remove(self, 
async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_remove(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): diff --git a/tests/api_resources/gpu_droplets/firewalls/test_tags.py b/tests/api_resources/gpu_droplets/firewalls/test_tags.py index cbd86f65..50c7563b 100644 --- a/tests/api_resources/gpu_droplets/firewalls/test_tags.py +++ b/tests/api_resources/gpu_droplets/firewalls/test_tags.py @@ -15,7 +15,7 @@ class TestTags: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_add(self, client: Gradient) -> None: tag = client.gpu_droplets.firewalls.tags.add( @@ -24,7 +24,7 @@ def test_method_add(self, client: Gradient) -> None: ) assert tag is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_add(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.tags.with_raw_response.add( @@ -37,7 +37,7 @@ def test_raw_response_add(self, client: Gradient) -> None: tag = response.parse() assert tag is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_add(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.tags.with_streaming_response.add( @@ -52,7 +52,7 @@ def test_streaming_response_add(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_add(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): @@ -61,7 +61,7 @@ def test_path_params_add(self, client: Gradient) -> None: tags=["frontend"], ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_remove(self, client: Gradient) -> None: tag = client.gpu_droplets.firewalls.tags.remove( @@ -70,7 +70,7 @@ def test_method_remove(self, client: Gradient) -> None: ) assert tag is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_remove(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.tags.with_raw_response.remove( @@ -83,7 +83,7 @@ def test_raw_response_remove(self, client: Gradient) -> None: tag = response.parse() assert tag is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_remove(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.tags.with_streaming_response.remove( @@ -98,7 +98,7 @@ def test_streaming_response_remove(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_remove(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): @@ -113,7 +113,7 @@ class TestAsyncTags: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests 
are disabled") @parametrize async def test_method_add(self, async_client: AsyncGradient) -> None: tag = await async_client.gpu_droplets.firewalls.tags.add( @@ -122,7 +122,7 @@ async def test_method_add(self, async_client: AsyncGradient) -> None: ) assert tag is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_add(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.tags.with_raw_response.add( @@ -135,7 +135,7 @@ async def test_raw_response_add(self, async_client: AsyncGradient) -> None: tag = await response.parse() assert tag is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_add(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.tags.with_streaming_response.add( @@ -150,7 +150,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradient) -> None assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_add(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): @@ -159,7 +159,7 @@ async def test_path_params_add(self, async_client: AsyncGradient) -> None: tags=["frontend"], ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_remove(self, async_client: AsyncGradient) -> None: tag = await async_client.gpu_droplets.firewalls.tags.remove( @@ -168,7 +168,7 @@ async def test_method_remove(self, async_client: AsyncGradient) -> None: ) assert tag is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.tags.with_raw_response.remove( @@ -181,7 +181,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: tag = await response.parse() assert tag is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.tags.with_streaming_response.remove( @@ -196,7 +196,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_remove(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): diff --git a/tests/api_resources/gpu_droplets/floating_ips/test_actions.py b/tests/api_resources/gpu_droplets/floating_ips/test_actions.py index 9417a880..0d678103 100644 --- a/tests/api_resources/gpu_droplets/floating_ips/test_actions.py +++ b/tests/api_resources/gpu_droplets/floating_ips/test_actions.py @@ -21,7 +21,7 @@ class TestActions: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.floating_ips.actions.create( @@ -30,7 +30,7 @@ def 
test_method_create_overload_1(self, client: Gradient) -> None: ) assert_matches_type(ActionCreateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.actions.with_raw_response.create( @@ -43,7 +43,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionCreateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.actions.with_streaming_response.create( @@ -58,7 +58,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_create_overload_1(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): @@ -67,7 +67,7 @@ def test_path_params_create_overload_1(self, client: Gradient) -> None: type="assign", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.floating_ips.actions.create( @@ -77,7 +77,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None: ) assert_matches_type(ActionCreateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.actions.with_raw_response.create( @@ -91,7 +91,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionCreateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.actions.with_streaming_response.create( @@ -107,7 +107,7 @@ def test_streaming_response_create_overload_2(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_create_overload_2(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): @@ -117,7 +117,7 @@ def test_path_params_create_overload_2(self, client: Gradient) -> None: type="assign", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: action = client.gpu_droplets.floating_ips.actions.retrieve( @@ -126,7 +126,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(ActionRetrieveResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.actions.with_raw_response.retrieve( @@ -139,7 +139,7 @@ def test_raw_response_retrieve(self, client: Gradient) 
-> None: action = response.parse() assert_matches_type(ActionRetrieveResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.actions.with_streaming_response.retrieve( @@ -154,7 +154,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): @@ -163,7 +163,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: floating_ip="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: action = client.gpu_droplets.floating_ips.actions.list( @@ -171,7 +171,7 @@ def test_method_list(self, client: Gradient) -> None: ) assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.actions.with_raw_response.list( @@ -183,7 +183,7 @@ def test_raw_response_list(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.actions.with_streaming_response.list( @@ -197,7 +197,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_list(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): @@ -211,7 +211,7 @@ class TestAsyncActions: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.floating_ips.actions.create( @@ -220,7 +220,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No ) assert_matches_type(ActionCreateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.create( @@ -233,7 +233,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) action = await response.parse() assert_matches_type(ActionCreateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.create( @@ -248,7 +248,7 @@ async def 
test_streaming_response_create_overload_1(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_create_overload_1(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): @@ -257,7 +257,7 @@ async def test_path_params_create_overload_1(self, async_client: AsyncGradient) type="assign", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.floating_ips.actions.create( @@ -267,7 +267,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No ) assert_matches_type(ActionCreateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.create( @@ -281,7 +281,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) action = await response.parse() assert_matches_type(ActionCreateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.create( @@ -297,7 +297,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_create_overload_2(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): @@ -307,7 +307,7 @@ async def test_path_params_create_overload_2(self, async_client: AsyncGradient) type="assign", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.floating_ips.actions.retrieve( @@ -316,7 +316,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(ActionRetrieveResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.retrieve( @@ -329,7 +329,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: action = await response.parse() assert_matches_type(ActionRetrieveResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.retrieve( @@ -344,7 +344,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): @@ -353,7 +353,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: floating_ip="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.floating_ips.actions.list( @@ -361,7 +361,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None: ) assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.actions.with_raw_response.list( @@ -373,7 +373,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: action = await response.parse() assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.actions.with_streaming_response.list( @@ -387,7 +387,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): diff --git a/tests/api_resources/gpu_droplets/images/test_actions.py b/tests/api_resources/gpu_droplets/images/test_actions.py index f59e3986..ad8b9585 100644 --- a/tests/api_resources/gpu_droplets/images/test_actions.py +++ b/tests/api_resources/gpu_droplets/images/test_actions.py @@ -18,7 +18,7 @@ class TestActions: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.images.actions.create( @@ -27,7 +27,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None: ) assert_matches_type(Action, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.images.actions.with_raw_response.create( @@ -40,7 +40,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None: action = response.parse() assert_matches_type(Action, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.images.actions.with_streaming_response.create( @@ -55,7 +55,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def 
test_method_create_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.images.actions.create( @@ -65,7 +65,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None: ) assert_matches_type(Action, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.images.actions.with_raw_response.create( @@ -79,7 +79,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None: action = response.parse() assert_matches_type(Action, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.images.actions.with_streaming_response.create( @@ -95,7 +95,7 @@ def test_streaming_response_create_overload_2(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: action = client.gpu_droplets.images.actions.retrieve( @@ -104,7 +104,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(Action, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.images.actions.with_raw_response.retrieve( @@ -117,7 +117,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: action = response.parse() assert_matches_type(Action, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.images.actions.with_streaming_response.retrieve( @@ -132,7 +132,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: action = client.gpu_droplets.images.actions.list( @@ -140,7 +140,7 @@ def test_method_list(self, client: Gradient) -> None: ) assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.images.actions.with_raw_response.list( @@ -152,7 +152,7 @@ def test_raw_response_list(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.images.actions.with_streaming_response.list( @@ -172,7 +172,7 @@ class TestAsyncActions: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.images.actions.create( @@ -181,7 +181,7 @@ async def 
test_method_create_overload_1(self, async_client: AsyncGradient) -> No ) assert_matches_type(Action, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.actions.with_raw_response.create( @@ -194,7 +194,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) action = await response.parse() assert_matches_type(Action, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.actions.with_streaming_response.create( @@ -209,7 +209,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.images.actions.create( @@ -219,7 +219,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No ) assert_matches_type(Action, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.actions.with_raw_response.create( @@ -233,7 +233,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) action = await response.parse() assert_matches_type(Action, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.actions.with_streaming_response.create( @@ -249,7 +249,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.images.actions.retrieve( @@ -258,7 +258,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(Action, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.actions.with_raw_response.retrieve( @@ -271,7 +271,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: action = await response.parse() assert_matches_type(Action, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.actions.with_streaming_response.retrieve( @@ -286,7 +286,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism 
tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.images.actions.list( @@ -294,7 +294,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None: ) assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.actions.with_raw_response.list( @@ -306,7 +306,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: action = await response.parse() assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.actions.with_streaming_response.list( diff --git a/tests/api_resources/gpu_droplets/load_balancers/test_droplets.py b/tests/api_resources/gpu_droplets/load_balancers/test_droplets.py index 200dad39..e6eefd23 100644 --- a/tests/api_resources/gpu_droplets/load_balancers/test_droplets.py +++ b/tests/api_resources/gpu_droplets/load_balancers/test_droplets.py @@ -15,7 +15,7 @@ class TestDroplets: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_add(self, client: Gradient) -> None: droplet = client.gpu_droplets.load_balancers.droplets.add( @@ -24,7 +24,7 @@ def test_method_add(self, client: Gradient) -> None: ) assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_add(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.droplets.with_raw_response.add( @@ -37,7 +37,7 @@ def test_raw_response_add(self, client: Gradient) -> None: droplet = response.parse() assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_add(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.droplets.with_streaming_response.add( @@ -52,7 +52,7 @@ def test_streaming_response_add(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_add(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -61,7 +61,7 @@ def test_path_params_add(self, client: Gradient) -> None: droplet_ids=[3164444, 3164445], ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_remove(self, client: Gradient) -> None: droplet = client.gpu_droplets.load_balancers.droplets.remove( @@ -70,7 +70,7 @@ def test_method_remove(self, client: Gradient) -> None: ) assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_remove(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.droplets.with_raw_response.remove( @@ -83,7 +83,7 @@ def test_raw_response_remove(self, client: Gradient) -> None: droplet = response.parse() assert droplet is None - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_remove(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.droplets.with_streaming_response.remove( @@ -98,7 +98,7 @@ def test_streaming_response_remove(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_remove(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -113,7 +113,7 @@ class TestAsyncDroplets: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_add(self, async_client: AsyncGradient) -> None: droplet = await async_client.gpu_droplets.load_balancers.droplets.add( @@ -122,7 +122,7 @@ async def test_method_add(self, async_client: AsyncGradient) -> None: ) assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_add(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.droplets.with_raw_response.add( @@ -135,7 +135,7 @@ async def test_raw_response_add(self, async_client: AsyncGradient) -> None: droplet = await response.parse() assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_add(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.droplets.with_streaming_response.add( @@ -150,7 +150,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradient) -> None assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_add(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -159,7 +159,7 @@ async def test_path_params_add(self, async_client: AsyncGradient) -> None: droplet_ids=[3164444, 3164445], ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_remove(self, async_client: AsyncGradient) -> None: droplet = await async_client.gpu_droplets.load_balancers.droplets.remove( @@ -168,7 +168,7 @@ async def test_method_remove(self, async_client: AsyncGradient) -> None: ) assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.droplets.with_raw_response.remove( @@ -181,7 +181,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: droplet = await response.parse() assert droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.droplets.with_streaming_response.remove( @@ -196,7 +196,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are 
disabled") @parametrize async def test_path_params_remove(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): diff --git a/tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py b/tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py index 4f1decdf..a3cc0bd1 100644 --- a/tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py +++ b/tests/api_resources/gpu_droplets/load_balancers/test_forwarding_rules.py @@ -15,7 +15,7 @@ class TestForwardingRules: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_add(self, client: Gradient) -> None: forwarding_rule = client.gpu_droplets.load_balancers.forwarding_rules.add( @@ -31,7 +31,7 @@ def test_method_add(self, client: Gradient) -> None: ) assert forwarding_rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_add(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.add( @@ -51,7 +51,7 @@ def test_raw_response_add(self, client: Gradient) -> None: forwarding_rule = response.parse() assert forwarding_rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_add(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.add( @@ -73,7 +73,7 @@ def test_streaming_response_add(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_add(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -89,7 +89,7 @@ def test_path_params_add(self, client: Gradient) -> None: ], ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_remove(self, client: Gradient) -> None: forwarding_rule = client.gpu_droplets.load_balancers.forwarding_rules.remove( @@ -105,7 +105,7 @@ def test_method_remove(self, client: Gradient) -> None: ) assert forwarding_rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_remove(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.remove( @@ -125,7 +125,7 @@ def test_raw_response_remove(self, client: Gradient) -> None: forwarding_rule = response.parse() assert forwarding_rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_remove(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.remove( @@ -147,7 +147,7 @@ def test_streaming_response_remove(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_remove(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -169,7 +169,7 @@ class TestAsyncForwardingRules: "async_client", [False, True, {"http_client": "aiohttp"}], 
indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_add(self, async_client: AsyncGradient) -> None: forwarding_rule = await async_client.gpu_droplets.load_balancers.forwarding_rules.add( @@ -185,7 +185,7 @@ async def test_method_add(self, async_client: AsyncGradient) -> None: ) assert forwarding_rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_add(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.add( @@ -205,7 +205,7 @@ async def test_raw_response_add(self, async_client: AsyncGradient) -> None: forwarding_rule = await response.parse() assert forwarding_rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_add(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.add( @@ -227,7 +227,7 @@ async def test_streaming_response_add(self, async_client: AsyncGradient) -> None assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_add(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -243,7 +243,7 @@ async def test_path_params_add(self, async_client: AsyncGradient) -> None: ], ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_remove(self, async_client: AsyncGradient) -> None: forwarding_rule = await async_client.gpu_droplets.load_balancers.forwarding_rules.remove( @@ -259,7 +259,7 @@ async def test_method_remove(self, async_client: AsyncGradient) -> None: ) assert forwarding_rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.forwarding_rules.with_raw_response.remove( @@ -279,7 +279,7 @@ async def test_raw_response_remove(self, async_client: AsyncGradient) -> None: forwarding_rule = await response.parse() assert forwarding_rule is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_remove(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.forwarding_rules.with_streaming_response.remove( @@ -301,7 +301,7 @@ async def test_streaming_response_remove(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_remove(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): diff --git a/tests/api_resources/gpu_droplets/test_actions.py b/tests/api_resources/gpu_droplets/test_actions.py index 7a52c608..e514196b 100644 --- a/tests/api_resources/gpu_droplets/test_actions.py +++ b/tests/api_resources/gpu_droplets/test_actions.py @@ -22,7 +22,7 @@ class TestActions: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - 
@pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: action = client.gpu_droplets.actions.retrieve( @@ -31,7 +31,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(ActionRetrieveResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.retrieve( @@ -44,7 +44,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionRetrieveResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.retrieve( @@ -59,7 +59,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: action = client.gpu_droplets.actions.list( @@ -67,7 +67,7 @@ def test_method_list(self, client: Gradient) -> None: ) assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: action = client.gpu_droplets.actions.list( @@ -77,7 +77,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.list( @@ -89,7 +89,7 @@ def test_raw_response_list(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.list( @@ -103,7 +103,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_bulk_initiate_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.actions.bulk_initiate( @@ -111,7 +111,7 @@ def test_method_bulk_initiate_overload_1(self, client: Gradient) -> None: ) assert_matches_type(ActionBulkInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_bulk_initiate_with_all_params_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.actions.bulk_initiate( @@ -120,7 +120,7 @@ def test_method_bulk_initiate_with_all_params_overload_1(self, client: Gradient) ) assert_matches_type(ActionBulkInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_bulk_initiate_overload_1(self, client: Gradient) -> None: response = 
client.gpu_droplets.actions.with_raw_response.bulk_initiate( @@ -132,7 +132,7 @@ def test_raw_response_bulk_initiate_overload_1(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionBulkInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_bulk_initiate_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.bulk_initiate( @@ -146,7 +146,7 @@ def test_streaming_response_bulk_initiate_overload_1(self, client: Gradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_bulk_initiate_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.actions.bulk_initiate( @@ -154,7 +154,7 @@ def test_method_bulk_initiate_overload_2(self, client: Gradient) -> None: ) assert_matches_type(ActionBulkInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_bulk_initiate_with_all_params_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.actions.bulk_initiate( @@ -164,7 +164,7 @@ def test_method_bulk_initiate_with_all_params_overload_2(self, client: Gradient) ) assert_matches_type(ActionBulkInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_bulk_initiate_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.bulk_initiate( @@ -176,7 +176,7 @@ def test_raw_response_bulk_initiate_overload_2(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionBulkInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_bulk_initiate_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.bulk_initiate( @@ -190,7 +190,7 @@ def test_streaming_response_bulk_initiate_overload_2(self, client: Gradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -199,7 +199,7 @@ def test_method_initiate_overload_1(self, client: Gradient) -> None: ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_initiate_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( @@ -212,7 +212,7 @@ def test_raw_response_initiate_overload_1(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_initiate_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( @@ -227,7 +227,7 @@ def test_streaming_response_initiate_overload_1(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize 
def test_method_initiate_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -236,7 +236,7 @@ def test_method_initiate_overload_2(self, client: Gradient) -> None: ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_with_all_params_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -250,7 +250,7 @@ def test_method_initiate_with_all_params_overload_2(self, client: Gradient) -> N ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_initiate_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( @@ -263,7 +263,7 @@ def test_raw_response_initiate_overload_2(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_initiate_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( @@ -278,7 +278,7 @@ def test_streaming_response_initiate_overload_2(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_overload_3(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -287,7 +287,7 @@ def test_method_initiate_overload_3(self, client: Gradient) -> None: ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_with_all_params_overload_3(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -301,7 +301,7 @@ def test_method_initiate_with_all_params_overload_3(self, client: Gradient) -> N ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_initiate_overload_3(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( @@ -314,7 +314,7 @@ def test_raw_response_initiate_overload_3(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_initiate_overload_3(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( @@ -329,7 +329,7 @@ def test_streaming_response_initiate_overload_3(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_overload_4(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -338,7 +338,7 @@ def test_method_initiate_overload_4(self, client: Gradient) -> None: ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def 
test_method_initiate_with_all_params_overload_4(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -348,7 +348,7 @@ def test_method_initiate_with_all_params_overload_4(self, client: Gradient) -> N ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_initiate_overload_4(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( @@ -361,7 +361,7 @@ def test_raw_response_initiate_overload_4(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_initiate_overload_4(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( @@ -376,7 +376,7 @@ def test_streaming_response_initiate_overload_4(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_overload_5(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -385,7 +385,7 @@ def test_method_initiate_overload_5(self, client: Gradient) -> None: ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_with_all_params_overload_5(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -396,7 +396,7 @@ def test_method_initiate_with_all_params_overload_5(self, client: Gradient) -> N ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_initiate_overload_5(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( @@ -409,7 +409,7 @@ def test_raw_response_initiate_overload_5(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_initiate_overload_5(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( @@ -424,7 +424,7 @@ def test_streaming_response_initiate_overload_5(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_overload_6(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -433,7 +433,7 @@ def test_method_initiate_overload_6(self, client: Gradient) -> None: ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_with_all_params_overload_6(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -443,7 +443,7 @@ def test_method_initiate_with_all_params_overload_6(self, client: Gradient) -> N ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def 
test_raw_response_initiate_overload_6(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( @@ -456,7 +456,7 @@ def test_raw_response_initiate_overload_6(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_initiate_overload_6(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( @@ -471,7 +471,7 @@ def test_streaming_response_initiate_overload_6(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_overload_7(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -480,7 +480,7 @@ def test_method_initiate_overload_7(self, client: Gradient) -> None: ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_with_all_params_overload_7(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -490,7 +490,7 @@ def test_method_initiate_with_all_params_overload_7(self, client: Gradient) -> N ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_initiate_overload_7(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( @@ -503,7 +503,7 @@ def test_raw_response_initiate_overload_7(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_initiate_overload_7(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( @@ -518,7 +518,7 @@ def test_streaming_response_initiate_overload_7(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_overload_8(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -527,7 +527,7 @@ def test_method_initiate_overload_8(self, client: Gradient) -> None: ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_with_all_params_overload_8(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -537,7 +537,7 @@ def test_method_initiate_with_all_params_overload_8(self, client: Gradient) -> N ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_initiate_overload_8(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( @@ -550,7 +550,7 @@ def test_raw_response_initiate_overload_8(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") 
@parametrize def test_streaming_response_initiate_overload_8(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( @@ -565,7 +565,7 @@ def test_streaming_response_initiate_overload_8(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_overload_9(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -574,7 +574,7 @@ def test_method_initiate_overload_9(self, client: Gradient) -> None: ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_with_all_params_overload_9(self, client: Gradient) -> None: action = client.gpu_droplets.actions.initiate( @@ -584,7 +584,7 @@ def test_method_initiate_with_all_params_overload_9(self, client: Gradient) -> N ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_initiate_overload_9(self, client: Gradient) -> None: response = client.gpu_droplets.actions.with_raw_response.initiate( @@ -597,7 +597,7 @@ def test_raw_response_initiate_overload_9(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_initiate_overload_9(self, client: Gradient) -> None: with client.gpu_droplets.actions.with_streaming_response.initiate( @@ -618,7 +618,7 @@ class TestAsyncActions: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.retrieve( @@ -627,7 +627,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(ActionRetrieveResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.retrieve( @@ -640,7 +640,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: action = await response.parse() assert_matches_type(ActionRetrieveResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.retrieve( @@ -655,7 +655,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.list( @@ -663,7 +663,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None: ) assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.list( @@ -673,7 +673,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.list( @@ -685,7 +685,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: action = await response.parse() assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.list( @@ -699,7 +699,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_bulk_initiate_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.bulk_initiate( @@ -707,7 +707,7 @@ async def test_method_bulk_initiate_overload_1(self, async_client: AsyncGradient ) assert_matches_type(ActionBulkInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_bulk_initiate_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.bulk_initiate( @@ -716,7 +716,7 @@ async def test_method_bulk_initiate_with_all_params_overload_1(self, async_clien ) assert_matches_type(ActionBulkInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_bulk_initiate_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.bulk_initiate( @@ -728,7 +728,7 @@ async def test_raw_response_bulk_initiate_overload_1(self, async_client: AsyncGr action = await response.parse() assert_matches_type(ActionBulkInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_bulk_initiate_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.bulk_initiate( @@ -742,7 +742,7 @@ async def test_streaming_response_bulk_initiate_overload_1(self, async_client: A assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_bulk_initiate_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.bulk_initiate( @@ -750,7 +750,7 @@ async def test_method_bulk_initiate_overload_2(self, async_client: AsyncGradient ) assert_matches_type(ActionBulkInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def 
test_method_bulk_initiate_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.bulk_initiate( @@ -760,7 +760,7 @@ async def test_method_bulk_initiate_with_all_params_overload_2(self, async_clien ) assert_matches_type(ActionBulkInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_bulk_initiate_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.bulk_initiate( @@ -772,7 +772,7 @@ async def test_raw_response_bulk_initiate_overload_2(self, async_client: AsyncGr action = await response.parse() assert_matches_type(ActionBulkInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_bulk_initiate_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.bulk_initiate( @@ -786,7 +786,7 @@ async def test_streaming_response_bulk_initiate_overload_2(self, async_client: A assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -795,7 +795,7 @@ async def test_method_initiate_overload_1(self, async_client: AsyncGradient) -> ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_initiate_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( @@ -808,7 +808,7 @@ async def test_raw_response_initiate_overload_1(self, async_client: AsyncGradien action = await response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_initiate_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( @@ -823,7 +823,7 @@ async def test_streaming_response_initiate_overload_1(self, async_client: AsyncG assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -832,7 +832,7 @@ async def test_method_initiate_overload_2(self, async_client: AsyncGradient) -> ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -846,7 +846,7 @@ async def test_method_initiate_with_all_params_overload_2(self, async_client: As ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_initiate_overload_2(self, async_client: AsyncGradient) -> None: response = 
await async_client.gpu_droplets.actions.with_raw_response.initiate( @@ -859,7 +859,7 @@ async def test_raw_response_initiate_overload_2(self, async_client: AsyncGradien action = await response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_initiate_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( @@ -874,7 +874,7 @@ async def test_streaming_response_initiate_overload_2(self, async_client: AsyncG assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_overload_3(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -883,7 +883,7 @@ async def test_method_initiate_overload_3(self, async_client: AsyncGradient) -> ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_with_all_params_overload_3(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -897,7 +897,7 @@ async def test_method_initiate_with_all_params_overload_3(self, async_client: As ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_initiate_overload_3(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( @@ -910,7 +910,7 @@ async def test_raw_response_initiate_overload_3(self, async_client: AsyncGradien action = await response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_initiate_overload_3(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( @@ -925,7 +925,7 @@ async def test_streaming_response_initiate_overload_3(self, async_client: AsyncG assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_overload_4(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -934,7 +934,7 @@ async def test_method_initiate_overload_4(self, async_client: AsyncGradient) -> ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_with_all_params_overload_4(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -944,7 +944,7 @@ async def test_method_initiate_with_all_params_overload_4(self, async_client: As ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_initiate_overload_4(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( @@ -957,7 +957,7 @@ async def 
test_raw_response_initiate_overload_4(self, async_client: AsyncGradien action = await response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_initiate_overload_4(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( @@ -972,7 +972,7 @@ async def test_streaming_response_initiate_overload_4(self, async_client: AsyncG assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_overload_5(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -981,7 +981,7 @@ async def test_method_initiate_overload_5(self, async_client: AsyncGradient) -> ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_with_all_params_overload_5(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -992,7 +992,7 @@ async def test_method_initiate_with_all_params_overload_5(self, async_client: As ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_initiate_overload_5(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( @@ -1005,7 +1005,7 @@ async def test_raw_response_initiate_overload_5(self, async_client: AsyncGradien action = await response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_initiate_overload_5(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( @@ -1020,7 +1020,7 @@ async def test_streaming_response_initiate_overload_5(self, async_client: AsyncG assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_overload_6(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -1029,7 +1029,7 @@ async def test_method_initiate_overload_6(self, async_client: AsyncGradient) -> ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_with_all_params_overload_6(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -1039,7 +1039,7 @@ async def test_method_initiate_with_all_params_overload_6(self, async_client: As ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_initiate_overload_6(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( @@ -1052,7 +1052,7 @@ async def test_raw_response_initiate_overload_6(self, async_client: AsyncGradien action = await response.parse() 
assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_initiate_overload_6(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( @@ -1067,7 +1067,7 @@ async def test_streaming_response_initiate_overload_6(self, async_client: AsyncG assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_overload_7(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -1076,7 +1076,7 @@ async def test_method_initiate_overload_7(self, async_client: AsyncGradient) -> ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_with_all_params_overload_7(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -1086,7 +1086,7 @@ async def test_method_initiate_with_all_params_overload_7(self, async_client: As ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_initiate_overload_7(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( @@ -1099,7 +1099,7 @@ async def test_raw_response_initiate_overload_7(self, async_client: AsyncGradien action = await response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_initiate_overload_7(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( @@ -1114,7 +1114,7 @@ async def test_streaming_response_initiate_overload_7(self, async_client: AsyncG assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_overload_8(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -1123,7 +1123,7 @@ async def test_method_initiate_overload_8(self, async_client: AsyncGradient) -> ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_with_all_params_overload_8(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -1133,7 +1133,7 @@ async def test_method_initiate_with_all_params_overload_8(self, async_client: As ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_initiate_overload_8(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( @@ -1146,7 +1146,7 @@ async def test_raw_response_initiate_overload_8(self, async_client: AsyncGradien action = await response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_initiate_overload_8(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( @@ -1161,7 +1161,7 @@ async def test_streaming_response_initiate_overload_8(self, async_client: AsyncG assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_overload_9(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -1170,7 +1170,7 @@ async def test_method_initiate_overload_9(self, async_client: AsyncGradient) -> ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_with_all_params_overload_9(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.actions.initiate( @@ -1180,7 +1180,7 @@ async def test_method_initiate_with_all_params_overload_9(self, async_client: As ) assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_initiate_overload_9(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.actions.with_raw_response.initiate( @@ -1193,7 +1193,7 @@ async def test_raw_response_initiate_overload_9(self, async_client: AsyncGradien action = await response.parse() assert_matches_type(ActionInitiateResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_initiate_overload_9(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.actions.with_streaming_response.initiate( diff --git a/tests/api_resources/gpu_droplets/test_autoscale.py b/tests/api_resources/gpu_droplets/test_autoscale.py index 16be3e00..cbf67b19 100644 --- a/tests/api_resources/gpu_droplets/test_autoscale.py +++ b/tests/api_resources/gpu_droplets/test_autoscale.py @@ -24,7 +24,7 @@ class TestAutoscale: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.create( @@ -42,7 +42,7 @@ def test_method_create(self, client: Gradient) -> None: ) assert_matches_type(AutoscaleCreateResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.create( @@ -70,7 +70,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(AutoscaleCreateResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.create( @@ -92,7 +92,7 @@ def test_raw_response_create(self, client: Gradient) -> None: autoscale = response.parse() assert_matches_type(AutoscaleCreateResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism 
tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.create( @@ -116,7 +116,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.retrieve( @@ -124,7 +124,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(AutoscaleRetrieveResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.retrieve( @@ -136,7 +136,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: autoscale = response.parse() assert_matches_type(AutoscaleRetrieveResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.retrieve( @@ -150,7 +150,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): @@ -158,7 +158,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.update( @@ -174,7 +174,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(AutoscaleUpdateResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.update( @@ -197,7 +197,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(AutoscaleUpdateResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.update( @@ -217,7 +217,7 @@ def test_raw_response_update(self, client: Gradient) -> None: autoscale = response.parse() assert_matches_type(AutoscaleUpdateResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.update( @@ -239,7 +239,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): @@ -255,13 
+255,13 @@ def test_path_params_update(self, client: Gradient) -> None: name="my-autoscale-pool", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.list() assert_matches_type(AutoscaleListResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.list( @@ -271,7 +271,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(AutoscaleListResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.list() @@ -281,7 +281,7 @@ def test_raw_response_list(self, client: Gradient) -> None: autoscale = response.parse() assert_matches_type(AutoscaleListResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.list() as response: @@ -293,7 +293,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.delete( @@ -301,7 +301,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert autoscale is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.delete( @@ -313,7 +313,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: autoscale = response.parse() assert autoscale is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.delete( @@ -327,7 +327,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): @@ -335,7 +335,7 @@ def test_path_params_delete(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete_dangerous(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.delete_dangerous( @@ -344,7 +344,7 @@ def test_method_delete_dangerous(self, client: Gradient) -> None: ) assert autoscale is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete_dangerous(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.delete_dangerous( @@ -357,7 +357,7 @@ def test_raw_response_delete_dangerous(self, client: Gradient) -> None: autoscale = response.parse() 
assert autoscale is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete_dangerous(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.delete_dangerous( @@ -372,7 +372,7 @@ def test_streaming_response_delete_dangerous(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete_dangerous(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): @@ -381,7 +381,7 @@ def test_path_params_delete_dangerous(self, client: Gradient) -> None: x_dangerous=True, ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_history(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.list_history( @@ -389,7 +389,7 @@ def test_method_list_history(self, client: Gradient) -> None: ) assert_matches_type(AutoscaleListHistoryResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_history_with_all_params(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.list_history( @@ -399,7 +399,7 @@ def test_method_list_history_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(AutoscaleListHistoryResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_history(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.list_history( @@ -411,7 +411,7 @@ def test_raw_response_list_history(self, client: Gradient) -> None: autoscale = response.parse() assert_matches_type(AutoscaleListHistoryResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list_history(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.list_history( @@ -425,7 +425,7 @@ def test_streaming_response_list_history(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_list_history(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): @@ -433,7 +433,7 @@ def test_path_params_list_history(self, client: Gradient) -> None: autoscale_pool_id="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_members(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.list_members( @@ -441,7 +441,7 @@ def test_method_list_members(self, client: Gradient) -> None: ) assert_matches_type(AutoscaleListMembersResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_members_with_all_params(self, client: Gradient) -> None: autoscale = client.gpu_droplets.autoscale.list_members( @@ -451,7 +451,7 @@ def test_method_list_members_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(AutoscaleListMembersResponse, autoscale, path=["response"]) - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_members(self, client: Gradient) -> None: response = client.gpu_droplets.autoscale.with_raw_response.list_members( @@ -463,7 +463,7 @@ def test_raw_response_list_members(self, client: Gradient) -> None: autoscale = response.parse() assert_matches_type(AutoscaleListMembersResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list_members(self, client: Gradient) -> None: with client.gpu_droplets.autoscale.with_streaming_response.list_members( @@ -477,7 +477,7 @@ def test_streaming_response_list_members(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_list_members(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): @@ -491,7 +491,7 @@ class TestAsyncAutoscale: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.create( @@ -509,7 +509,7 @@ async def test_method_create(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AutoscaleCreateResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.create( @@ -537,7 +537,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(AutoscaleCreateResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.create( @@ -559,7 +559,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: autoscale = await response.parse() assert_matches_type(AutoscaleCreateResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.create( @@ -583,7 +583,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.retrieve( @@ -591,7 +591,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AutoscaleRetrieveResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.retrieve( @@ -603,7 +603,7 @@ async 
def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: autoscale = await response.parse() assert_matches_type(AutoscaleRetrieveResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.retrieve( @@ -617,7 +617,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): @@ -625,7 +625,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.update( @@ -641,7 +641,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AutoscaleUpdateResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.update( @@ -664,7 +664,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(AutoscaleUpdateResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.update( @@ -684,7 +684,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: autoscale = await response.parse() assert_matches_type(AutoscaleUpdateResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.update( @@ -706,7 +706,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): @@ -722,13 +722,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: name="my-autoscale-pool", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.list() assert_matches_type(AutoscaleListResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: autoscale = await 
async_client.gpu_droplets.autoscale.list( @@ -738,7 +738,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(AutoscaleListResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.list() @@ -748,7 +748,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: autoscale = await response.parse() assert_matches_type(AutoscaleListResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.list() as response: @@ -760,7 +760,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.delete( @@ -768,7 +768,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert autoscale is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.delete( @@ -780,7 +780,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: autoscale = await response.parse() assert autoscale is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.delete( @@ -794,7 +794,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): @@ -802,7 +802,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete_dangerous(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.delete_dangerous( @@ -811,7 +811,7 @@ async def test_method_delete_dangerous(self, async_client: AsyncGradient) -> Non ) assert autoscale is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete_dangerous(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.delete_dangerous( @@ -824,7 +824,7 @@ async def test_raw_response_delete_dangerous(self, async_client: AsyncGradient) autoscale = await response.parse() assert autoscale is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def 
test_streaming_response_delete_dangerous(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.delete_dangerous( @@ -839,7 +839,7 @@ async def test_streaming_response_delete_dangerous(self, async_client: AsyncGrad assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete_dangerous(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): @@ -848,7 +848,7 @@ async def test_path_params_delete_dangerous(self, async_client: AsyncGradient) - x_dangerous=True, ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_history(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.list_history( @@ -856,7 +856,7 @@ async def test_method_list_history(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AutoscaleListHistoryResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_history_with_all_params(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.list_history( @@ -866,7 +866,7 @@ async def test_method_list_history_with_all_params(self, async_client: AsyncGrad ) assert_matches_type(AutoscaleListHistoryResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_history(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.list_history( @@ -878,7 +878,7 @@ async def test_raw_response_list_history(self, async_client: AsyncGradient) -> N autoscale = await response.parse() assert_matches_type(AutoscaleListHistoryResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_history(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.list_history( @@ -892,7 +892,7 @@ async def test_streaming_response_list_history(self, async_client: AsyncGradient assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list_history(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): @@ -900,7 +900,7 @@ async def test_path_params_list_history(self, async_client: AsyncGradient) -> No autoscale_pool_id="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_members(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.list_members( @@ -908,7 +908,7 @@ async def test_method_list_members(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AutoscaleListMembersResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_members_with_all_params(self, async_client: AsyncGradient) -> None: autoscale = await async_client.gpu_droplets.autoscale.list_members( @@ 
-918,7 +918,7 @@ async def test_method_list_members_with_all_params(self, async_client: AsyncGrad ) assert_matches_type(AutoscaleListMembersResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_members(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.autoscale.with_raw_response.list_members( @@ -930,7 +930,7 @@ async def test_raw_response_list_members(self, async_client: AsyncGradient) -> N autoscale = await response.parse() assert_matches_type(AutoscaleListMembersResponse, autoscale, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_members(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.autoscale.with_streaming_response.list_members( @@ -944,7 +944,7 @@ async def test_streaming_response_list_members(self, async_client: AsyncGradient assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list_members(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `autoscale_pool_id` but received ''"): diff --git a/tests/api_resources/gpu_droplets/test_backups.py b/tests/api_resources/gpu_droplets/test_backups.py index ecff25de..4a0d36b9 100644 --- a/tests/api_resources/gpu_droplets/test_backups.py +++ b/tests/api_resources/gpu_droplets/test_backups.py @@ -22,7 +22,7 @@ class TestBackups: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: backup = client.gpu_droplets.backups.list( @@ -30,7 +30,7 @@ def test_method_list(self, client: Gradient) -> None: ) assert_matches_type(BackupListResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: backup = client.gpu_droplets.backups.list( @@ -40,7 +40,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(BackupListResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.backups.with_raw_response.list( @@ -52,7 +52,7 @@ def test_raw_response_list(self, client: Gradient) -> None: backup = response.parse() assert_matches_type(BackupListResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.backups.with_streaming_response.list( @@ -66,13 +66,13 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_policies(self, client: Gradient) -> None: backup = client.gpu_droplets.backups.list_policies() assert_matches_type(BackupListPoliciesResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def 
test_method_list_policies_with_all_params(self, client: Gradient) -> None: backup = client.gpu_droplets.backups.list_policies( @@ -81,7 +81,7 @@ def test_method_list_policies_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(BackupListPoliciesResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_policies(self, client: Gradient) -> None: response = client.gpu_droplets.backups.with_raw_response.list_policies() @@ -91,7 +91,7 @@ def test_raw_response_list_policies(self, client: Gradient) -> None: backup = response.parse() assert_matches_type(BackupListPoliciesResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list_policies(self, client: Gradient) -> None: with client.gpu_droplets.backups.with_streaming_response.list_policies() as response: @@ -103,13 +103,13 @@ def test_streaming_response_list_policies(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_supported_policies(self, client: Gradient) -> None: backup = client.gpu_droplets.backups.list_supported_policies() assert_matches_type(BackupListSupportedPoliciesResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_supported_policies(self, client: Gradient) -> None: response = client.gpu_droplets.backups.with_raw_response.list_supported_policies() @@ -119,7 +119,7 @@ def test_raw_response_list_supported_policies(self, client: Gradient) -> None: backup = response.parse() assert_matches_type(BackupListSupportedPoliciesResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list_supported_policies(self, client: Gradient) -> None: with client.gpu_droplets.backups.with_streaming_response.list_supported_policies() as response: @@ -131,7 +131,7 @@ def test_streaming_response_list_supported_policies(self, client: Gradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve_policy(self, client: Gradient) -> None: backup = client.gpu_droplets.backups.retrieve_policy( @@ -139,7 +139,7 @@ def test_method_retrieve_policy(self, client: Gradient) -> None: ) assert_matches_type(BackupRetrievePolicyResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve_policy(self, client: Gradient) -> None: response = client.gpu_droplets.backups.with_raw_response.retrieve_policy( @@ -151,7 +151,7 @@ def test_raw_response_retrieve_policy(self, client: Gradient) -> None: backup = response.parse() assert_matches_type(BackupRetrievePolicyResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve_policy(self, client: Gradient) -> None: with client.gpu_droplets.backups.with_streaming_response.retrieve_policy( @@ -171,7 +171,7 @@ class TestAsyncBackups: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: backup = await async_client.gpu_droplets.backups.list( @@ -179,7 +179,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None: ) assert_matches_type(BackupListResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: backup = await async_client.gpu_droplets.backups.list( @@ -189,7 +189,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(BackupListResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.backups.with_raw_response.list( @@ -201,7 +201,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: backup = await response.parse() assert_matches_type(BackupListResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.backups.with_streaming_response.list( @@ -215,13 +215,13 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_policies(self, async_client: AsyncGradient) -> None: backup = await async_client.gpu_droplets.backups.list_policies() assert_matches_type(BackupListPoliciesResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_policies_with_all_params(self, async_client: AsyncGradient) -> None: backup = await async_client.gpu_droplets.backups.list_policies( @@ -230,7 +230,7 @@ async def test_method_list_policies_with_all_params(self, async_client: AsyncGra ) assert_matches_type(BackupListPoliciesResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_policies(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.backups.with_raw_response.list_policies() @@ -240,7 +240,7 @@ async def test_raw_response_list_policies(self, async_client: AsyncGradient) -> backup = await response.parse() assert_matches_type(BackupListPoliciesResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_policies(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.backups.with_streaming_response.list_policies() as response: @@ -252,13 +252,13 @@ async def test_streaming_response_list_policies(self, async_client: AsyncGradien assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_supported_policies(self, async_client: AsyncGradient) -> None: backup = await async_client.gpu_droplets.backups.list_supported_policies() assert_matches_type(BackupListSupportedPoliciesResponse, backup, 
path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_supported_policies(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.backups.with_raw_response.list_supported_policies() @@ -268,7 +268,7 @@ async def test_raw_response_list_supported_policies(self, async_client: AsyncGra backup = await response.parse() assert_matches_type(BackupListSupportedPoliciesResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_supported_policies(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.backups.with_streaming_response.list_supported_policies() as response: @@ -280,7 +280,7 @@ async def test_streaming_response_list_supported_policies(self, async_client: As assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve_policy(self, async_client: AsyncGradient) -> None: backup = await async_client.gpu_droplets.backups.retrieve_policy( @@ -288,7 +288,7 @@ async def test_method_retrieve_policy(self, async_client: AsyncGradient) -> None ) assert_matches_type(BackupRetrievePolicyResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve_policy(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.backups.with_raw_response.retrieve_policy( @@ -300,7 +300,7 @@ async def test_raw_response_retrieve_policy(self, async_client: AsyncGradient) - backup = await response.parse() assert_matches_type(BackupRetrievePolicyResponse, backup, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve_policy(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.backups.with_streaming_response.retrieve_policy( diff --git a/tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py b/tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py index 3715ced7..166206d2 100644 --- a/tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py +++ b/tests/api_resources/gpu_droplets/test_destroy_with_associated_resources.py @@ -20,7 +20,7 @@ class TestDestroyWithAssociatedResources: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.list( @@ -30,7 +30,7 @@ def test_method_list(self, client: Gradient) -> None: DestroyWithAssociatedResourceListResponse, destroy_with_associated_resource, path=["response"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.list( @@ -44,7 +44,7 @@ def test_raw_response_list(self, client: Gradient) -> None: DestroyWithAssociatedResourceListResponse, destroy_with_associated_resource, path=["response"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") 
@parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.list( @@ -60,7 +60,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_check_status(self, client: Gradient) -> None: destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.check_status( @@ -70,7 +70,7 @@ def test_method_check_status(self, client: Gradient) -> None: DestroyWithAssociatedResourceCheckStatusResponse, destroy_with_associated_resource, path=["response"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_check_status(self, client: Gradient) -> None: response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.check_status( @@ -84,7 +84,7 @@ def test_raw_response_check_status(self, client: Gradient) -> None: DestroyWithAssociatedResourceCheckStatusResponse, destroy_with_associated_resource, path=["response"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_check_status(self, client: Gradient) -> None: with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.check_status( @@ -100,7 +100,7 @@ def test_streaming_response_check_status(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete_dangerous(self, client: Gradient) -> None: destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.delete_dangerous( @@ -109,7 +109,7 @@ def test_method_delete_dangerous(self, client: Gradient) -> None: ) assert destroy_with_associated_resource is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete_dangerous(self, client: Gradient) -> None: response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_dangerous( @@ -122,7 +122,7 @@ def test_raw_response_delete_dangerous(self, client: Gradient) -> None: destroy_with_associated_resource = response.parse() assert destroy_with_associated_resource is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete_dangerous(self, client: Gradient) -> None: with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_dangerous( @@ -137,7 +137,7 @@ def test_streaming_response_delete_dangerous(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete_selective(self, client: Gradient) -> None: destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.delete_selective( @@ -145,7 +145,7 @@ def test_method_delete_selective(self, client: Gradient) -> None: ) assert destroy_with_associated_resource is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete_selective_with_all_params(self, client: Gradient) -> None: destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.delete_selective( @@ -158,7 +158,7 @@ def 
test_method_delete_selective_with_all_params(self, client: Gradient) -> None ) assert destroy_with_associated_resource is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete_selective(self, client: Gradient) -> None: response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_selective( @@ -170,7 +170,7 @@ def test_raw_response_delete_selective(self, client: Gradient) -> None: destroy_with_associated_resource = response.parse() assert destroy_with_associated_resource is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete_selective(self, client: Gradient) -> None: with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_selective( @@ -184,7 +184,7 @@ def test_streaming_response_delete_selective(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retry(self, client: Gradient) -> None: destroy_with_associated_resource = client.gpu_droplets.destroy_with_associated_resources.retry( @@ -192,7 +192,7 @@ def test_method_retry(self, client: Gradient) -> None: ) assert destroy_with_associated_resource is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retry(self, client: Gradient) -> None: response = client.gpu_droplets.destroy_with_associated_resources.with_raw_response.retry( @@ -204,7 +204,7 @@ def test_raw_response_retry(self, client: Gradient) -> None: destroy_with_associated_resource = response.parse() assert destroy_with_associated_resource is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retry(self, client: Gradient) -> None: with client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.retry( @@ -224,7 +224,7 @@ class TestAsyncDestroyWithAssociatedResources: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: destroy_with_associated_resource = await async_client.gpu_droplets.destroy_with_associated_resources.list( @@ -234,7 +234,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None: DestroyWithAssociatedResourceListResponse, destroy_with_associated_resource, path=["response"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.list( @@ -248,7 +248,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: DestroyWithAssociatedResourceListResponse, destroy_with_associated_resource, path=["response"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.list( @@ -264,7 +264,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - 
@pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_check_status(self, async_client: AsyncGradient) -> None: destroy_with_associated_resource = ( @@ -276,7 +276,7 @@ async def test_method_check_status(self, async_client: AsyncGradient) -> None: DestroyWithAssociatedResourceCheckStatusResponse, destroy_with_associated_resource, path=["response"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_check_status(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.check_status( @@ -290,7 +290,7 @@ async def test_raw_response_check_status(self, async_client: AsyncGradient) -> N DestroyWithAssociatedResourceCheckStatusResponse, destroy_with_associated_resource, path=["response"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_check_status(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.check_status( @@ -306,7 +306,7 @@ async def test_streaming_response_check_status(self, async_client: AsyncGradient assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete_dangerous(self, async_client: AsyncGradient) -> None: destroy_with_associated_resource = ( @@ -317,7 +317,7 @@ async def test_method_delete_dangerous(self, async_client: AsyncGradient) -> Non ) assert destroy_with_associated_resource is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete_dangerous(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_dangerous( @@ -330,7 +330,7 @@ async def test_raw_response_delete_dangerous(self, async_client: AsyncGradient) destroy_with_associated_resource = await response.parse() assert destroy_with_associated_resource is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete_dangerous(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_dangerous( @@ -345,7 +345,7 @@ async def test_streaming_response_delete_dangerous(self, async_client: AsyncGrad assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete_selective(self, async_client: AsyncGradient) -> None: destroy_with_associated_resource = ( @@ -355,7 +355,7 @@ async def test_method_delete_selective(self, async_client: AsyncGradient) -> Non ) assert destroy_with_associated_resource is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete_selective_with_all_params(self, async_client: AsyncGradient) -> None: destroy_with_associated_resource = ( @@ -370,7 +370,7 @@ async def test_method_delete_selective_with_all_params(self, async_client: Async ) assert destroy_with_associated_resource is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete_selective(self, async_client: AsyncGradient) -> None: 
response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.delete_selective( @@ -382,7 +382,7 @@ async def test_raw_response_delete_selective(self, async_client: AsyncGradient) destroy_with_associated_resource = await response.parse() assert destroy_with_associated_resource is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete_selective(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.delete_selective( @@ -396,7 +396,7 @@ async def test_streaming_response_delete_selective(self, async_client: AsyncGrad assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retry(self, async_client: AsyncGradient) -> None: destroy_with_associated_resource = await async_client.gpu_droplets.destroy_with_associated_resources.retry( @@ -404,7 +404,7 @@ async def test_method_retry(self, async_client: AsyncGradient) -> None: ) assert destroy_with_associated_resource is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retry(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.destroy_with_associated_resources.with_raw_response.retry( @@ -416,7 +416,7 @@ async def test_raw_response_retry(self, async_client: AsyncGradient) -> None: destroy_with_associated_resource = await response.parse() assert destroy_with_associated_resource is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retry(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.destroy_with_associated_resources.with_streaming_response.retry( diff --git a/tests/api_resources/gpu_droplets/test_firewalls.py b/tests/api_resources/gpu_droplets/test_firewalls.py index 8585a114..83142b93 100644 --- a/tests/api_resources/gpu_droplets/test_firewalls.py +++ b/tests/api_resources/gpu_droplets/test_firewalls.py @@ -22,13 +22,13 @@ class TestFirewalls: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.create() assert_matches_type(FirewallCreateResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.create( @@ -77,7 +77,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(FirewallCreateResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.with_raw_response.create() @@ -87,7 +87,7 @@ def test_raw_response_create(self, client: Gradient) -> None: firewall = response.parse() assert_matches_type(FirewallCreateResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with 
client.gpu_droplets.firewalls.with_streaming_response.create() as response: @@ -99,7 +99,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.retrieve( @@ -107,7 +107,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(FirewallRetrieveResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.with_raw_response.retrieve( @@ -119,7 +119,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: firewall = response.parse() assert_matches_type(FirewallRetrieveResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.with_streaming_response.retrieve( @@ -133,7 +133,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): @@ -141,7 +141,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.update( @@ -150,7 +150,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(FirewallUpdateResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.update( @@ -200,7 +200,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(FirewallUpdateResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.with_raw_response.update( @@ -213,7 +213,7 @@ def test_raw_response_update(self, client: Gradient) -> None: firewall = response.parse() assert_matches_type(FirewallUpdateResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.with_streaming_response.update( @@ -228,7 +228,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): @@ -237,13 +237,13 @@ def test_path_params_update(self, client: Gradient) -> None: firewall={"name": "frontend-firewall"}, ) - 
@pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.list() assert_matches_type(FirewallListResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.list( @@ -252,7 +252,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(FirewallListResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.with_raw_response.list() @@ -262,7 +262,7 @@ def test_raw_response_list(self, client: Gradient) -> None: firewall = response.parse() assert_matches_type(FirewallListResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.with_streaming_response.list() as response: @@ -274,7 +274,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: firewall = client.gpu_droplets.firewalls.delete( @@ -282,7 +282,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert firewall is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.firewalls.with_raw_response.delete( @@ -294,7 +294,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: firewall = response.parse() assert firewall is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.firewalls.with_streaming_response.delete( @@ -308,7 +308,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): @@ -322,13 +322,13 @@ class TestAsyncFirewalls: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.create() assert_matches_type(FirewallCreateResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.create( @@ -377,7 +377,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(FirewallCreateResponse, firewall, path=["response"]) - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.with_raw_response.create() @@ -387,7 +387,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: firewall = await response.parse() assert_matches_type(FirewallCreateResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.with_streaming_response.create() as response: @@ -399,7 +399,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.retrieve( @@ -407,7 +407,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(FirewallRetrieveResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.with_raw_response.retrieve( @@ -419,7 +419,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: firewall = await response.parse() assert_matches_type(FirewallRetrieveResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.with_streaming_response.retrieve( @@ -433,7 +433,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): @@ -441,7 +441,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.update( @@ -450,7 +450,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(FirewallUpdateResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.update( @@ -500,7 +500,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(FirewallUpdateResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.with_raw_response.update( @@ -513,7 
+513,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: firewall = await response.parse() assert_matches_type(FirewallUpdateResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.with_streaming_response.update( @@ -528,7 +528,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): @@ -537,13 +537,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: firewall={"name": "frontend-firewall"}, ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.list() assert_matches_type(FirewallListResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.list( @@ -552,7 +552,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(FirewallListResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.with_raw_response.list() @@ -562,7 +562,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: firewall = await response.parse() assert_matches_type(FirewallListResponse, firewall, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.firewalls.with_streaming_response.list() as response: @@ -574,7 +574,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: firewall = await async_client.gpu_droplets.firewalls.delete( @@ -582,7 +582,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert firewall is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.firewalls.with_raw_response.delete( @@ -594,7 +594,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: firewall = await response.parse() assert firewall is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with 
async_client.gpu_droplets.firewalls.with_streaming_response.delete( @@ -608,7 +608,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `firewall_id` but received ''"): diff --git a/tests/api_resources/gpu_droplets/test_floating_ips.py b/tests/api_resources/gpu_droplets/test_floating_ips.py index 9ac488d6..c252a24a 100644 --- a/tests/api_resources/gpu_droplets/test_floating_ips.py +++ b/tests/api_resources/gpu_droplets/test_floating_ips.py @@ -21,7 +21,7 @@ class TestFloatingIPs: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_1(self, client: Gradient) -> None: floating_ip = client.gpu_droplets.floating_ips.create( @@ -29,7 +29,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None: ) assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.with_raw_response.create( @@ -41,7 +41,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None: floating_ip = response.parse() assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.with_streaming_response.create( @@ -55,7 +55,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_2(self, client: Gradient) -> None: floating_ip = client.gpu_droplets.floating_ips.create( @@ -63,7 +63,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None: ) assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None: floating_ip = client.gpu_droplets.floating_ips.create( @@ -72,7 +72,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non ) assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.with_raw_response.create( @@ -84,7 +84,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None: floating_ip = response.parse() assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.with_streaming_response.create( @@ 
-98,7 +98,7 @@ def test_streaming_response_create_overload_2(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: floating_ip = client.gpu_droplets.floating_ips.retrieve( @@ -106,7 +106,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(FloatingIPRetrieveResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.with_raw_response.retrieve( @@ -118,7 +118,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: floating_ip = response.parse() assert_matches_type(FloatingIPRetrieveResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.with_streaming_response.retrieve( @@ -132,7 +132,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): @@ -140,13 +140,13 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: floating_ip = client.gpu_droplets.floating_ips.list() assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: floating_ip = client.gpu_droplets.floating_ips.list( @@ -155,7 +155,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.floating_ips.with_raw_response.list() @@ -165,7 +165,7 @@ def test_raw_response_list(self, client: Gradient) -> None: floating_ip = response.parse() assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.with_streaming_response.list() as response: @@ -177,7 +177,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: floating_ip = client.gpu_droplets.floating_ips.delete( @@ -185,7 +185,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert floating_ip is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = 
client.gpu_droplets.floating_ips.with_raw_response.delete( @@ -197,7 +197,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: floating_ip = response.parse() assert floating_ip is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.floating_ips.with_streaming_response.delete( @@ -211,7 +211,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): @@ -225,7 +225,7 @@ class TestAsyncFloatingIPs: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: floating_ip = await async_client.gpu_droplets.floating_ips.create( @@ -233,7 +233,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No ) assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.with_raw_response.create( @@ -245,7 +245,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) floating_ip = await response.parse() assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.with_streaming_response.create( @@ -259,7 +259,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: floating_ip = await async_client.gpu_droplets.floating_ips.create( @@ -267,7 +267,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No ) assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: floating_ip = await async_client.gpu_droplets.floating_ips.create( @@ -276,7 +276,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn ) assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.with_raw_response.create( @@ -288,7 +288,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) floating_ip = await response.parse() 
assert_matches_type(FloatingIPCreateResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.with_streaming_response.create( @@ -302,7 +302,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: floating_ip = await async_client.gpu_droplets.floating_ips.retrieve( @@ -310,7 +310,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(FloatingIPRetrieveResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.with_raw_response.retrieve( @@ -322,7 +322,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: floating_ip = await response.parse() assert_matches_type(FloatingIPRetrieveResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.with_streaming_response.retrieve( @@ -336,7 +336,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): @@ -344,13 +344,13 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: floating_ip = await async_client.gpu_droplets.floating_ips.list() assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: floating_ip = await async_client.gpu_droplets.floating_ips.list( @@ -359,7 +359,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.with_raw_response.list() @@ -369,7 +369,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: floating_ip = await response.parse() assert_matches_type(FloatingIPListResponse, floating_ip, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with 
async_client.gpu_droplets.floating_ips.with_streaming_response.list() as response: @@ -381,7 +381,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: floating_ip = await async_client.gpu_droplets.floating_ips.delete( @@ -389,7 +389,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert floating_ip is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.floating_ips.with_raw_response.delete( @@ -401,7 +401,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: floating_ip = await response.parse() assert floating_ip is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.floating_ips.with_streaming_response.delete( @@ -415,7 +415,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `floating_ip` but received ''"): diff --git a/tests/api_resources/gpu_droplets/test_images.py b/tests/api_resources/gpu_droplets/test_images.py index bf6bfa4f..8f81912d 100644 --- a/tests/api_resources/gpu_droplets/test_images.py +++ b/tests/api_resources/gpu_droplets/test_images.py @@ -22,13 +22,13 @@ class TestImages: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: image = client.gpu_droplets.images.create() assert_matches_type(ImageCreateResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: image = client.gpu_droplets.images.create( @@ -41,7 +41,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(ImageCreateResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.gpu_droplets.images.with_raw_response.create() @@ -51,7 +51,7 @@ def test_raw_response_create(self, client: Gradient) -> None: image = response.parse() assert_matches_type(ImageCreateResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.gpu_droplets.images.with_streaming_response.create() as response: @@ -63,7 +63,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def 
test_method_retrieve(self, client: Gradient) -> None: image = client.gpu_droplets.images.retrieve( @@ -71,7 +71,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(ImageRetrieveResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.images.with_raw_response.retrieve( @@ -83,7 +83,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: image = response.parse() assert_matches_type(ImageRetrieveResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.images.with_streaming_response.retrieve( @@ -97,7 +97,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: image = client.gpu_droplets.images.update( @@ -105,7 +105,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(ImageUpdateResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: image = client.gpu_droplets.images.update( @@ -116,7 +116,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(ImageUpdateResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.gpu_droplets.images.with_raw_response.update( @@ -128,7 +128,7 @@ def test_raw_response_update(self, client: Gradient) -> None: image = response.parse() assert_matches_type(ImageUpdateResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.gpu_droplets.images.with_streaming_response.update( @@ -142,13 +142,13 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: image = client.gpu_droplets.images.list() assert_matches_type(ImageListResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: image = client.gpu_droplets.images.list( @@ -160,7 +160,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(ImageListResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.images.with_raw_response.list() @@ -170,7 +170,7 @@ def test_raw_response_list(self, client: Gradient) -> None: image = response.parse() assert_matches_type(ImageListResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def 
test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.images.with_streaming_response.list() as response: @@ -182,7 +182,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: image = client.gpu_droplets.images.delete( @@ -190,7 +190,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert image is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.images.with_raw_response.delete( @@ -202,7 +202,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: image = response.parse() assert image is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.images.with_streaming_response.delete( @@ -222,13 +222,13 @@ class TestAsyncImages: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.create() assert_matches_type(ImageCreateResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.create( @@ -241,7 +241,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(ImageCreateResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.with_raw_response.create() @@ -251,7 +251,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: image = await response.parse() assert_matches_type(ImageCreateResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.with_streaming_response.create() as response: @@ -263,7 +263,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.retrieve( @@ -271,7 +271,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(ImageRetrieveResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.with_raw_response.retrieve( @@ -283,7 +283,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> 
None: image = await response.parse() assert_matches_type(ImageRetrieveResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.with_streaming_response.retrieve( @@ -297,7 +297,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.update( @@ -305,7 +305,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(ImageUpdateResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.update( @@ -316,7 +316,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(ImageUpdateResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.with_raw_response.update( @@ -328,7 +328,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: image = await response.parse() assert_matches_type(ImageUpdateResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.with_streaming_response.update( @@ -342,13 +342,13 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.list() assert_matches_type(ImageListResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.list( @@ -360,7 +360,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(ImageListResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.with_raw_response.list() @@ -370,7 +370,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: image = await response.parse() assert_matches_type(ImageListResponse, image, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.with_streaming_response.list() as response: @@ -382,7 +382,7 @@ async def 
test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: image = await async_client.gpu_droplets.images.delete( @@ -390,7 +390,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert image is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.images.with_raw_response.delete( @@ -402,7 +402,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: image = await response.parse() assert image is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.images.with_streaming_response.delete( diff --git a/tests/api_resources/gpu_droplets/test_load_balancers.py b/tests/api_resources/gpu_droplets/test_load_balancers.py index f660f8f3..5db3c20b 100644 --- a/tests/api_resources/gpu_droplets/test_load_balancers.py +++ b/tests/api_resources/gpu_droplets/test_load_balancers.py @@ -22,7 +22,7 @@ class TestLoadBalancers: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_1(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.create( @@ -37,7 +37,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None: ) assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.create( @@ -108,7 +108,7 @@ def test_method_create_with_all_params_overload_1(self, client: Gradient) -> Non ) assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.create( @@ -127,7 +127,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None: load_balancer = response.parse() assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.create( @@ -148,7 +148,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_2(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.create( @@ -163,7 +163,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None: ) assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"]) - 
@pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.create( @@ -234,7 +234,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non ) assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.create( @@ -253,7 +253,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None: load_balancer = response.parse() assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.create( @@ -274,7 +274,7 @@ def test_streaming_response_create_overload_2(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.retrieve( @@ -282,7 +282,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(LoadBalancerRetrieveResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.retrieve( @@ -294,7 +294,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: load_balancer = response.parse() assert_matches_type(LoadBalancerRetrieveResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.retrieve( @@ -308,7 +308,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -316,7 +316,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_overload_1(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.update( @@ -332,7 +332,7 @@ def test_method_update_overload_1(self, client: Gradient) -> None: ) assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params_overload_1(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.update( @@ -404,7 +404,7 @@ def test_method_update_with_all_params_overload_1(self, client: Gradient) -> Non ) assert_matches_type(LoadBalancerUpdateResponse, load_balancer, 
path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.update( @@ -424,7 +424,7 @@ def test_raw_response_update_overload_1(self, client: Gradient) -> None: load_balancer = response.parse() assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.update( @@ -446,7 +446,7 @@ def test_streaming_response_update_overload_1(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update_overload_1(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -462,7 +462,7 @@ def test_path_params_update_overload_1(self, client: Gradient) -> None: ], ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_overload_2(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.update( @@ -478,7 +478,7 @@ def test_method_update_overload_2(self, client: Gradient) -> None: ) assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params_overload_2(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.update( @@ -550,7 +550,7 @@ def test_method_update_with_all_params_overload_2(self, client: Gradient) -> Non ) assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.update( @@ -570,7 +570,7 @@ def test_raw_response_update_overload_2(self, client: Gradient) -> None: load_balancer = response.parse() assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.update( @@ -592,7 +592,7 @@ def test_streaming_response_update_overload_2(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update_overload_2(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -608,13 +608,13 @@ def test_path_params_update_overload_2(self, client: Gradient) -> None: ], ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.list() assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") 
@parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.list( @@ -623,7 +623,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.list() @@ -633,7 +633,7 @@ def test_raw_response_list(self, client: Gradient) -> None: load_balancer = response.parse() assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.list() as response: @@ -645,7 +645,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.delete( @@ -653,7 +653,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert load_balancer is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.delete( @@ -665,7 +665,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: load_balancer = response.parse() assert load_balancer is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.delete( @@ -679,7 +679,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -687,7 +687,7 @@ def test_path_params_delete(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete_cache(self, client: Gradient) -> None: load_balancer = client.gpu_droplets.load_balancers.delete_cache( @@ -695,7 +695,7 @@ def test_method_delete_cache(self, client: Gradient) -> None: ) assert load_balancer is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete_cache(self, client: Gradient) -> None: response = client.gpu_droplets.load_balancers.with_raw_response.delete_cache( @@ -707,7 +707,7 @@ def test_raw_response_delete_cache(self, client: Gradient) -> None: load_balancer = response.parse() assert load_balancer is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete_cache(self, client: Gradient) -> None: with client.gpu_droplets.load_balancers.with_streaming_response.delete_cache( @@ -721,7 +721,7 @@ def test_streaming_response_delete_cache(self, client: Gradient) -> None: assert cast(Any, 
response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete_cache(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -735,7 +735,7 @@ class TestAsyncLoadBalancers: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.create( @@ -750,7 +750,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No ) assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.create( @@ -821,7 +821,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn ) assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.create( @@ -840,7 +840,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) load_balancer = await response.parse() assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.create( @@ -861,7 +861,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.create( @@ -876,7 +876,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No ) assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.create( @@ -947,7 +947,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn ) assert_matches_type(LoadBalancerCreateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.create( @@ -966,7 +966,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) load_balancer = await response.parse() assert_matches_type(LoadBalancerCreateResponse, 
load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.create( @@ -987,7 +987,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.retrieve( @@ -995,7 +995,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(LoadBalancerRetrieveResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.retrieve( @@ -1007,7 +1007,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: load_balancer = await response.parse() assert_matches_type(LoadBalancerRetrieveResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.retrieve( @@ -1021,7 +1021,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -1029,7 +1029,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_overload_1(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.update( @@ -1045,7 +1045,7 @@ async def test_method_update_overload_1(self, async_client: AsyncGradient) -> No ) assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.update( @@ -1117,7 +1117,7 @@ async def test_method_update_with_all_params_overload_1(self, async_client: Asyn ) assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.update( @@ -1137,7 +1137,7 @@ async def test_raw_response_update_overload_1(self, async_client: AsyncGradient) load_balancer = await response.parse() assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.update( @@ -1159,7 +1159,7 @@ async def test_streaming_response_update_overload_1(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update_overload_1(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -1175,7 +1175,7 @@ async def test_path_params_update_overload_1(self, async_client: AsyncGradient) ], ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_overload_2(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.update( @@ -1191,7 +1191,7 @@ async def test_method_update_overload_2(self, async_client: AsyncGradient) -> No ) assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.update( @@ -1263,7 +1263,7 @@ async def test_method_update_with_all_params_overload_2(self, async_client: Asyn ) assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.update( @@ -1283,7 +1283,7 @@ async def test_raw_response_update_overload_2(self, async_client: AsyncGradient) load_balancer = await response.parse() assert_matches_type(LoadBalancerUpdateResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.update( @@ -1305,7 +1305,7 @@ async def test_streaming_response_update_overload_2(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update_overload_2(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -1321,13 +1321,13 @@ async def test_path_params_update_overload_2(self, async_client: AsyncGradient) ], ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.list() assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.list( @@ -1336,7 +1336,7 @@ async def 
test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.list() @@ -1346,7 +1346,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: load_balancer = await response.parse() assert_matches_type(LoadBalancerListResponse, load_balancer, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.list() as response: @@ -1358,7 +1358,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.delete( @@ -1366,7 +1366,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert load_balancer is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.delete( @@ -1378,7 +1378,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: load_balancer = await response.parse() assert load_balancer is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.delete( @@ -1392,7 +1392,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): @@ -1400,7 +1400,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete_cache(self, async_client: AsyncGradient) -> None: load_balancer = await async_client.gpu_droplets.load_balancers.delete_cache( @@ -1408,7 +1408,7 @@ async def test_method_delete_cache(self, async_client: AsyncGradient) -> None: ) assert load_balancer is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete_cache(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.load_balancers.with_raw_response.delete_cache( @@ -1420,7 +1420,7 @@ async def test_raw_response_delete_cache(self, async_client: AsyncGradient) -> N load_balancer = await response.parse() assert load_balancer is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete_cache(self, 
async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.load_balancers.with_streaming_response.delete_cache( @@ -1434,7 +1434,7 @@ async def test_streaming_response_delete_cache(self, async_client: AsyncGradient assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete_cache(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `lb_id` but received ''"): diff --git a/tests/api_resources/gpu_droplets/test_sizes.py b/tests/api_resources/gpu_droplets/test_sizes.py index ec934e9f..7fc4fe80 100644 --- a/tests/api_resources/gpu_droplets/test_sizes.py +++ b/tests/api_resources/gpu_droplets/test_sizes.py @@ -17,13 +17,13 @@ class TestSizes: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: size = client.gpu_droplets.sizes.list() assert_matches_type(SizeListResponse, size, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: size = client.gpu_droplets.sizes.list( @@ -32,7 +32,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(SizeListResponse, size, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.sizes.with_raw_response.list() @@ -42,7 +42,7 @@ def test_raw_response_list(self, client: Gradient) -> None: size = response.parse() assert_matches_type(SizeListResponse, size, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.sizes.with_streaming_response.list() as response: @@ -60,13 +60,13 @@ class TestAsyncSizes: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: size = await async_client.gpu_droplets.sizes.list() assert_matches_type(SizeListResponse, size, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: size = await async_client.gpu_droplets.sizes.list( @@ -75,7 +75,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(SizeListResponse, size, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.sizes.with_raw_response.list() @@ -85,7 +85,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: size = await response.parse() assert_matches_type(SizeListResponse, size, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) 
-> None: async with async_client.gpu_droplets.sizes.with_streaming_response.list() as response: diff --git a/tests/api_resources/gpu_droplets/test_snapshots.py b/tests/api_resources/gpu_droplets/test_snapshots.py index d4574ece..5f8da45a 100644 --- a/tests/api_resources/gpu_droplets/test_snapshots.py +++ b/tests/api_resources/gpu_droplets/test_snapshots.py @@ -17,7 +17,7 @@ class TestSnapshots: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: snapshot = client.gpu_droplets.snapshots.retrieve( @@ -25,7 +25,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.snapshots.with_raw_response.retrieve( @@ -37,7 +37,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: snapshot = response.parse() assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.snapshots.with_streaming_response.retrieve( @@ -51,13 +51,13 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: snapshot = client.gpu_droplets.snapshots.list() assert_matches_type(SnapshotListResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: snapshot = client.gpu_droplets.snapshots.list( @@ -67,7 +67,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(SnapshotListResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.snapshots.with_raw_response.list() @@ -77,7 +77,7 @@ def test_raw_response_list(self, client: Gradient) -> None: snapshot = response.parse() assert_matches_type(SnapshotListResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.snapshots.with_streaming_response.list() as response: @@ -89,7 +89,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: snapshot = client.gpu_droplets.snapshots.delete( @@ -97,7 +97,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert snapshot is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.snapshots.with_raw_response.delete( @@ -109,7 +109,7 @@ def 
test_raw_response_delete(self, client: Gradient) -> None: snapshot = response.parse() assert snapshot is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.snapshots.with_streaming_response.delete( @@ -129,7 +129,7 @@ class TestAsyncSnapshots: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.snapshots.retrieve( @@ -137,7 +137,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.snapshots.with_raw_response.retrieve( @@ -149,7 +149,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: snapshot = await response.parse() assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.snapshots.with_streaming_response.retrieve( @@ -163,13 +163,13 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.snapshots.list() assert_matches_type(SnapshotListResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.snapshots.list( @@ -179,7 +179,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(SnapshotListResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.snapshots.with_raw_response.list() @@ -189,7 +189,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: snapshot = await response.parse() assert_matches_type(SnapshotListResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.snapshots.with_streaming_response.list() as response: @@ -201,7 +201,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.snapshots.delete( @@ -209,7 
+209,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert snapshot is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.snapshots.with_raw_response.delete( @@ -221,7 +221,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: snapshot = await response.parse() assert snapshot is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.snapshots.with_streaming_response.delete( diff --git a/tests/api_resources/gpu_droplets/test_volumes.py b/tests/api_resources/gpu_droplets/test_volumes.py index 49436220..f9b3778c 100644 --- a/tests/api_resources/gpu_droplets/test_volumes.py +++ b/tests/api_resources/gpu_droplets/test_volumes.py @@ -21,7 +21,7 @@ class TestVolumes: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_1(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.create( @@ -31,7 +31,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None: ) assert_matches_type(VolumeCreateResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.create( @@ -46,7 +46,7 @@ def test_method_create_with_all_params_overload_1(self, client: Gradient) -> Non ) assert_matches_type(VolumeCreateResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.with_raw_response.create( @@ -60,7 +60,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None: volume = response.parse() assert_matches_type(VolumeCreateResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.volumes.with_streaming_response.create( @@ -76,7 +76,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_2(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.create( @@ -86,7 +86,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None: ) assert_matches_type(VolumeCreateResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.create( @@ -101,7 +101,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non ) assert_matches_type(VolumeCreateResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def 
test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.with_raw_response.create( @@ -115,7 +115,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) -> None: volume = response.parse() assert_matches_type(VolumeCreateResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.volumes.with_streaming_response.create( @@ -131,7 +131,7 @@ def test_streaming_response_create_overload_2(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.retrieve( @@ -139,7 +139,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(VolumeRetrieveResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.with_raw_response.retrieve( @@ -151,7 +151,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: volume = response.parse() assert_matches_type(VolumeRetrieveResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.volumes.with_streaming_response.retrieve( @@ -165,7 +165,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -173,13 +173,13 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.list() assert_matches_type(VolumeListResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.list( @@ -190,7 +190,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(VolumeListResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.with_raw_response.list() @@ -200,7 +200,7 @@ def test_raw_response_list(self, client: Gradient) -> None: volume = response.parse() assert_matches_type(VolumeListResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.volumes.with_streaming_response.list() as response: @@ -212,7 +212,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.delete( @@ -220,7 +220,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert volume is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.with_raw_response.delete( @@ -232,7 +232,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: volume = response.parse() assert volume is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.volumes.with_streaming_response.delete( @@ -246,7 +246,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -254,13 +254,13 @@ def test_path_params_delete(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete_by_name(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.delete_by_name() assert volume is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete_by_name_with_all_params(self, client: Gradient) -> None: volume = client.gpu_droplets.volumes.delete_by_name( @@ -269,7 +269,7 @@ def test_method_delete_by_name_with_all_params(self, client: Gradient) -> None: ) assert volume is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete_by_name(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.with_raw_response.delete_by_name() @@ -279,7 +279,7 @@ def test_raw_response_delete_by_name(self, client: Gradient) -> None: volume = response.parse() assert volume is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete_by_name(self, client: Gradient) -> None: with client.gpu_droplets.volumes.with_streaming_response.delete_by_name() as response: @@ -297,7 +297,7 @@ class TestAsyncVolumes: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.create( @@ -307,7 +307,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No ) assert_matches_type(VolumeCreateResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.create( @@ -322,7 +322,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn ) assert_matches_type(VolumeCreateResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests 
are disabled") @parametrize async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.with_raw_response.create( @@ -336,7 +336,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) volume = await response.parse() assert_matches_type(VolumeCreateResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.with_streaming_response.create( @@ -352,7 +352,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.create( @@ -362,7 +362,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No ) assert_matches_type(VolumeCreateResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.create( @@ -377,7 +377,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn ) assert_matches_type(VolumeCreateResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.with_raw_response.create( @@ -391,7 +391,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) volume = await response.parse() assert_matches_type(VolumeCreateResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.with_streaming_response.create( @@ -407,7 +407,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.retrieve( @@ -415,7 +415,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(VolumeRetrieveResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.with_raw_response.retrieve( @@ -427,7 +427,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: volume = await response.parse() assert_matches_type(VolumeRetrieveResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with 
async_client.gpu_droplets.volumes.with_streaming_response.retrieve( @@ -441,7 +441,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -449,13 +449,13 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.list() assert_matches_type(VolumeListResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.list( @@ -466,7 +466,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(VolumeListResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.with_raw_response.list() @@ -476,7 +476,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: volume = await response.parse() assert_matches_type(VolumeListResponse, volume, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.with_streaming_response.list() as response: @@ -488,7 +488,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.delete( @@ -496,7 +496,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert volume is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.with_raw_response.delete( @@ -508,7 +508,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: volume = await response.parse() assert volume is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.with_streaming_response.delete( @@ -522,7 +522,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -530,13 +530,13 @@ 
async def test_path_params_delete(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete_by_name(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.delete_by_name() assert volume is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete_by_name_with_all_params(self, async_client: AsyncGradient) -> None: volume = await async_client.gpu_droplets.volumes.delete_by_name( @@ -545,7 +545,7 @@ async def test_method_delete_by_name_with_all_params(self, async_client: AsyncGr ) assert volume is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete_by_name(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.with_raw_response.delete_by_name() @@ -555,7 +555,7 @@ async def test_raw_response_delete_by_name(self, async_client: AsyncGradient) -> volume = await response.parse() assert volume is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete_by_name(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.with_streaming_response.delete_by_name() as response: diff --git a/tests/api_resources/gpu_droplets/volumes/test_actions.py b/tests/api_resources/gpu_droplets/volumes/test_actions.py index 19088e9e..7159db48 100644 --- a/tests/api_resources/gpu_droplets/volumes/test_actions.py +++ b/tests/api_resources/gpu_droplets/volumes/test_actions.py @@ -22,7 +22,7 @@ class TestActions: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.retrieve( @@ -31,7 +31,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(ActionRetrieveResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve_with_all_params(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.retrieve( @@ -42,7 +42,7 @@ def test_method_retrieve_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(ActionRetrieveResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.actions.with_raw_response.retrieve( @@ -55,7 +55,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionRetrieveResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.volumes.actions.with_streaming_response.retrieve( @@ -70,7 +70,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, 
match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -79,7 +79,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: volume_id="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.list( @@ -87,7 +87,7 @@ def test_method_list(self, client: Gradient) -> None: ) assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.list( @@ -97,7 +97,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.actions.with_raw_response.list( @@ -109,7 +109,7 @@ def test_raw_response_list(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.volumes.actions.with_streaming_response.list( @@ -123,7 +123,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_list(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -131,7 +131,7 @@ def test_path_params_list(self, client: Gradient) -> None: volume_id="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_by_id_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_id( @@ -141,7 +141,7 @@ def test_method_initiate_by_id_overload_1(self, client: Gradient) -> None: ) assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_by_id_with_all_params_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_id( @@ -155,7 +155,7 @@ def test_method_initiate_by_id_with_all_params_overload_1(self, client: Gradient ) assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_initiate_by_id_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( @@ -169,7 +169,7 @@ def test_raw_response_initiate_by_id_overload_1(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_initiate_by_id_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id( @@ -185,7 +185,7 @@ def 
test_streaming_response_initiate_by_id_overload_1(self, client: Gradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_initiate_by_id_overload_1(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -195,7 +195,7 @@ def test_path_params_initiate_by_id_overload_1(self, client: Gradient) -> None: type="attach", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_by_id_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_id( @@ -205,7 +205,7 @@ def test_method_initiate_by_id_overload_2(self, client: Gradient) -> None: ) assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_by_id_with_all_params_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_id( @@ -218,7 +218,7 @@ def test_method_initiate_by_id_with_all_params_overload_2(self, client: Gradient ) assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_initiate_by_id_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( @@ -232,7 +232,7 @@ def test_raw_response_initiate_by_id_overload_2(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_initiate_by_id_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id( @@ -248,7 +248,7 @@ def test_streaming_response_initiate_by_id_overload_2(self, client: Gradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_initiate_by_id_overload_2(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -258,7 +258,7 @@ def test_path_params_initiate_by_id_overload_2(self, client: Gradient) -> None: type="attach", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_by_id_overload_3(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_id( @@ -268,7 +268,7 @@ def test_method_initiate_by_id_overload_3(self, client: Gradient) -> None: ) assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_by_id_with_all_params_overload_3(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_id( @@ -281,7 +281,7 @@ def test_method_initiate_by_id_with_all_params_overload_3(self, client: Gradient ) assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_initiate_by_id_overload_3(self, client: Gradient) 
-> None: response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( @@ -295,7 +295,7 @@ def test_raw_response_initiate_by_id_overload_3(self, client: Gradient) -> None: action = response.parse() assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_initiate_by_id_overload_3(self, client: Gradient) -> None: with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id( @@ -311,7 +311,7 @@ def test_streaming_response_initiate_by_id_overload_3(self, client: Gradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_initiate_by_id_overload_3(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -321,7 +321,7 @@ def test_path_params_initiate_by_id_overload_3(self, client: Gradient) -> None: type="attach", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_by_name_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_name( @@ -330,7 +330,7 @@ def test_method_initiate_by_name_overload_1(self, client: Gradient) -> None: ) assert_matches_type(ActionInitiateByNameResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_by_name_with_all_params_overload_1(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_name( @@ -343,7 +343,7 @@ def test_method_initiate_by_name_with_all_params_overload_1(self, client: Gradie ) assert_matches_type(ActionInitiateByNameResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_initiate_by_name_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name( @@ -356,7 +356,7 @@ def test_raw_response_initiate_by_name_overload_1(self, client: Gradient) -> Non action = response.parse() assert_matches_type(ActionInitiateByNameResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_initiate_by_name_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name( @@ -371,7 +371,7 @@ def test_streaming_response_initiate_by_name_overload_1(self, client: Gradient) assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_by_name_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_name( @@ -380,7 +380,7 @@ def test_method_initiate_by_name_overload_2(self, client: Gradient) -> None: ) assert_matches_type(ActionInitiateByNameResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_initiate_by_name_with_all_params_overload_2(self, client: Gradient) -> None: action = client.gpu_droplets.volumes.actions.initiate_by_name( @@ -392,7 +392,7 @@ def test_method_initiate_by_name_with_all_params_overload_2(self, client: Gradie ) 
assert_matches_type(ActionInitiateByNameResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_initiate_by_name_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name( @@ -405,7 +405,7 @@ def test_raw_response_initiate_by_name_overload_2(self, client: Gradient) -> Non action = response.parse() assert_matches_type(ActionInitiateByNameResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_initiate_by_name_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name( @@ -426,7 +426,7 @@ class TestAsyncActions: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.retrieve( @@ -435,7 +435,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(ActionRetrieveResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.retrieve( @@ -446,7 +446,7 @@ async def test_method_retrieve_with_all_params(self, async_client: AsyncGradient ) assert_matches_type(ActionRetrieveResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.actions.with_raw_response.retrieve( @@ -459,7 +459,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: action = await response.parse() assert_matches_type(ActionRetrieveResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.actions.with_streaming_response.retrieve( @@ -474,7 +474,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -483,7 +483,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: volume_id="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.list( @@ -491,7 +491,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None: ) assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def 
test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.list( @@ -501,7 +501,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.actions.with_raw_response.list( @@ -513,7 +513,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: action = await response.parse() assert_matches_type(ActionListResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.actions.with_streaming_response.list( @@ -527,7 +527,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -535,7 +535,7 @@ async def test_path_params_list(self, async_client: AsyncGradient) -> None: volume_id="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_by_id_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_id( @@ -545,7 +545,7 @@ async def test_method_initiate_by_id_overload_1(self, async_client: AsyncGradien ) assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_by_id_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_id( @@ -559,7 +559,7 @@ async def test_method_initiate_by_id_with_all_params_overload_1(self, async_clie ) assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_initiate_by_id_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( @@ -573,7 +573,7 @@ async def test_raw_response_initiate_by_id_overload_1(self, async_client: AsyncG action = await response.parse() assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_initiate_by_id_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id( @@ -589,7 +589,7 @@ async def test_streaming_response_initiate_by_id_overload_1(self, async_client: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_initiate_by_id_overload_1(self, async_client: AsyncGradient) -> None: with 
pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -599,7 +599,7 @@ async def test_path_params_initiate_by_id_overload_1(self, async_client: AsyncGr type="attach", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_by_id_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_id( @@ -609,7 +609,7 @@ async def test_method_initiate_by_id_overload_2(self, async_client: AsyncGradien ) assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_by_id_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_id( @@ -622,7 +622,7 @@ async def test_method_initiate_by_id_with_all_params_overload_2(self, async_clie ) assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_initiate_by_id_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( @@ -636,7 +636,7 @@ async def test_raw_response_initiate_by_id_overload_2(self, async_client: AsyncG action = await response.parse() assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_initiate_by_id_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id( @@ -652,7 +652,7 @@ async def test_streaming_response_initiate_by_id_overload_2(self, async_client: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_initiate_by_id_overload_2(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -662,7 +662,7 @@ async def test_path_params_initiate_by_id_overload_2(self, async_client: AsyncGr type="attach", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_by_id_overload_3(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_id( @@ -672,7 +672,7 @@ async def test_method_initiate_by_id_overload_3(self, async_client: AsyncGradien ) assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_by_id_with_all_params_overload_3(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_id( @@ -685,7 +685,7 @@ async def test_method_initiate_by_id_with_all_params_overload_3(self, async_clie ) assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_initiate_by_id_overload_3(self, async_client: AsyncGradient) -> None: response = await 
async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_id( @@ -699,7 +699,7 @@ async def test_raw_response_initiate_by_id_overload_3(self, async_client: AsyncG action = await response.parse() assert_matches_type(ActionInitiateByIDResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_initiate_by_id_overload_3(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_id( @@ -715,7 +715,7 @@ async def test_streaming_response_initiate_by_id_overload_3(self, async_client: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_initiate_by_id_overload_3(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -725,7 +725,7 @@ async def test_path_params_initiate_by_id_overload_3(self, async_client: AsyncGr type="attach", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_by_name_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_name( @@ -734,7 +734,7 @@ async def test_method_initiate_by_name_overload_1(self, async_client: AsyncGradi ) assert_matches_type(ActionInitiateByNameResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_by_name_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_name( @@ -747,7 +747,7 @@ async def test_method_initiate_by_name_with_all_params_overload_1(self, async_cl ) assert_matches_type(ActionInitiateByNameResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_initiate_by_name_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name( @@ -760,7 +760,7 @@ async def test_raw_response_initiate_by_name_overload_1(self, async_client: Asyn action = await response.parse() assert_matches_type(ActionInitiateByNameResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_initiate_by_name_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name( @@ -775,7 +775,7 @@ async def test_streaming_response_initiate_by_name_overload_1(self, async_client assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_by_name_overload_2(self, async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_name( @@ -784,7 +784,7 @@ async def test_method_initiate_by_name_overload_2(self, async_client: AsyncGradi ) assert_matches_type(ActionInitiateByNameResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_initiate_by_name_with_all_params_overload_2(self, 
async_client: AsyncGradient) -> None: action = await async_client.gpu_droplets.volumes.actions.initiate_by_name( @@ -796,7 +796,7 @@ async def test_method_initiate_by_name_with_all_params_overload_2(self, async_cl ) assert_matches_type(ActionInitiateByNameResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_initiate_by_name_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.actions.with_raw_response.initiate_by_name( @@ -809,7 +809,7 @@ async def test_raw_response_initiate_by_name_overload_2(self, async_client: Asyn action = await response.parse() assert_matches_type(ActionInitiateByNameResponse, action, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_initiate_by_name_overload_2(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.actions.with_streaming_response.initiate_by_name( diff --git a/tests/api_resources/gpu_droplets/volumes/test_snapshots.py b/tests/api_resources/gpu_droplets/volumes/test_snapshots.py index 5037c7bb..e3450001 100644 --- a/tests/api_resources/gpu_droplets/volumes/test_snapshots.py +++ b/tests/api_resources/gpu_droplets/volumes/test_snapshots.py @@ -21,7 +21,7 @@ class TestSnapshots: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: snapshot = client.gpu_droplets.volumes.snapshots.create( @@ -30,7 +30,7 @@ def test_method_create(self, client: Gradient) -> None: ) assert_matches_type(SnapshotCreateResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: snapshot = client.gpu_droplets.volumes.snapshots.create( @@ -40,7 +40,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(SnapshotCreateResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.snapshots.with_raw_response.create( @@ -53,7 +53,7 @@ def test_raw_response_create(self, client: Gradient) -> None: snapshot = response.parse() assert_matches_type(SnapshotCreateResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.gpu_droplets.volumes.snapshots.with_streaming_response.create( @@ -68,7 +68,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_create(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -77,7 +77,7 @@ def test_path_params_create(self, client: Gradient) -> None: name="big-data-snapshot1475261774", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: snapshot = 
client.gpu_droplets.volumes.snapshots.retrieve( @@ -85,7 +85,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.snapshots.with_raw_response.retrieve( @@ -97,7 +97,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: snapshot = response.parse() assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.volumes.snapshots.with_streaming_response.retrieve( @@ -111,7 +111,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"): @@ -119,7 +119,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: snapshot = client.gpu_droplets.volumes.snapshots.list( @@ -127,7 +127,7 @@ def test_method_list(self, client: Gradient) -> None: ) assert_matches_type(SnapshotListResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: snapshot = client.gpu_droplets.volumes.snapshots.list( @@ -137,7 +137,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(SnapshotListResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.snapshots.with_raw_response.list( @@ -149,7 +149,7 @@ def test_raw_response_list(self, client: Gradient) -> None: snapshot = response.parse() assert_matches_type(SnapshotListResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.volumes.snapshots.with_streaming_response.list( @@ -163,7 +163,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_list(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -171,7 +171,7 @@ def test_path_params_list(self, client: Gradient) -> None: volume_id="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: snapshot = client.gpu_droplets.volumes.snapshots.delete( @@ -179,7 +179,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert snapshot is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") 
@parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.volumes.snapshots.with_raw_response.delete( @@ -191,7 +191,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: snapshot = response.parse() assert snapshot is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.volumes.snapshots.with_streaming_response.delete( @@ -205,7 +205,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"): @@ -219,7 +219,7 @@ class TestAsyncSnapshots: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.volumes.snapshots.create( @@ -228,7 +228,7 @@ async def test_method_create(self, async_client: AsyncGradient) -> None: ) assert_matches_type(SnapshotCreateResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.volumes.snapshots.create( @@ -238,7 +238,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(SnapshotCreateResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.create( @@ -251,7 +251,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: snapshot = await response.parse() assert_matches_type(SnapshotCreateResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.create( @@ -266,7 +266,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_create(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -275,7 +275,7 @@ async def test_path_params_create(self, async_client: AsyncGradient) -> None: name="big-data-snapshot1475261774", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.volumes.snapshots.retrieve( @@ -283,7 +283,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(SnapshotRetrieveResponse, snapshot, 
path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.retrieve( @@ -295,7 +295,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: snapshot = await response.parse() assert_matches_type(SnapshotRetrieveResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.retrieve( @@ -309,7 +309,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"): @@ -317,7 +317,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.volumes.snapshots.list( @@ -325,7 +325,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None: ) assert_matches_type(SnapshotListResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: snapshot = await async_client.gpu_droplets.volumes.snapshots.list( @@ -335,7 +335,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(SnapshotListResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.list( @@ -347,7 +347,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: snapshot = await response.parse() assert_matches_type(SnapshotListResponse, snapshot, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.list( @@ -361,7 +361,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `volume_id` but received ''"): @@ -369,7 +369,7 @@ async def test_path_params_list(self, async_client: AsyncGradient) -> None: volume_id="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: snapshot = await 
async_client.gpu_droplets.volumes.snapshots.delete( @@ -377,7 +377,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert snapshot is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.volumes.snapshots.with_raw_response.delete( @@ -389,7 +389,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: snapshot = await response.parse() assert snapshot is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.volumes.snapshots.with_streaming_response.delete( @@ -403,7 +403,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `snapshot_id` but received ''"): diff --git a/tests/api_resources/inference/test_api_keys.py b/tests/api_resources/inference/test_api_keys.py index f22947ed..d9745710 100644 --- a/tests/api_resources/inference/test_api_keys.py +++ b/tests/api_resources/inference/test_api_keys.py @@ -23,13 +23,13 @@ class TestAPIKeys: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: api_key = client.inference.api_keys.create() assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: api_key = client.inference.api_keys.create( @@ -37,7 +37,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.inference.api_keys.with_raw_response.create() @@ -47,7 +47,7 @@ def test_raw_response_create(self, client: Gradient) -> None: api_key = response.parse() assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.inference.api_keys.with_streaming_response.create() as response: @@ -59,7 +59,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: api_key = client.inference.api_keys.update( @@ -67,7 +67,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: api_key = 
client.inference.api_keys.update( @@ -77,7 +77,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.inference.api_keys.with_raw_response.update( @@ -89,7 +89,7 @@ def test_raw_response_update(self, client: Gradient) -> None: api_key = response.parse() assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.inference.api_keys.with_streaming_response.update( @@ -103,7 +103,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): @@ -111,13 +111,13 @@ def test_path_params_update(self, client: Gradient) -> None: path_api_key_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: api_key = client.inference.api_keys.list() assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: api_key = client.inference.api_keys.list( @@ -126,7 +126,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.inference.api_keys.with_raw_response.list() @@ -136,7 +136,7 @@ def test_raw_response_list(self, client: Gradient) -> None: api_key = response.parse() assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.inference.api_keys.with_streaming_response.list() as response: @@ -148,7 +148,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: api_key = client.inference.api_keys.delete( @@ -156,7 +156,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.inference.api_keys.with_raw_response.delete( @@ -168,7 +168,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: api_key = response.parse() assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: 
Gradient) -> None: with client.inference.api_keys.with_streaming_response.delete( @@ -182,7 +182,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -190,7 +190,7 @@ def test_path_params_delete(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_regenerate(self, client: Gradient) -> None: api_key = client.inference.api_keys.update_regenerate( @@ -198,7 +198,7 @@ def test_method_update_regenerate(self, client: Gradient) -> None: ) assert_matches_type(APIKeyUpdateRegenerateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update_regenerate(self, client: Gradient) -> None: response = client.inference.api_keys.with_raw_response.update_regenerate( @@ -210,7 +210,7 @@ def test_raw_response_update_regenerate(self, client: Gradient) -> None: api_key = response.parse() assert_matches_type(APIKeyUpdateRegenerateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update_regenerate(self, client: Gradient) -> None: with client.inference.api_keys.with_streaming_response.update_regenerate( @@ -224,7 +224,7 @@ def test_streaming_response_update_regenerate(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update_regenerate(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -238,13 +238,13 @@ class TestAsyncAPIKeys: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.create() assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.create( @@ -252,7 +252,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.inference.api_keys.with_raw_response.create() @@ -262,7 +262,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: api_key = await response.parse() assert_matches_type(APIKeyCreateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with 
async_client.inference.api_keys.with_streaming_response.create() as response: @@ -274,7 +274,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.update( @@ -282,7 +282,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.update( @@ -292,7 +292,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.inference.api_keys.with_raw_response.update( @@ -304,7 +304,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: api_key = await response.parse() assert_matches_type(APIKeyUpdateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.inference.api_keys.with_streaming_response.update( @@ -318,7 +318,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): @@ -326,13 +326,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: path_api_key_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.list() assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.list( @@ -341,7 +341,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.inference.api_keys.with_raw_response.list() @@ -351,7 +351,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: api_key = await response.parse() assert_matches_type(APIKeyListResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> 
None: async with async_client.inference.api_keys.with_streaming_response.list() as response: @@ -363,7 +363,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.delete( @@ -371,7 +371,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.inference.api_keys.with_raw_response.delete( @@ -383,7 +383,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: api_key = await response.parse() assert_matches_type(APIKeyDeleteResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.inference.api_keys.with_streaming_response.delete( @@ -397,7 +397,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -405,7 +405,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_regenerate(self, async_client: AsyncGradient) -> None: api_key = await async_client.inference.api_keys.update_regenerate( @@ -413,7 +413,7 @@ async def test_method_update_regenerate(self, async_client: AsyncGradient) -> No ) assert_matches_type(APIKeyUpdateRegenerateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update_regenerate(self, async_client: AsyncGradient) -> None: response = await async_client.inference.api_keys.with_raw_response.update_regenerate( @@ -425,7 +425,7 @@ async def test_raw_response_update_regenerate(self, async_client: AsyncGradient) api_key = await response.parse() assert_matches_type(APIKeyUpdateRegenerateResponse, api_key, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update_regenerate(self, async_client: AsyncGradient) -> None: async with async_client.inference.api_keys.with_streaming_response.update_regenerate( @@ -439,7 +439,7 @@ async def test_streaming_response_update_regenerate(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update_regenerate(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): diff --git a/tests/api_resources/knowledge_bases/test_data_sources.py 
b/tests/api_resources/knowledge_bases/test_data_sources.py index 0e44b584..bd7158d2 100644 --- a/tests/api_resources/knowledge_bases/test_data_sources.py +++ b/tests/api_resources/knowledge_bases/test_data_sources.py @@ -21,7 +21,7 @@ class TestDataSources: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: data_source = client.knowledge_bases.data_sources.create( @@ -29,7 +29,7 @@ def test_method_create(self, client: Gradient) -> None: ) assert_matches_type(DataSourceCreateResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: data_source = client.knowledge_bases.data_sources.create( @@ -55,7 +55,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(DataSourceCreateResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.knowledge_bases.data_sources.with_raw_response.create( @@ -67,7 +67,7 @@ def test_raw_response_create(self, client: Gradient) -> None: data_source = response.parse() assert_matches_type(DataSourceCreateResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.knowledge_bases.data_sources.with_streaming_response.create( @@ -81,7 +81,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_create(self, client: Gradient) -> None: with pytest.raises( @@ -91,7 +91,7 @@ def test_path_params_create(self, client: Gradient) -> None: path_knowledge_base_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: data_source = client.knowledge_bases.data_sources.list( @@ -99,7 +99,7 @@ def test_method_list(self, client: Gradient) -> None: ) assert_matches_type(DataSourceListResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: data_source = client.knowledge_bases.data_sources.list( @@ -109,7 +109,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(DataSourceListResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.knowledge_bases.data_sources.with_raw_response.list( @@ -121,7 +121,7 @@ def test_raw_response_list(self, client: Gradient) -> None: data_source = response.parse() assert_matches_type(DataSourceListResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.knowledge_bases.data_sources.with_streaming_response.list( @@ -135,7 +135,7 @@ 
def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_list(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"): @@ -143,7 +143,7 @@ def test_path_params_list(self, client: Gradient) -> None: knowledge_base_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: data_source = client.knowledge_bases.data_sources.delete( @@ -152,7 +152,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert_matches_type(DataSourceDeleteResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.knowledge_bases.data_sources.with_raw_response.delete( @@ -165,7 +165,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: data_source = response.parse() assert_matches_type(DataSourceDeleteResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.knowledge_bases.data_sources.with_streaming_response.delete( @@ -180,7 +180,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"): @@ -201,7 +201,7 @@ class TestAsyncDataSources: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: data_source = await async_client.knowledge_bases.data_sources.create( @@ -209,7 +209,7 @@ async def test_method_create(self, async_client: AsyncGradient) -> None: ) assert_matches_type(DataSourceCreateResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: data_source = await async_client.knowledge_bases.data_sources.create( @@ -235,7 +235,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(DataSourceCreateResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.data_sources.with_raw_response.create( @@ -247,7 +247,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: data_source = await response.parse() assert_matches_type(DataSourceCreateResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with 
async_client.knowledge_bases.data_sources.with_streaming_response.create( @@ -261,7 +261,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_create(self, async_client: AsyncGradient) -> None: with pytest.raises( @@ -271,7 +271,7 @@ async def test_path_params_create(self, async_client: AsyncGradient) -> None: path_knowledge_base_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: data_source = await async_client.knowledge_bases.data_sources.list( @@ -279,7 +279,7 @@ async def test_method_list(self, async_client: AsyncGradient) -> None: ) assert_matches_type(DataSourceListResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: data_source = await async_client.knowledge_bases.data_sources.list( @@ -289,7 +289,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(DataSourceListResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.data_sources.with_raw_response.list( @@ -301,7 +301,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: data_source = await response.parse() assert_matches_type(DataSourceListResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.data_sources.with_streaming_response.list( @@ -315,7 +315,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"): @@ -323,7 +323,7 @@ async def test_path_params_list(self, async_client: AsyncGradient) -> None: knowledge_base_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: data_source = await async_client.knowledge_bases.data_sources.delete( @@ -332,7 +332,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert_matches_type(DataSourceDeleteResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.data_sources.with_raw_response.delete( @@ -345,7 +345,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: data_source = await response.parse() assert_matches_type(DataSourceDeleteResponse, data_source, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism 
tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.data_sources.with_streaming_response.delete( @@ -360,7 +360,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `knowledge_base_uuid` but received ''"): diff --git a/tests/api_resources/knowledge_bases/test_indexing_jobs.py b/tests/api_resources/knowledge_bases/test_indexing_jobs.py index 231b22af..8840edfe 100644 --- a/tests/api_resources/knowledge_bases/test_indexing_jobs.py +++ b/tests/api_resources/knowledge_bases/test_indexing_jobs.py @@ -23,13 +23,13 @@ class TestIndexingJobs: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.create() assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.create( @@ -38,7 +38,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.knowledge_bases.indexing_jobs.with_raw_response.create() @@ -48,7 +48,7 @@ def test_raw_response_create(self, client: Gradient) -> None: indexing_job = response.parse() assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.knowledge_bases.indexing_jobs.with_streaming_response.create() as response: @@ -60,7 +60,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.retrieve( @@ -68,7 +68,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(IndexingJobRetrieveResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.knowledge_bases.indexing_jobs.with_raw_response.retrieve( @@ -80,7 +80,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: indexing_job = response.parse() assert_matches_type(IndexingJobRetrieveResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with 
client.knowledge_bases.indexing_jobs.with_streaming_response.retrieve( @@ -94,7 +94,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): @@ -102,13 +102,13 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.list() assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.list( @@ -117,7 +117,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.knowledge_bases.indexing_jobs.with_raw_response.list() @@ -127,7 +127,7 @@ def test_raw_response_list(self, client: Gradient) -> None: indexing_job = response.parse() assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.knowledge_bases.indexing_jobs.with_streaming_response.list() as response: @@ -139,7 +139,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve_data_sources(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.retrieve_data_sources( @@ -147,7 +147,7 @@ def test_method_retrieve_data_sources(self, client: Gradient) -> None: ) assert_matches_type(IndexingJobRetrieveDataSourcesResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve_data_sources(self, client: Gradient) -> None: response = client.knowledge_bases.indexing_jobs.with_raw_response.retrieve_data_sources( @@ -159,7 +159,7 @@ def test_raw_response_retrieve_data_sources(self, client: Gradient) -> None: indexing_job = response.parse() assert_matches_type(IndexingJobRetrieveDataSourcesResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve_data_sources(self, client: Gradient) -> None: with client.knowledge_bases.indexing_jobs.with_streaming_response.retrieve_data_sources( @@ -173,7 +173,7 @@ def test_streaming_response_retrieve_data_sources(self, client: Gradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve_data_sources(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for 
`indexing_job_uuid` but received ''"): @@ -181,7 +181,7 @@ def test_path_params_retrieve_data_sources(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_cancel(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.update_cancel( @@ -189,7 +189,7 @@ def test_method_update_cancel(self, client: Gradient) -> None: ) assert_matches_type(IndexingJobUpdateCancelResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_cancel_with_all_params(self, client: Gradient) -> None: indexing_job = client.knowledge_bases.indexing_jobs.update_cancel( @@ -198,7 +198,7 @@ def test_method_update_cancel_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(IndexingJobUpdateCancelResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update_cancel(self, client: Gradient) -> None: response = client.knowledge_bases.indexing_jobs.with_raw_response.update_cancel( @@ -210,7 +210,7 @@ def test_raw_response_update_cancel(self, client: Gradient) -> None: indexing_job = response.parse() assert_matches_type(IndexingJobUpdateCancelResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update_cancel(self, client: Gradient) -> None: with client.knowledge_bases.indexing_jobs.with_streaming_response.update_cancel( @@ -224,7 +224,7 @@ def test_streaming_response_update_cancel(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update_cancel(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): @@ -238,13 +238,13 @@ class TestAsyncIndexingJobs: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.create() assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.create( @@ -253,7 +253,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.create() @@ -263,7 +263,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: indexing_job = await response.parse() assert_matches_type(IndexingJobCreateResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def 
test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.create() as response: @@ -275,7 +275,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.retrieve( @@ -283,7 +283,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(IndexingJobRetrieveResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.retrieve( @@ -295,7 +295,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: indexing_job = await response.parse() assert_matches_type(IndexingJobRetrieveResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.retrieve( @@ -309,7 +309,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): @@ -317,13 +317,13 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.list() assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.list( @@ -332,7 +332,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.list() @@ -342,7 +342,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: indexing_job = await response.parse() assert_matches_type(IndexingJobListResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.list() as response: @@ -354,7 +354,7 @@ async def test_streaming_response_list(self, 
async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve_data_sources(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.retrieve_data_sources( @@ -362,7 +362,7 @@ async def test_method_retrieve_data_sources(self, async_client: AsyncGradient) - ) assert_matches_type(IndexingJobRetrieveDataSourcesResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve_data_sources(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.retrieve_data_sources( @@ -374,7 +374,7 @@ async def test_raw_response_retrieve_data_sources(self, async_client: AsyncGradi indexing_job = await response.parse() assert_matches_type(IndexingJobRetrieveDataSourcesResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve_data_sources(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.retrieve_data_sources( @@ -388,7 +388,7 @@ async def test_streaming_response_retrieve_data_sources(self, async_client: Asyn assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve_data_sources(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `indexing_job_uuid` but received ''"): @@ -396,7 +396,7 @@ async def test_path_params_retrieve_data_sources(self, async_client: AsyncGradie "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_cancel(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.update_cancel( @@ -404,7 +404,7 @@ async def test_method_update_cancel(self, async_client: AsyncGradient) -> None: ) assert_matches_type(IndexingJobUpdateCancelResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_cancel_with_all_params(self, async_client: AsyncGradient) -> None: indexing_job = await async_client.knowledge_bases.indexing_jobs.update_cancel( @@ -413,7 +413,7 @@ async def test_method_update_cancel_with_all_params(self, async_client: AsyncGra ) assert_matches_type(IndexingJobUpdateCancelResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update_cancel(self, async_client: AsyncGradient) -> None: response = await async_client.knowledge_bases.indexing_jobs.with_raw_response.update_cancel( @@ -425,7 +425,7 @@ async def test_raw_response_update_cancel(self, async_client: AsyncGradient) -> indexing_job = await response.parse() assert_matches_type(IndexingJobUpdateCancelResponse, indexing_job, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update_cancel(self, async_client: AsyncGradient) -> None: async with async_client.knowledge_bases.indexing_jobs.with_streaming_response.update_cancel( 
@@ -439,7 +439,7 @@ async def test_streaming_response_update_cancel(self, async_client: AsyncGradien assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update_cancel(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): diff --git a/tests/api_resources/models/providers/test_anthropic.py b/tests/api_resources/models/providers/test_anthropic.py index 5bb7a1e9..60cb0c16 100644 --- a/tests/api_resources/models/providers/test_anthropic.py +++ b/tests/api_resources/models/providers/test_anthropic.py @@ -24,13 +24,13 @@ class TestAnthropic: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.create() assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.create( @@ -39,7 +39,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.models.providers.anthropic.with_raw_response.create() @@ -49,7 +49,7 @@ def test_raw_response_create(self, client: Gradient) -> None: anthropic = response.parse() assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.models.providers.anthropic.with_streaming_response.create() as response: @@ -61,7 +61,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.retrieve( @@ -69,7 +69,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(AnthropicRetrieveResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.models.providers.anthropic.with_raw_response.retrieve( @@ -81,7 +81,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: anthropic = response.parse() assert_matches_type(AnthropicRetrieveResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.models.providers.anthropic.with_streaming_response.retrieve( @@ -95,7 +95,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, 
client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -103,7 +103,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.update( @@ -111,7 +111,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.update( @@ -122,7 +122,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.models.providers.anthropic.with_raw_response.update( @@ -134,7 +134,7 @@ def test_raw_response_update(self, client: Gradient) -> None: anthropic = response.parse() assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.models.providers.anthropic.with_streaming_response.update( @@ -148,7 +148,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): @@ -156,13 +156,13 @@ def test_path_params_update(self, client: Gradient) -> None: path_api_key_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.list() assert_matches_type(AnthropicListResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.list( @@ -171,7 +171,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(AnthropicListResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.models.providers.anthropic.with_raw_response.list() @@ -181,7 +181,7 @@ def test_raw_response_list(self, client: Gradient) -> None: anthropic = response.parse() assert_matches_type(AnthropicListResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.models.providers.anthropic.with_streaming_response.list() as response: @@ -193,7 +193,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.delete( @@ -201,7 +201,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert_matches_type(AnthropicDeleteResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.models.providers.anthropic.with_raw_response.delete( @@ -213,7 +213,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: anthropic = response.parse() assert_matches_type(AnthropicDeleteResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.models.providers.anthropic.with_streaming_response.delete( @@ -227,7 +227,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -235,7 +235,7 @@ def test_path_params_delete(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_agents(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.list_agents( @@ -243,7 +243,7 @@ def test_method_list_agents(self, client: Gradient) -> None: ) assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_agents_with_all_params(self, client: Gradient) -> None: anthropic = client.models.providers.anthropic.list_agents( @@ -253,7 +253,7 @@ def test_method_list_agents_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_agents(self, client: Gradient) -> None: response = client.models.providers.anthropic.with_raw_response.list_agents( @@ -265,7 +265,7 @@ def test_raw_response_list_agents(self, client: Gradient) -> None: anthropic = response.parse() assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list_agents(self, client: Gradient) -> None: with client.models.providers.anthropic.with_streaming_response.list_agents( @@ -279,7 +279,7 @@ def test_streaming_response_list_agents(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_list_agents(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): @@ -293,13 +293,13 @@ class TestAsyncAnthropic: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def 
test_method_create(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.create() assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.create( @@ -308,7 +308,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.anthropic.with_raw_response.create() @@ -318,7 +318,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: anthropic = await response.parse() assert_matches_type(AnthropicCreateResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.anthropic.with_streaming_response.create() as response: @@ -330,7 +330,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.retrieve( @@ -338,7 +338,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AnthropicRetrieveResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.anthropic.with_raw_response.retrieve( @@ -350,7 +350,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: anthropic = await response.parse() assert_matches_type(AnthropicRetrieveResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.anthropic.with_streaming_response.retrieve( @@ -364,7 +364,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -372,7 +372,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.update( @@ -380,7 +380,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AnthropicUpdateResponse, anthropic, 
path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.update( @@ -391,7 +391,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.anthropic.with_raw_response.update( @@ -403,7 +403,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: anthropic = await response.parse() assert_matches_type(AnthropicUpdateResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.anthropic.with_streaming_response.update( @@ -417,7 +417,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): @@ -425,13 +425,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: path_api_key_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.list() assert_matches_type(AnthropicListResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.list( @@ -440,7 +440,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(AnthropicListResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.anthropic.with_raw_response.list() @@ -450,7 +450,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: anthropic = await response.parse() assert_matches_type(AnthropicListResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.anthropic.with_streaming_response.list() as response: @@ -462,7 +462,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.delete( @@ -470,7 +470,7 @@ async def 
test_method_delete(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AnthropicDeleteResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.anthropic.with_raw_response.delete( @@ -482,7 +482,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: anthropic = await response.parse() assert_matches_type(AnthropicDeleteResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.anthropic.with_streaming_response.delete( @@ -496,7 +496,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -504,7 +504,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_agents(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.list_agents( @@ -512,7 +512,7 @@ async def test_method_list_agents(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_agents_with_all_params(self, async_client: AsyncGradient) -> None: anthropic = await async_client.models.providers.anthropic.list_agents( @@ -522,7 +522,7 @@ async def test_method_list_agents_with_all_params(self, async_client: AsyncGradi ) assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.anthropic.with_raw_response.list_agents( @@ -534,7 +534,7 @@ async def test_raw_response_list_agents(self, async_client: AsyncGradient) -> No anthropic = await response.parse() assert_matches_type(AnthropicListAgentsResponse, anthropic, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_agents(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.anthropic.with_streaming_response.list_agents( @@ -548,7 +548,7 @@ async def test_streaming_response_list_agents(self, async_client: AsyncGradient) assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_list_agents(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): diff --git a/tests/api_resources/models/providers/test_openai.py b/tests/api_resources/models/providers/test_openai.py index ed2cfc8e..8f9c1f80 100644 --- 
a/tests/api_resources/models/providers/test_openai.py +++ b/tests/api_resources/models/providers/test_openai.py @@ -24,13 +24,13 @@ class TestOpenAI: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: openai = client.models.providers.openai.create() assert_matches_type(OpenAICreateResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: openai = client.models.providers.openai.create( @@ -39,7 +39,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(OpenAICreateResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.models.providers.openai.with_raw_response.create() @@ -49,7 +49,7 @@ def test_raw_response_create(self, client: Gradient) -> None: openai = response.parse() assert_matches_type(OpenAICreateResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.models.providers.openai.with_streaming_response.create() as response: @@ -61,7 +61,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: openai = client.models.providers.openai.retrieve( @@ -69,7 +69,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(OpenAIRetrieveResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.models.providers.openai.with_raw_response.retrieve( @@ -81,7 +81,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: openai = response.parse() assert_matches_type(OpenAIRetrieveResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.models.providers.openai.with_streaming_response.retrieve( @@ -95,7 +95,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -103,7 +103,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: openai = client.models.providers.openai.update( @@ -111,7 +111,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(OpenAIUpdateResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def 
test_method_update_with_all_params(self, client: Gradient) -> None: openai = client.models.providers.openai.update( @@ -122,7 +122,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(OpenAIUpdateResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.models.providers.openai.with_raw_response.update( @@ -134,7 +134,7 @@ def test_raw_response_update(self, client: Gradient) -> None: openai = response.parse() assert_matches_type(OpenAIUpdateResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.models.providers.openai.with_streaming_response.update( @@ -148,7 +148,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): @@ -156,13 +156,13 @@ def test_path_params_update(self, client: Gradient) -> None: path_api_key_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: openai = client.models.providers.openai.list() assert_matches_type(OpenAIListResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: openai = client.models.providers.openai.list( @@ -171,7 +171,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(OpenAIListResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.models.providers.openai.with_raw_response.list() @@ -181,7 +181,7 @@ def test_raw_response_list(self, client: Gradient) -> None: openai = response.parse() assert_matches_type(OpenAIListResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.models.providers.openai.with_streaming_response.list() as response: @@ -193,7 +193,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: openai = client.models.providers.openai.delete( @@ -201,7 +201,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert_matches_type(OpenAIDeleteResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.models.providers.openai.with_raw_response.delete( @@ -213,7 +213,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: openai = response.parse() assert_matches_type(OpenAIDeleteResponse, openai, path=["response"]) - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.models.providers.openai.with_streaming_response.delete( @@ -227,7 +227,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -235,7 +235,7 @@ def test_path_params_delete(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve_agents(self, client: Gradient) -> None: openai = client.models.providers.openai.retrieve_agents( @@ -243,7 +243,7 @@ def test_method_retrieve_agents(self, client: Gradient) -> None: ) assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve_agents_with_all_params(self, client: Gradient) -> None: openai = client.models.providers.openai.retrieve_agents( @@ -253,7 +253,7 @@ def test_method_retrieve_agents_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve_agents(self, client: Gradient) -> None: response = client.models.providers.openai.with_raw_response.retrieve_agents( @@ -265,7 +265,7 @@ def test_raw_response_retrieve_agents(self, client: Gradient) -> None: openai = response.parse() assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve_agents(self, client: Gradient) -> None: with client.models.providers.openai.with_streaming_response.retrieve_agents( @@ -279,7 +279,7 @@ def test_streaming_response_retrieve_agents(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve_agents(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): @@ -293,13 +293,13 @@ class TestAsyncOpenAI: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.create() assert_matches_type(OpenAICreateResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.create( @@ -308,7 +308,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(OpenAICreateResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) 
-> None: response = await async_client.models.providers.openai.with_raw_response.create() @@ -318,7 +318,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: openai = await response.parse() assert_matches_type(OpenAICreateResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.openai.with_streaming_response.create() as response: @@ -330,7 +330,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.retrieve( @@ -338,7 +338,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(OpenAIRetrieveResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.openai.with_raw_response.retrieve( @@ -350,7 +350,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: openai = await response.parse() assert_matches_type(OpenAIRetrieveResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.openai.with_streaming_response.retrieve( @@ -364,7 +364,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -372,7 +372,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.update( @@ -380,7 +380,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(OpenAIUpdateResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.update( @@ -391,7 +391,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(OpenAIUpdateResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.openai.with_raw_response.update( @@ -403,7 +403,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: openai = await response.parse() 
assert_matches_type(OpenAIUpdateResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.openai.with_streaming_response.update( @@ -417,7 +417,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_api_key_uuid` but received ''"): @@ -425,13 +425,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: path_api_key_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.list() assert_matches_type(OpenAIListResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.list( @@ -440,7 +440,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(OpenAIListResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.openai.with_raw_response.list() @@ -450,7 +450,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: openai = await response.parse() assert_matches_type(OpenAIListResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.openai.with_streaming_response.list() as response: @@ -462,7 +462,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.delete( @@ -470,7 +470,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert_matches_type(OpenAIDeleteResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.openai.with_raw_response.delete( @@ -482,7 +482,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: openai = await response.parse() assert_matches_type(OpenAIDeleteResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.openai.with_streaming_response.delete( @@ -496,7 +496,7 @@ async def 
test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `api_key_uuid` but received ''"): @@ -504,7 +504,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve_agents(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.retrieve_agents( @@ -512,7 +512,7 @@ async def test_method_retrieve_agents(self, async_client: AsyncGradient) -> None ) assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve_agents_with_all_params(self, async_client: AsyncGradient) -> None: openai = await async_client.models.providers.openai.retrieve_agents( @@ -522,7 +522,7 @@ async def test_method_retrieve_agents_with_all_params(self, async_client: AsyncG ) assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve_agents(self, async_client: AsyncGradient) -> None: response = await async_client.models.providers.openai.with_raw_response.retrieve_agents( @@ -534,7 +534,7 @@ async def test_raw_response_retrieve_agents(self, async_client: AsyncGradient) - openai = await response.parse() assert_matches_type(OpenAIRetrieveAgentsResponse, openai, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve_agents(self, async_client: AsyncGradient) -> None: async with async_client.models.providers.openai.with_streaming_response.retrieve_agents( @@ -548,7 +548,7 @@ async def test_streaming_response_retrieve_agents(self, async_client: AsyncGradi assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve_agents(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): diff --git a/tests/api_resources/test_agents.py b/tests/api_resources/test_agents.py index 77825f7e..dd4dbdc4 100644 --- a/tests/api_resources/test_agents.py +++ b/tests/api_resources/test_agents.py @@ -24,13 +24,13 @@ class TestAgents: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: agent = client.agents.create() assert_matches_type(AgentCreateResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: agent = client.agents.create( @@ -47,7 +47,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(AgentCreateResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def 
test_raw_response_create(self, client: Gradient) -> None: response = client.agents.with_raw_response.create() @@ -57,7 +57,7 @@ def test_raw_response_create(self, client: Gradient) -> None: agent = response.parse() assert_matches_type(AgentCreateResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.agents.with_streaming_response.create() as response: @@ -69,7 +69,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: agent = client.agents.retrieve( @@ -77,7 +77,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(AgentRetrieveResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.agents.with_raw_response.retrieve( @@ -89,7 +89,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: agent = response.parse() assert_matches_type(AgentRetrieveResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.agents.with_streaming_response.retrieve( @@ -103,7 +103,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): @@ -111,7 +111,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: agent = client.agents.update( @@ -119,7 +119,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(AgentUpdateResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: agent = client.agents.update( @@ -144,7 +144,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(AgentUpdateResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.agents.with_raw_response.update( @@ -156,7 +156,7 @@ def test_raw_response_update(self, client: Gradient) -> None: agent = response.parse() assert_matches_type(AgentUpdateResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.agents.with_streaming_response.update( @@ -170,7 +170,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, 
client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): @@ -178,13 +178,13 @@ def test_path_params_update(self, client: Gradient) -> None: path_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: agent = client.agents.list() assert_matches_type(AgentListResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: agent = client.agents.list( @@ -194,7 +194,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(AgentListResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.agents.with_raw_response.list() @@ -204,7 +204,7 @@ def test_raw_response_list(self, client: Gradient) -> None: agent = response.parse() assert_matches_type(AgentListResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.agents.with_streaming_response.list() as response: @@ -216,7 +216,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: agent = client.agents.delete( @@ -224,7 +224,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert_matches_type(AgentDeleteResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.agents.with_raw_response.delete( @@ -236,7 +236,7 @@ def test_raw_response_delete(self, client: Gradient) -> None: agent = response.parse() assert_matches_type(AgentDeleteResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.agents.with_streaming_response.delete( @@ -250,7 +250,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_delete(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): @@ -258,7 +258,7 @@ def test_path_params_delete(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_status(self, client: Gradient) -> None: agent = client.agents.update_status( @@ -266,7 +266,7 @@ def test_method_update_status(self, client: Gradient) -> None: ) assert_matches_type(AgentUpdateStatusResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_status_with_all_params(self, client: Gradient) -> None: agent = client.agents.update_status( @@ -276,7 +276,7 @@ def test_method_update_status_with_all_params(self, client: Gradient) -> 
None: ) assert_matches_type(AgentUpdateStatusResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update_status(self, client: Gradient) -> None: response = client.agents.with_raw_response.update_status( @@ -288,7 +288,7 @@ def test_raw_response_update_status(self, client: Gradient) -> None: agent = response.parse() assert_matches_type(AgentUpdateStatusResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update_status(self, client: Gradient) -> None: with client.agents.with_streaming_response.update_status( @@ -302,7 +302,7 @@ def test_streaming_response_update_status(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update_status(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): @@ -316,13 +316,13 @@ class TestAsyncAgents: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.create() assert_matches_type(AgentCreateResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.create( @@ -339,7 +339,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(AgentCreateResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create(self, async_client: AsyncGradient) -> None: response = await async_client.agents.with_raw_response.create() @@ -349,7 +349,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None: agent = await response.parse() assert_matches_type(AgentCreateResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create(self, async_client: AsyncGradient) -> None: async with async_client.agents.with_streaming_response.create() as response: @@ -361,7 +361,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.retrieve( @@ -369,7 +369,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AgentRetrieveResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.agents.with_raw_response.retrieve( @@ -381,7 +381,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: agent = await response.parse() assert_matches_type(AgentRetrieveResponse, agent, 
path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.agents.with_streaming_response.retrieve( @@ -395,7 +395,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): @@ -403,7 +403,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.update( @@ -411,7 +411,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AgentUpdateResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.update( @@ -436,7 +436,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient) ) assert_matches_type(AgentUpdateResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update(self, async_client: AsyncGradient) -> None: response = await async_client.agents.with_raw_response.update( @@ -448,7 +448,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None: agent = await response.parse() assert_matches_type(AgentUpdateResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_update(self, async_client: AsyncGradient) -> None: async with async_client.agents.with_streaming_response.update( @@ -462,7 +462,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): @@ -470,13 +470,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None: path_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.list() assert_matches_type(AgentListResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.list( @@ -486,7 +486,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(AgentListResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: 
response = await async_client.agents.with_raw_response.list() @@ -496,7 +496,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: agent = await response.parse() assert_matches_type(AgentListResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.agents.with_streaming_response.list() as response: @@ -508,7 +508,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.delete( @@ -516,7 +516,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AgentDeleteResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await async_client.agents.with_raw_response.delete( @@ -528,7 +528,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: agent = await response.parse() assert_matches_type(AgentDeleteResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.agents.with_streaming_response.delete( @@ -542,7 +542,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_delete(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): @@ -550,7 +550,7 @@ async def test_path_params_delete(self, async_client: AsyncGradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_status(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.update_status( @@ -558,7 +558,7 @@ async def test_method_update_status(self, async_client: AsyncGradient) -> None: ) assert_matches_type(AgentUpdateStatusResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_update_status_with_all_params(self, async_client: AsyncGradient) -> None: agent = await async_client.agents.update_status( @@ -568,7 +568,7 @@ async def test_method_update_status_with_all_params(self, async_client: AsyncGra ) assert_matches_type(AgentUpdateStatusResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_update_status(self, async_client: AsyncGradient) -> None: response = await async_client.agents.with_raw_response.update_status( @@ -580,7 +580,7 @@ async def test_raw_response_update_status(self, async_client: AsyncGradient) -> agent = await response.parse() assert_matches_type(AgentUpdateStatusResponse, agent, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def 
test_streaming_response_update_status(self, async_client: AsyncGradient) -> None: async with async_client.agents.with_streaming_response.update_status( @@ -594,7 +594,7 @@ async def test_streaming_response_update_status(self, async_client: AsyncGradien assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_path_params_update_status(self, async_client: AsyncGradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): diff --git a/tests/api_resources/test_gpu_droplets.py b/tests/api_resources/test_gpu_droplets.py index 485fd5f9..0cb27fbb 100644 --- a/tests/api_resources/test_gpu_droplets.py +++ b/tests/api_resources/test_gpu_droplets.py @@ -25,7 +25,7 @@ class TestGPUDroplets: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_1(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.create( @@ -35,7 +35,7 @@ def test_method_create_overload_1(self, client: Gradient) -> None: ) assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params_overload_1(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.create( @@ -61,7 +61,7 @@ def test_method_create_with_all_params_overload_1(self, client: Gradient) -> Non ) assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_1(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.create( @@ -75,7 +75,7 @@ def test_raw_response_create_overload_1(self, client: Gradient) -> None: gpu_droplet = response.parse() assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_1(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.create( @@ -91,7 +91,7 @@ def test_streaming_response_create_overload_1(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_overload_2(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.create( @@ -101,7 +101,7 @@ def test_method_create_overload_2(self, client: Gradient) -> None: ) assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params_overload_2(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.create( @@ -127,7 +127,7 @@ def test_method_create_with_all_params_overload_2(self, client: Gradient) -> Non ) assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create_overload_2(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.create( @@ -141,7 +141,7 @@ def test_raw_response_create_overload_2(self, client: Gradient) 
-> None: gpu_droplet = response.parse() assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create_overload_2(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.create( @@ -157,7 +157,7 @@ def test_streaming_response_create_overload_2(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.retrieve( @@ -165,7 +165,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(GPUDropletRetrieveResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.retrieve( @@ -177,7 +177,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> None: gpu_droplet = response.parse() assert_matches_type(GPUDropletRetrieveResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.retrieve( @@ -191,13 +191,13 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list() assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list( @@ -209,7 +209,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.list() @@ -219,7 +219,7 @@ def test_raw_response_list(self, client: Gradient) -> None: gpu_droplet = response.parse() assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.list() as response: @@ -231,7 +231,7 @@ def test_streaming_response_list(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.delete( @@ -239,7 +239,7 @@ def test_method_delete(self, client: Gradient) -> None: ) assert gpu_droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.delete( @@ -251,7 +251,7 @@ def test_raw_response_delete(self, client: 
Gradient) -> None: gpu_droplet = response.parse() assert gpu_droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.delete( @@ -265,7 +265,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_delete_by_tag(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.delete_by_tag( @@ -273,7 +273,7 @@ def test_method_delete_by_tag(self, client: Gradient) -> None: ) assert gpu_droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_delete_by_tag(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.delete_by_tag( @@ -285,7 +285,7 @@ def test_raw_response_delete_by_tag(self, client: Gradient) -> None: gpu_droplet = response.parse() assert gpu_droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_delete_by_tag(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.delete_by_tag( @@ -299,7 +299,7 @@ def test_streaming_response_delete_by_tag(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_firewalls(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list_firewalls( @@ -307,7 +307,7 @@ def test_method_list_firewalls(self, client: Gradient) -> None: ) assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_firewalls_with_all_params(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list_firewalls( @@ -317,7 +317,7 @@ def test_method_list_firewalls_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_firewalls(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.list_firewalls( @@ -329,7 +329,7 @@ def test_raw_response_list_firewalls(self, client: Gradient) -> None: gpu_droplet = response.parse() assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list_firewalls(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.list_firewalls( @@ -343,7 +343,7 @@ def test_streaming_response_list_firewalls(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_kernels(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list_kernels( @@ -351,7 +351,7 @@ def test_method_list_kernels(self, client: Gradient) -> None: ) assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def 
test_method_list_kernels_with_all_params(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list_kernels( @@ -361,7 +361,7 @@ def test_method_list_kernels_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_kernels(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.list_kernels( @@ -373,7 +373,7 @@ def test_raw_response_list_kernels(self, client: Gradient) -> None: gpu_droplet = response.parse() assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list_kernels(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.list_kernels( @@ -387,7 +387,7 @@ def test_streaming_response_list_kernels(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_neighbors(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list_neighbors( @@ -395,7 +395,7 @@ def test_method_list_neighbors(self, client: Gradient) -> None: ) assert_matches_type(GPUDropletListNeighborsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_neighbors(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.list_neighbors( @@ -407,7 +407,7 @@ def test_raw_response_list_neighbors(self, client: Gradient) -> None: gpu_droplet = response.parse() assert_matches_type(GPUDropletListNeighborsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_list_neighbors(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.list_neighbors( @@ -421,7 +421,7 @@ def test_streaming_response_list_neighbors(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_snapshots(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list_snapshots( @@ -429,7 +429,7 @@ def test_method_list_snapshots(self, client: Gradient) -> None: ) assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_snapshots_with_all_params(self, client: Gradient) -> None: gpu_droplet = client.gpu_droplets.list_snapshots( @@ -439,7 +439,7 @@ def test_method_list_snapshots_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list_snapshots(self, client: Gradient) -> None: response = client.gpu_droplets.with_raw_response.list_snapshots( @@ -451,7 +451,7 @@ def test_raw_response_list_snapshots(self, client: Gradient) -> None: gpu_droplet = response.parse() assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are 
disabled") @parametrize def test_streaming_response_list_snapshots(self, client: Gradient) -> None: with client.gpu_droplets.with_streaming_response.list_snapshots( @@ -471,7 +471,7 @@ class TestAsyncGPUDroplets: "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_1(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.create( @@ -481,7 +481,7 @@ async def test_method_create_overload_1(self, async_client: AsyncGradient) -> No ) assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params_overload_1(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.create( @@ -507,7 +507,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn ) assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.create( @@ -521,7 +521,7 @@ async def test_raw_response_create_overload_1(self, async_client: AsyncGradient) gpu_droplet = await response.parse() assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_1(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.create( @@ -537,7 +537,7 @@ async def test_streaming_response_create_overload_1(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_overload_2(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.create( @@ -547,7 +547,7 @@ async def test_method_create_overload_2(self, async_client: AsyncGradient) -> No ) assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_create_with_all_params_overload_2(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.create( @@ -573,7 +573,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn ) assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.create( @@ -587,7 +587,7 @@ async def test_raw_response_create_overload_2(self, async_client: AsyncGradient) gpu_droplet = await response.parse() assert_matches_type(GPUDropletCreateResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_create_overload_2(self, async_client: AsyncGradient) -> None: async with 
async_client.gpu_droplets.with_streaming_response.create( @@ -603,7 +603,7 @@ async def test_streaming_response_create_overload_2(self, async_client: AsyncGra assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_retrieve(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.retrieve( @@ -611,7 +611,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None: ) assert_matches_type(GPUDropletRetrieveResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.retrieve( @@ -623,7 +623,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None: gpu_droplet = await response.parse() assert_matches_type(GPUDropletRetrieveResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.retrieve( @@ -637,13 +637,13 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list() assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list( @@ -655,7 +655,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> ) assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.list() @@ -665,7 +665,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None: gpu_droplet = await response.parse() assert_matches_type(GPUDropletListResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.list() as response: @@ -677,7 +677,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.delete( @@ -685,7 +685,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None: ) assert gpu_droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: response = await 
async_client.gpu_droplets.with_raw_response.delete( @@ -697,7 +697,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None: gpu_droplet = await response.parse() assert gpu_droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.delete( @@ -711,7 +711,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_delete_by_tag(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.delete_by_tag( @@ -719,7 +719,7 @@ async def test_method_delete_by_tag(self, async_client: AsyncGradient) -> None: ) assert gpu_droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_delete_by_tag(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.delete_by_tag( @@ -731,7 +731,7 @@ async def test_raw_response_delete_by_tag(self, async_client: AsyncGradient) -> gpu_droplet = await response.parse() assert gpu_droplet is None - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_delete_by_tag(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.delete_by_tag( @@ -745,7 +745,7 @@ async def test_streaming_response_delete_by_tag(self, async_client: AsyncGradien assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_firewalls(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list_firewalls( @@ -753,7 +753,7 @@ async def test_method_list_firewalls(self, async_client: AsyncGradient) -> None: ) assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_firewalls_with_all_params(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list_firewalls( @@ -763,7 +763,7 @@ async def test_method_list_firewalls_with_all_params(self, async_client: AsyncGr ) assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_firewalls(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.list_firewalls( @@ -775,7 +775,7 @@ async def test_raw_response_list_firewalls(self, async_client: AsyncGradient) -> gpu_droplet = await response.parse() assert_matches_type(GPUDropletListFirewallsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_firewalls(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.list_firewalls( @@ -789,7 +789,7 @@ async def test_streaming_response_list_firewalls(self, async_client: AsyncGradie assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + 
@pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_kernels(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list_kernels( @@ -797,7 +797,7 @@ async def test_method_list_kernels(self, async_client: AsyncGradient) -> None: ) assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_kernels_with_all_params(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list_kernels( @@ -807,7 +807,7 @@ async def test_method_list_kernels_with_all_params(self, async_client: AsyncGrad ) assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_kernels(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.list_kernels( @@ -819,7 +819,7 @@ async def test_raw_response_list_kernels(self, async_client: AsyncGradient) -> N gpu_droplet = await response.parse() assert_matches_type(GPUDropletListKernelsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_kernels(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.list_kernels( @@ -833,7 +833,7 @@ async def test_streaming_response_list_kernels(self, async_client: AsyncGradient assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_neighbors(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list_neighbors( @@ -841,7 +841,7 @@ async def test_method_list_neighbors(self, async_client: AsyncGradient) -> None: ) assert_matches_type(GPUDropletListNeighborsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_neighbors(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.list_neighbors( @@ -853,7 +853,7 @@ async def test_raw_response_list_neighbors(self, async_client: AsyncGradient) -> gpu_droplet = await response.parse() assert_matches_type(GPUDropletListNeighborsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_neighbors(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.list_neighbors( @@ -867,7 +867,7 @@ async def test_streaming_response_list_neighbors(self, async_client: AsyncGradie assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_method_list_snapshots(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list_snapshots( @@ -875,7 +875,7 @@ async def test_method_list_snapshots(self, async_client: AsyncGradient) -> None: ) assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def 
test_method_list_snapshots_with_all_params(self, async_client: AsyncGradient) -> None: gpu_droplet = await async_client.gpu_droplets.list_snapshots( @@ -885,7 +885,7 @@ async def test_method_list_snapshots_with_all_params(self, async_client: AsyncGr ) assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_raw_response_list_snapshots(self, async_client: AsyncGradient) -> None: response = await async_client.gpu_droplets.with_raw_response.list_snapshots( @@ -897,7 +897,7 @@ async def test_raw_response_list_snapshots(self, async_client: AsyncGradient) -> gpu_droplet = await response.parse() assert_matches_type(GPUDropletListSnapshotsResponse, gpu_droplet, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize async def test_streaming_response_list_snapshots(self, async_client: AsyncGradient) -> None: async with async_client.gpu_droplets.with_streaming_response.list_snapshots( diff --git a/tests/api_resources/test_knowledge_bases.py b/tests/api_resources/test_knowledge_bases.py index 23945480..82698131 100644 --- a/tests/api_resources/test_knowledge_bases.py +++ b/tests/api_resources/test_knowledge_bases.py @@ -23,13 +23,13 @@ class TestKnowledgeBases: parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.create() assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_create_with_all_params(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.create( @@ -76,7 +76,7 @@ def test_method_create_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_create(self, client: Gradient) -> None: response = client.knowledge_bases.with_raw_response.create() @@ -86,7 +86,7 @@ def test_raw_response_create(self, client: Gradient) -> None: knowledge_base = response.parse() assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_create(self, client: Gradient) -> None: with client.knowledge_bases.with_streaming_response.create() as response: @@ -98,7 +98,7 @@ def test_streaming_response_create(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_retrieve(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.retrieve( @@ -106,7 +106,7 @@ def test_method_retrieve(self, client: Gradient) -> None: ) assert_matches_type(KnowledgeBaseRetrieveResponse, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_retrieve(self, client: Gradient) -> None: response = client.knowledge_bases.with_raw_response.retrieve( @@ -118,7 +118,7 @@ def test_raw_response_retrieve(self, client: Gradient) -> 
None: knowledge_base = response.parse() assert_matches_type(KnowledgeBaseRetrieveResponse, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_retrieve(self, client: Gradient) -> None: with client.knowledge_bases.with_streaming_response.retrieve( @@ -132,7 +132,7 @@ def test_streaming_response_retrieve(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_retrieve(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"): @@ -140,7 +140,7 @@ def test_path_params_retrieve(self, client: Gradient) -> None: "", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.update( @@ -148,7 +148,7 @@ def test_method_update(self, client: Gradient) -> None: ) assert_matches_type(KnowledgeBaseUpdateResponse, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_update_with_all_params(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.update( @@ -162,7 +162,7 @@ def test_method_update_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(KnowledgeBaseUpdateResponse, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_update(self, client: Gradient) -> None: response = client.knowledge_bases.with_raw_response.update( @@ -174,7 +174,7 @@ def test_raw_response_update(self, client: Gradient) -> None: knowledge_base = response.parse() assert_matches_type(KnowledgeBaseUpdateResponse, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_streaming_response_update(self, client: Gradient) -> None: with client.knowledge_bases.with_streaming_response.update( @@ -188,7 +188,7 @@ def test_streaming_response_update(self, client: Gradient) -> None: assert cast(Any, response.is_closed) is True - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_path_params_update(self, client: Gradient) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"): @@ -196,13 +196,13 @@ def test_path_params_update(self, client: Gradient) -> None: path_uuid="", ) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.list() assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_method_list_with_all_params(self, client: Gradient) -> None: knowledge_base = client.knowledge_bases.list( @@ -211,7 +211,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None: ) assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"]) - @pytest.mark.skip() + @pytest.mark.skip(reason="Prism tests are disabled") @parametrize def test_raw_response_list(self, client: Gradient) -> None: response = client.knowledge_bases.with_raw_response.list() @@ -221,7 
+221,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
         knowledge_base = response.parse()
         assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     def test_streaming_response_list(self, client: Gradient) -> None:
         with client.knowledge_bases.with_streaming_response.list() as response:
@@ -233,7 +233,7 @@ def test_streaming_response_list(self, client: Gradient) -> None:

         assert cast(Any, response.is_closed) is True

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     def test_method_delete(self, client: Gradient) -> None:
         knowledge_base = client.knowledge_bases.delete(
@@ -241,7 +241,7 @@ def test_method_delete(self, client: Gradient) -> None:
         )
         assert_matches_type(KnowledgeBaseDeleteResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     def test_raw_response_delete(self, client: Gradient) -> None:
         response = client.knowledge_bases.with_raw_response.delete(
@@ -253,7 +253,7 @@ def test_raw_response_delete(self, client: Gradient) -> None:
         knowledge_base = response.parse()
         assert_matches_type(KnowledgeBaseDeleteResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     def test_streaming_response_delete(self, client: Gradient) -> None:
         with client.knowledge_bases.with_streaming_response.delete(
@@ -267,7 +267,7 @@ def test_streaming_response_delete(self, client: Gradient) -> None:

         assert cast(Any, response.is_closed) is True

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     def test_path_params_delete(self, client: Gradient) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -281,13 +281,13 @@ class TestAsyncKnowledgeBases:
         "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
     )

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_method_create(self, async_client: AsyncGradient) -> None:
         knowledge_base = await async_client.knowledge_bases.create()
         assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_method_create_with_all_params(self, async_client: AsyncGradient) -> None:
         knowledge_base = await async_client.knowledge_bases.create(
@@ -334,7 +334,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGradient)
         )
         assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
         response = await async_client.knowledge_bases.with_raw_response.create()
@@ -344,7 +344,7 @@ async def test_raw_response_create(self, async_client: AsyncGradient) -> None:
         knowledge_base = await response.parse()
         assert_matches_type(KnowledgeBaseCreateResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_streaming_response_create(self, async_client: AsyncGradient) -> None:
         async with async_client.knowledge_bases.with_streaming_response.create() as response:
@@ -356,7 +356,7 @@ async def test_streaming_response_create(self, async_client: AsyncGradient) -> N

         assert cast(Any, response.is_closed) is True

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
         knowledge_base = await async_client.knowledge_bases.retrieve(
@@ -364,7 +364,7 @@ async def test_method_retrieve(self, async_client: AsyncGradient) -> None:
         )
         assert_matches_type(KnowledgeBaseRetrieveResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
         response = await async_client.knowledge_bases.with_raw_response.retrieve(
@@ -376,7 +376,7 @@ async def test_raw_response_retrieve(self, async_client: AsyncGradient) -> None:
         knowledge_base = await response.parse()
         assert_matches_type(KnowledgeBaseRetrieveResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_streaming_response_retrieve(self, async_client: AsyncGradient) -> None:
         async with async_client.knowledge_bases.with_streaming_response.retrieve(
@@ -390,7 +390,7 @@ async def test_streaming_response_retrieve(self, async_client: AsyncGradient) ->

         assert cast(Any, response.is_closed) is True

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
@@ -398,7 +398,7 @@ async def test_path_params_retrieve(self, async_client: AsyncGradient) -> None:
                 "",
             )

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_method_update(self, async_client: AsyncGradient) -> None:
         knowledge_base = await async_client.knowledge_bases.update(
@@ -406,7 +406,7 @@ async def test_method_update(self, async_client: AsyncGradient) -> None:
         )
         assert_matches_type(KnowledgeBaseUpdateResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_method_update_with_all_params(self, async_client: AsyncGradient) -> None:
         knowledge_base = await async_client.knowledge_bases.update(
@@ -420,7 +420,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGradient)
         )
         assert_matches_type(KnowledgeBaseUpdateResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
         response = await async_client.knowledge_bases.with_raw_response.update(
@@ -432,7 +432,7 @@ async def test_raw_response_update(self, async_client: AsyncGradient) -> None:
         knowledge_base = await response.parse()
         assert_matches_type(KnowledgeBaseUpdateResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_streaming_response_update(self, async_client: AsyncGradient) -> None:
         async with async_client.knowledge_bases.with_streaming_response.update(
@@ -446,7 +446,7 @@ async def test_streaming_response_update(self, async_client: AsyncGradient) -> N

         assert cast(Any, response.is_closed) is True

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_path_params_update(self, async_client: AsyncGradient) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `path_uuid` but received ''"):
@@ -454,13 +454,13 @@ async def test_path_params_update(self, async_client: AsyncGradient) -> None:
                 path_uuid="",
             )

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_method_list(self, async_client: AsyncGradient) -> None:
         knowledge_base = await async_client.knowledge_bases.list()
         assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
         knowledge_base = await async_client.knowledge_bases.list(
@@ -469,7 +469,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
         )
         assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
         response = await async_client.knowledge_bases.with_raw_response.list()
@@ -479,7 +479,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
         knowledge_base = await response.parse()
         assert_matches_type(KnowledgeBaseListResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
         async with async_client.knowledge_bases.with_streaming_response.list() as response:
@@ -491,7 +491,7 @@ async def test_streaming_response_list(self, async_client: AsyncGradient) -> Non

         assert cast(Any, response.is_closed) is True

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_method_delete(self, async_client: AsyncGradient) -> None:
         knowledge_base = await async_client.knowledge_bases.delete(
@@ -499,7 +499,7 @@ async def test_method_delete(self, async_client: AsyncGradient) -> None:
         )
         assert_matches_type(KnowledgeBaseDeleteResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
         response = await async_client.knowledge_bases.with_raw_response.delete(
@@ -511,7 +511,7 @@ async def test_raw_response_delete(self, async_client: AsyncGradient) -> None:
         knowledge_base = await response.parse()
         assert_matches_type(KnowledgeBaseDeleteResponse, knowledge_base, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_streaming_response_delete(self, async_client: AsyncGradient) -> None:
         async with async_client.knowledge_bases.with_streaming_response.delete(
@@ -525,7 +525,7 @@ async def test_streaming_response_delete(self, async_client: AsyncGradient) -> N

         assert cast(Any, response.is_closed) is True

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_path_params_delete(self, async_client: AsyncGradient) -> None:
         with pytest.raises(ValueError, match=r"Expected a non-empty value for `uuid` but received ''"):
diff --git a/tests/api_resources/test_models.py b/tests/api_resources/test_models.py
index 7b2a5a4a..8e6edaef 100644
--- a/tests/api_resources/test_models.py
+++ b/tests/api_resources/test_models.py
@@ -17,13 +17,13 @@
 class TestModels:
     parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     def test_method_list(self, client: Gradient) -> None:
         model = client.models.list()
         assert_matches_type(ModelListResponse, model, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     def test_method_list_with_all_params(self, client: Gradient) -> None:
         model = client.models.list(
@@ -34,7 +34,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
         )
         assert_matches_type(ModelListResponse, model, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     def test_raw_response_list(self, client: Gradient) -> None:
         response = client.models.with_raw_response.list()
@@ -44,7 +44,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
         model = response.parse()
         assert_matches_type(ModelListResponse, model, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     def test_streaming_response_list(self, client: Gradient) -> None:
         with client.models.with_streaming_response.list() as response:
@@ -62,13 +62,13 @@ class TestAsyncModels:
         "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
     )

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_method_list(self, async_client: AsyncGradient) -> None:
         model = await async_client.models.list()
         assert_matches_type(ModelListResponse, model, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
         model = await async_client.models.list(
@@ -79,7 +79,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
         )
         assert_matches_type(ModelListResponse, model, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
         response = await async_client.models.with_raw_response.list()
@@ -89,7 +89,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
         model = await response.parse()
         assert_matches_type(ModelListResponse, model, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
         async with async_client.models.with_streaming_response.list() as response:
diff --git a/tests/api_resources/test_regions.py b/tests/api_resources/test_regions.py
index 5bf67e91..8cbf6afb 100644
--- a/tests/api_resources/test_regions.py
+++ b/tests/api_resources/test_regions.py
@@ -17,13 +17,13 @@
 class TestRegions:
     parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     def test_method_list(self, client: Gradient) -> None:
         region = client.regions.list()
         assert_matches_type(RegionListResponse, region, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     def test_method_list_with_all_params(self, client: Gradient) -> None:
         region = client.regions.list(
@@ -32,7 +32,7 @@ def test_method_list_with_all_params(self, client: Gradient) -> None:
         )
         assert_matches_type(RegionListResponse, region, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     def test_raw_response_list(self, client: Gradient) -> None:
         response = client.regions.with_raw_response.list()
@@ -42,7 +42,7 @@ def test_raw_response_list(self, client: Gradient) -> None:
         region = response.parse()
         assert_matches_type(RegionListResponse, region, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     def test_streaming_response_list(self, client: Gradient) -> None:
         with client.regions.with_streaming_response.list() as response:
@@ -60,13 +60,13 @@ class TestAsyncRegions:
         "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
     )

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_method_list(self, async_client: AsyncGradient) -> None:
         region = await async_client.regions.list()
         assert_matches_type(RegionListResponse, region, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_method_list_with_all_params(self, async_client: AsyncGradient) -> None:
         region = await async_client.regions.list(
@@ -75,7 +75,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncGradient) ->
         )
         assert_matches_type(RegionListResponse, region, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
         response = await async_client.regions.with_raw_response.list()
@@ -85,7 +85,7 @@ async def test_raw_response_list(self, async_client: AsyncGradient) -> None:
         region = await response.parse()
         assert_matches_type(RegionListResponse, region, path=["response"])

-    @pytest.mark.skip()
+    @pytest.mark.skip(reason="Prism tests are disabled")
     @parametrize
     async def test_streaming_response_list(self, async_client: AsyncGradient) -> None:
         async with async_client.regions.with_streaming_response.list() as response:

From 286011cd3b4dd86aa8e40d3c86737c2b10e19c36 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Tue, 12 Aug 2025 02:21:53 +0000
Subject: [PATCH 4/4] release: 3.0.0-beta.4

---
 .release-please-manifest.json |  2 +-
 CHANGELOG.md                  | 10 ++++++++++
 pyproject.toml                |  2 +-
 src/gradient/_version.py      |  2 +-
 4 files changed, 13 insertions(+), 3 deletions(-)

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 5e212f31..2ce88448 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "3.0.0-beta.3"
+  ".": "3.0.0-beta.4"
 }
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d41133fd..351216f4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,15 @@
 # Changelog

+## 3.0.0-beta.4 (2025-08-12)
+
+Full Changelog: [v3.0.0-beta.3...v3.0.0-beta.4](https://github.com/digitalocean/gradient-python/compare/v3.0.0-beta.3...v3.0.0-beta.4)
+
+### Chores
+
+* **internal:** codegen related update ([4757cc5](https://github.com/digitalocean/gradient-python/commit/4757cc594565cf8500b4087205e6eb5fd8c5d5c5))
+* **internal:** update comment in script ([c324412](https://github.com/digitalocean/gradient-python/commit/c32441201c3156cc4fe5b400a4f396eaf19ecaad))
+* update @stainless-api/prism-cli to v5.15.0 ([835aa7c](https://github.com/digitalocean/gradient-python/commit/835aa7c204f5def64cdcd8b863581fd6a1ea37b6))
+
 ## 3.0.0-beta.3 (2025-08-08)

 Full Changelog: [v3.0.0-beta.2...v3.0.0-beta.3](https://github.com/digitalocean/gradient-python/compare/v3.0.0-beta.2...v3.0.0-beta.3)
diff --git a/pyproject.toml b/pyproject.toml
index 813edb3e..3d37f719 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "gradient"
-version = "3.0.0-beta.3"
+version = "3.0.0-beta.4"
 description = "The official Python library for the Gradient API"
 dynamic = ["readme"]
 license = "Apache-2.0"
diff --git a/src/gradient/_version.py b/src/gradient/_version.py
index 483c7ac9..428a5fa9 100644
--- a/src/gradient/_version.py
+++ b/src/gradient/_version.py
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

 __title__ = "gradient"
-__version__ = "3.0.0-beta.3"  # x-release-please-version
+__version__ = "3.0.0-beta.4"  # x-release-please-version