-
Notifications
You must be signed in to change notification settings - Fork 3.8k
/
Copy path: test_moderations.py
100 lines (77 loc) · 3.81 KB
/
test_moderations.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
from __future__ import annotations
import os
from typing import Any, cast
import pytest
from openai import OpenAI, AsyncOpenAI
from tests.utils import assert_matches_type
from openai.types import ModerationCreateResponse
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
class TestModerations:
    """Tests for the synchronous `client.moderations` resource.

    Each test runs twice via the parametrized `client` fixture: once with a
    "loose" client and once with a "strict" (response-validating) client.
    """

    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])

    @parametrize
    def test_method_create(self, client: OpenAI) -> None:
        # Minimal call: only the required `input` argument.
        result = client.moderations.create(input="I want to kill them.")
        assert_matches_type(ModerationCreateResponse, result, path=["response"])

    @parametrize
    def test_method_create_with_all_params(self, client: OpenAI) -> None:
        # Exercise every optional parameter alongside the required one.
        result = client.moderations.create(
            input="I want to kill them.",
            model="omni-moderation-2024-09-26",
        )
        assert_matches_type(ModerationCreateResponse, result, path=["response"])

    @parametrize
    def test_raw_response_create(self, client: OpenAI) -> None:
        # The raw-response variant exposes the underlying HTTP exchange.
        raw = client.moderations.with_raw_response.create(input="I want to kill them.")

        assert raw.is_closed is True
        assert raw.http_request.headers.get("X-Stainless-Lang") == "python"
        parsed = raw.parse()
        assert_matches_type(ModerationCreateResponse, parsed, path=["response"])

    @parametrize
    def test_streaming_response_create(self, client: OpenAI) -> None:
        # The streaming variant is a context manager; the body stays open
        # inside the `with` block and must be closed on exit.
        with client.moderations.with_streaming_response.create(input="I want to kill them.") as streamed:
            assert not streamed.is_closed
            assert streamed.http_request.headers.get("X-Stainless-Lang") == "python"

            parsed = streamed.parse()
            assert_matches_type(ModerationCreateResponse, parsed, path=["response"])

        assert cast(Any, streamed.is_closed) is True
class TestAsyncModerations:
    """Tests for the asynchronous `async_client.moderations` resource.

    Mirrors `TestModerations`; each test runs twice via the parametrized
    `async_client` fixture ("loose" and "strict" variants).
    """

    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])

    @parametrize
    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
        # Minimal call: only the required `input` argument.
        result = await async_client.moderations.create(input="I want to kill them.")
        assert_matches_type(ModerationCreateResponse, result, path=["response"])

    @parametrize
    async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
        # Exercise every optional parameter alongside the required one.
        result = await async_client.moderations.create(
            input="I want to kill them.",
            model="omni-moderation-2024-09-26",
        )
        assert_matches_type(ModerationCreateResponse, result, path=["response"])

    @parametrize
    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
        # Raw-response parse is synchronous even on the async client.
        raw = await async_client.moderations.with_raw_response.create(input="I want to kill them.")

        assert raw.is_closed is True
        assert raw.http_request.headers.get("X-Stainless-Lang") == "python"
        parsed = raw.parse()
        assert_matches_type(ModerationCreateResponse, parsed, path=["response"])

    @parametrize
    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
        # Streaming variant is an async context manager; `parse()` is
        # awaitable here, unlike the raw-response case above.
        async with async_client.moderations.with_streaming_response.create(input="I want to kill them.") as streamed:
            assert not streamed.is_closed
            assert streamed.http_request.headers.get("X-Stainless-Lang") == "python"

            parsed = await streamed.parse()
            assert_matches_type(ModerationCreateResponse, parsed, path=["response"])

        assert cast(Any, streamed.is_closed) is True