diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 2a8f4ff..3e9af1b 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.3.0" + ".": "1.4.0" } \ No newline at end of file diff --git a/.stats.yml b/.stats.yml index 87138d0..8cba63f 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 11 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/deeprails-inc%2Fdeeprails-5f0bb342de09a42c51e94feacb97cb4c11c513120637868e4bd0cdaedff14c0c.yml -openapi_spec_hash: 616b686ef84ded4978605efdbb72183e +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/deeprails-inc%2Fdeeprails-1b76015cbe2a339cfce6bdbdd0b09dcc23535e2dcd992306697d1e40d4a0f035.yml +openapi_spec_hash: ab1376d561bd2ee20973ba549a1d73f7 config_hash: 63c6f27e0ba2846cf2d04e70777b3b21 diff --git a/CHANGELOG.md b/CHANGELOG.md index c8efc78..7b33a04 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## 1.4.0 (2025-10-10) + +Full Changelog: [v1.3.0...v1.4.0](https://github.com/deeprails/deeprails-sdk-python/compare/v1.3.0...v1.4.0) + +### Features + +* **api:** manual updates ([b6beebc](https://github.com/deeprails/deeprails-sdk-python/commit/b6beebca6323df5ae35ef30d419d96c831bebbb8)) + ## 1.3.0 (2025-10-08) Full Changelog: [v1.2.0...v1.3.0](https://github.com/deeprails/deeprails-sdk-python/compare/v1.2.0...v1.3.0) diff --git a/README.md b/README.md index 75eb99c..68db30d 100644 --- a/README.md +++ b/README.md @@ -141,7 +141,6 @@ workflow_event_response = client.defend.submit_event( model_input={"user_prompt": "user_prompt"}, model_output="model_output", model_used="model_used", - nametag="nametag", run_mode="precision_plus", ) print(workflow_event_response.model_input) diff --git a/pyproject.toml b/pyproject.toml index 7fb0b36..7883cab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "deeprails" -version = "1.3.0" +version = "1.4.0" description = "The official Python library for the deeprails API" dynamic = ["readme"] license = "Apache-2.0" diff --git a/src/deeprails/_version.py b/src/deeprails/_version.py index cd855b6..a88ba94 100644 --- a/src/deeprails/_version.py +++ b/src/deeprails/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. __title__ = "deeprails" -__version__ = "1.3.0" # x-release-please-version +__version__ = "1.4.0" # x-release-please-version diff --git a/src/deeprails/resources/defend.py b/src/deeprails/resources/defend.py index e62c0b6..fd55678 100644 --- a/src/deeprails/resources/defend.py +++ b/src/deeprails/resources/defend.py @@ -199,8 +199,8 @@ def submit_event( model_input: defend_submit_event_params.ModelInput, model_output: str, model_used: str, - nametag: str, run_mode: Literal["precision_plus", "precision", "smart", "economy"], + nametag: str | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -221,13 +221,13 @@ def submit_event( model_used: Model ID used to generate the output, like `gpt-4o` or `o3`. - nametag: An optional, user-defined tag for the event. - run_mode: Run mode for the workflow event. 
The run mode allows the user to optimize for speed, accuracy, and cost by determining which models are used to evaluate the event. Available run modes include `precision_plus`, `precision`, `smart`, and `economy`. Defaults to `smart`. + nametag: An optional, user-defined tag for the event. + extra_headers: Send extra headers extra_query: Add additional query parameters to the request @@ -245,8 +245,8 @@ def submit_event( "model_input": model_input, "model_output": model_output, "model_used": model_used, - "nametag": nametag, "run_mode": run_mode, + "nametag": nametag, }, defend_submit_event_params.DefendSubmitEventParams, ), @@ -477,8 +477,8 @@ async def submit_event( model_input: defend_submit_event_params.ModelInput, model_output: str, model_used: str, - nametag: str, run_mode: Literal["precision_plus", "precision", "smart", "economy"], + nametag: str | Omit = omit, # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. # The extra values given here take precedence over values defined on the client or passed to this method. extra_headers: Headers | None = None, @@ -499,13 +499,13 @@ async def submit_event( model_used: Model ID used to generate the output, like `gpt-4o` or `o3`. - nametag: An optional, user-defined tag for the event. - run_mode: Run mode for the workflow event. The run mode allows the user to optimize for speed, accuracy, and cost by determining which models are used to evaluate the event. Available run modes include `precision_plus`, `precision`, `smart`, and `economy`. Defaults to `smart`. + nametag: An optional, user-defined tag for the event. + extra_headers: Send extra headers extra_query: Add additional query parameters to the request @@ -523,8 +523,8 @@ async def submit_event( "model_input": model_input, "model_output": model_output, "model_used": model_used, - "nametag": nametag, "run_mode": run_mode, + "nametag": nametag, }, defend_submit_event_params.DefendSubmitEventParams, ), diff --git a/src/deeprails/types/defend_submit_event_params.py b/src/deeprails/types/defend_submit_event_params.py index ef1f9fb..97a80c5 100644 --- a/src/deeprails/types/defend_submit_event_params.py +++ b/src/deeprails/types/defend_submit_event_params.py @@ -22,9 +22,6 @@ class DefendSubmitEventParams(TypedDict, total=False): model_used: Required[str] """Model ID used to generate the output, like `gpt-4o` or `o3`.""" - nametag: Required[str] - """An optional, user-defined tag for the event.""" - run_mode: Required[Literal["precision_plus", "precision", "smart", "economy"]] """Run mode for the workflow event. @@ -34,6 +31,9 @@ class DefendSubmitEventParams(TypedDict, total=False): `smart`. 
""" + nametag: str + """An optional, user-defined tag for the event.""" + class ModelInputTyped(TypedDict, total=False): user_prompt: Required[str] diff --git a/tests/api_resources/test_defend.py b/tests/api_resources/test_defend.py index bcaa060..bf1a8fa 100644 --- a/tests/api_resources/test_defend.py +++ b/tests/api_resources/test_defend.py @@ -179,7 +179,6 @@ def test_method_submit_event(self, client: Deeprails) -> None: model_input={"user_prompt": "user_prompt"}, model_output="model_output", model_used="model_used", - nametag="nametag", run_mode="precision_plus", ) assert_matches_type(WorkflowEventResponse, defend, path=["response"]) @@ -195,8 +194,8 @@ def test_method_submit_event_with_all_params(self, client: Deeprails) -> None: }, model_output="model_output", model_used="model_used", - nametag="nametag", run_mode="precision_plus", + nametag="nametag", ) assert_matches_type(WorkflowEventResponse, defend, path=["response"]) @@ -208,7 +207,6 @@ def test_raw_response_submit_event(self, client: Deeprails) -> None: model_input={"user_prompt": "user_prompt"}, model_output="model_output", model_used="model_used", - nametag="nametag", run_mode="precision_plus", ) @@ -225,7 +223,6 @@ def test_streaming_response_submit_event(self, client: Deeprails) -> None: model_input={"user_prompt": "user_prompt"}, model_output="model_output", model_used="model_used", - nametag="nametag", run_mode="precision_plus", ) as response: assert not response.is_closed @@ -245,7 +242,6 @@ def test_path_params_submit_event(self, client: Deeprails) -> None: model_input={"user_prompt": "user_prompt"}, model_output="model_output", model_used="model_used", - nametag="nametag", run_mode="precision_plus", ) @@ -466,7 +462,6 @@ async def test_method_submit_event(self, async_client: AsyncDeeprails) -> None: model_input={"user_prompt": "user_prompt"}, model_output="model_output", model_used="model_used", - nametag="nametag", run_mode="precision_plus", ) assert_matches_type(WorkflowEventResponse, defend, path=["response"]) @@ -482,8 +477,8 @@ async def test_method_submit_event_with_all_params(self, async_client: AsyncDeep }, model_output="model_output", model_used="model_used", - nametag="nametag", run_mode="precision_plus", + nametag="nametag", ) assert_matches_type(WorkflowEventResponse, defend, path=["response"]) @@ -495,7 +490,6 @@ async def test_raw_response_submit_event(self, async_client: AsyncDeeprails) -> model_input={"user_prompt": "user_prompt"}, model_output="model_output", model_used="model_used", - nametag="nametag", run_mode="precision_plus", ) @@ -512,7 +506,6 @@ async def test_streaming_response_submit_event(self, async_client: AsyncDeeprail model_input={"user_prompt": "user_prompt"}, model_output="model_output", model_used="model_used", - nametag="nametag", run_mode="precision_plus", ) as response: assert not response.is_closed @@ -532,7 +525,6 @@ async def test_path_params_submit_event(self, async_client: AsyncDeeprails) -> N model_input={"user_prompt": "user_prompt"}, model_output="model_output", model_used="model_used", - nametag="nametag", run_mode="precision_plus", )