2 changes: 1 addition & 1 deletion .release-please-manifest.json
@@ -1,3 +1,3 @@
{
".": "1.3.0"
".": "1.4.0"
}
4 changes: 2 additions & 2 deletions .stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 11
- openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/deeprails-inc%2Fdeeprails-5f0bb342de09a42c51e94feacb97cb4c11c513120637868e4bd0cdaedff14c0c.yml
- openapi_spec_hash: 616b686ef84ded4978605efdbb72183e
+ openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/deeprails-inc%2Fdeeprails-1b76015cbe2a339cfce6bdbdd0b09dcc23535e2dcd992306697d1e40d4a0f035.yml
+ openapi_spec_hash: ab1376d561bd2ee20973ba549a1d73f7
config_hash: 63c6f27e0ba2846cf2d04e70777b3b21
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,13 @@
# Changelog

+ ## 1.4.0 (2025-10-10)
+
+ Full Changelog: [v1.3.0...v1.4.0](https://github.com/deeprails/deeprails-sdk-python/compare/v1.3.0...v1.4.0)
+
+ ### Features
+
+ * **api:** manual updates ([b6beebc](https://github.com/deeprails/deeprails-sdk-python/commit/b6beebca6323df5ae35ef30d419d96c831bebbb8))
+
## 1.3.0 (2025-10-08)

Full Changelog: [v1.2.0...v1.3.0](https://github.com/deeprails/deeprails-sdk-python/compare/v1.2.0...v1.3.0)
1 change: 0 additions & 1 deletion README.md
@@ -141,7 +141,6 @@ workflow_event_response = client.defend.submit_event(
model_input={"user_prompt": "user_prompt"},
model_output="model_output",
model_used="model_used",
nametag="nametag",
run_mode="precision_plus",
)
print(workflow_event_response.model_input)
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "deeprails"
version = "1.3.0"
version = "1.4.0"
description = "The official Python library for the deeprails API"
dynamic = ["readme"]
license = "Apache-2.0"
2 changes: 1 addition & 1 deletion src/deeprails/_version.py
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

__title__ = "deeprails"
__version__ = "1.3.0" # x-release-please-version
__version__ = "1.4.0" # x-release-please-version
16 changes: 8 additions & 8 deletions src/deeprails/resources/defend.py
@@ -199,8 +199,8 @@ def submit_event(
model_input: defend_submit_event_params.ModelInput,
model_output: str,
model_used: str,
- nametag: str,
run_mode: Literal["precision_plus", "precision", "smart", "economy"],
+ nametag: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -221,13 +221,13 @@ def submit_event(

model_used: Model ID used to generate the output, like `gpt-4o` or `o3`.

- nametag: An optional, user-defined tag for the event.
-
run_mode: Run mode for the workflow event. The run mode allows the user to optimize for
speed, accuracy, and cost by determining which models are used to evaluate the
event. Available run modes include `precision_plus`, `precision`, `smart`, and
`economy`. Defaults to `smart`.

+ nametag: An optional, user-defined tag for the event.
+
extra_headers: Send extra headers

extra_query: Add additional query parameters to the request
@@ -245,8 +245,8 @@ def submit_event(
"model_input": model_input,
"model_output": model_output,
"model_used": model_used,
"nametag": nametag,
"run_mode": run_mode,
"nametag": nametag,
},
defend_submit_event_params.DefendSubmitEventParams,
),
@@ -477,8 +477,8 @@ async def submit_event(
model_input: defend_submit_event_params.ModelInput,
model_output: str,
model_used: str,
- nametag: str,
run_mode: Literal["precision_plus", "precision", "smart", "economy"],
+ nametag: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -499,13 +499,13 @@ async def submit_event(

model_used: Model ID used to generate the output, like `gpt-4o` or `o3`.

- nametag: An optional, user-defined tag for the event.
-
run_mode: Run mode for the workflow event. The run mode allows the user to optimize for
speed, accuracy, and cost by determining which models are used to evaluate the
event. Available run modes include `precision_plus`, `precision`, `smart`, and
`economy`. Defaults to `smart`.

+ nametag: An optional, user-defined tag for the event.
+
extra_headers: Send extra headers

extra_query: Add additional query parameters to the request
@@ -523,8 +523,8 @@ async def submit_event(
"model_input": model_input,
"model_output": model_output,
"model_used": model_used,
"nametag": nametag,
"run_mode": run_mode,
"nametag": nametag,
},
defend_submit_event_params.DefendSubmitEventParams,
),
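Taken together, the defend.py changes make `nametag` an optional keyword on `submit_event` in both the sync and async clients: it is no longer required, and when supplied it is forwarded after `run_mode`. A minimal sketch of the resulting call patterns is below; it mirrors the README example, and the `workflow_id` argument, placeholder values, and environment-based client configuration are assumptions for illustration rather than details shown in this diff.

from deeprails import Deeprails

# Client construction and auth as in the README; assumed to read the API key
# from the environment.
client = Deeprails()

# `nametag` can now be omitted entirely; `run_mode` is still required.
event = client.defend.submit_event(
    workflow_id="workflow_id",  # assumed parameter name, placeholder value
    model_input={"user_prompt": "user_prompt"},
    model_output="model_output",
    model_used="model_used",
    run_mode="smart",
)
print(event.model_input)

# Passing a tag still works, since `nametag` remains an accepted optional keyword.
tagged_event = client.defend.submit_event(
    workflow_id="workflow_id",
    model_input={"user_prompt": "user_prompt"},
    model_output="model_output",
    model_used="model_used",
    run_mode="smart",
    nametag="nametag",
)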
6 changes: 3 additions & 3 deletions src/deeprails/types/defend_submit_event_params.py
@@ -22,9 +22,6 @@ class DefendSubmitEventParams(TypedDict, total=False):
model_used: Required[str]
"""Model ID used to generate the output, like `gpt-4o` or `o3`."""

- nametag: Required[str]
- """An optional, user-defined tag for the event."""
-
run_mode: Required[Literal["precision_plus", "precision", "smart", "economy"]]
"""Run mode for the workflow event.

@@ -34,6 +31,9 @@ class DefendSubmitEventParams(TypedDict, total=False):
`smart`.
"""

+ nametag: str
+ """An optional, user-defined tag for the event."""
+

class ModelInputTyped(TypedDict, total=False):
user_prompt: Required[str]
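The same change at the type level: because `nametag` is no longer wrapped in `Required`, a params dict that omits the key still satisfies `DefendSubmitEventParams` under a type checker. A small sketch, assuming the module is importable from `deeprails.types.defend_submit_event_params` as the resource code's references suggest:

from deeprails.types.defend_submit_event_params import DefendSubmitEventParams

# Type-checks without "nametag" now that the key is optional
# (total=False and no Required[...] wrapper).
params: DefendSubmitEventParams = {
    "model_input": {"user_prompt": "user_prompt"},
    "model_output": "model_output",
    "model_used": "model_used",
    "run_mode": "smart",
}

# Still accepted as an optional key:
params["nametag"] = "nametag"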
12 changes: 2 additions & 10 deletions tests/api_resources/test_defend.py
@@ -179,7 +179,6 @@ def test_method_submit_event(self, client: Deeprails) -> None:
model_input={"user_prompt": "user_prompt"},
model_output="model_output",
model_used="model_used",
nametag="nametag",
run_mode="precision_plus",
)
assert_matches_type(WorkflowEventResponse, defend, path=["response"])
@@ -195,8 +194,8 @@ def test_method_submit_event_with_all_params(self, client: Deeprails) -> None:
},
model_output="model_output",
model_used="model_used",
nametag="nametag",
run_mode="precision_plus",
nametag="nametag",
)
assert_matches_type(WorkflowEventResponse, defend, path=["response"])

@@ -208,7 +207,6 @@ def test_raw_response_submit_event(self, client: Deeprails) -> None:
model_input={"user_prompt": "user_prompt"},
model_output="model_output",
model_used="model_used",
nametag="nametag",
run_mode="precision_plus",
)

@@ -225,7 +223,6 @@ def test_streaming_response_submit_event(self, client: Deeprails) -> None:
model_input={"user_prompt": "user_prompt"},
model_output="model_output",
model_used="model_used",
nametag="nametag",
run_mode="precision_plus",
) as response:
assert not response.is_closed
@@ -245,7 +242,6 @@ def test_path_params_submit_event(self, client: Deeprails) -> None:
model_input={"user_prompt": "user_prompt"},
model_output="model_output",
model_used="model_used",
nametag="nametag",
run_mode="precision_plus",
)

@@ -466,7 +462,6 @@ async def test_method_submit_event(self, async_client: AsyncDeeprails) -> None:
model_input={"user_prompt": "user_prompt"},
model_output="model_output",
model_used="model_used",
nametag="nametag",
run_mode="precision_plus",
)
assert_matches_type(WorkflowEventResponse, defend, path=["response"])
@@ -482,8 +477,8 @@ async def test_method_submit_event_with_all_params(self, async_client: AsyncDeep
},
model_output="model_output",
model_used="model_used",
nametag="nametag",
run_mode="precision_plus",
nametag="nametag",
)
assert_matches_type(WorkflowEventResponse, defend, path=["response"])

@@ -495,7 +490,6 @@ async def test_raw_response_submit_event(self, async_client: AsyncDeeprails) ->
model_input={"user_prompt": "user_prompt"},
model_output="model_output",
model_used="model_used",
nametag="nametag",
run_mode="precision_plus",
)

@@ -512,7 +506,6 @@ async def test_streaming_response_submit_event(self, async_client: AsyncDeeprail
model_input={"user_prompt": "user_prompt"},
model_output="model_output",
model_used="model_used",
nametag="nametag",
run_mode="precision_plus",
) as response:
assert not response.is_closed
@@ -532,7 +525,6 @@ async def test_path_params_submit_event(self, async_client: AsyncDeeprails) -> N
model_input={"user_prompt": "user_prompt"},
model_output="model_output",
model_used="model_used",
nametag="nametag",
run_mode="precision_plus",
)
