Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "1.14.2"
".": "1.15.0"
}
4 changes: 2 additions & 2 deletions .stats.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
configured_endpoints: 27
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/moderation-api%2Fmoderation-api-a4934bf1e7f1348c021b48224f7a7110a6e41838253dda4fbcc720dd2d2ed6b7.yml
openapi_spec_hash: 537542216811907b1d4ebf23a54dc669
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/moderation-api%2Fmoderation-api-c8e66e827fc2b1465b740a29e87da71c3b1ddca1a4bdb1023aa96c569b80e9be.yml
openapi_spec_hash: 35fdc3e34feb56cafaf4de2834201978
config_hash: 0a024bca1710e3a3194925edfedc513c
18 changes: 18 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,23 @@
# Changelog

## 1.15.0 (2026-04-24)

Full Changelog: [v1.14.2...v1.15.0](https://github.com/moderation-api/sdk-python/compare/v1.14.2...v1.15.0)

### Features

* **api:** api update ([427f2f7](https://github.com/moderation-api/sdk-python/commit/427f2f7b4b34e5a3c659247da17dd2f2bfb4b8d2))


### Performance Improvements

* **client:** optimize file structure copying in multipart requests ([2faff3e](https://github.com/moderation-api/sdk-python/commit/2faff3eea5e706eaa238b9db26a35617b2b8e8fd))


### Chores

* **internal:** more robust bootstrap script ([b13f27f](https://github.com/moderation-api/sdk-python/commit/b13f27f8fd5673b841bca8796a767a8218db497a))

## 1.14.2 (2026-04-11)

Full Changelog: [v1.14.1...v1.14.2](https://github.com/moderation-api/sdk-python/compare/v1.14.1...v1.14.2)
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "moderation_api"
version = "1.14.2"
version = "1.15.0"
description = "The official Python library for the moderation-api API"
dynamic = ["readme"]
license = "Apache-2.0"
Expand Down
2 changes: 1 addition & 1 deletion scripts/bootstrap
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ set -e

cd "$(dirname "$0")/.."

if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ] && [ -t 0 ]; then
if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "${SKIP_BREW:-}" != "1" ] && [ -t 0 ]; then
brew bundle check >/dev/null 2>&1 || {
echo -n "==> Install Homebrew dependencies? (y/N): "
read -r response
Expand Down
56 changes: 53 additions & 3 deletions src/moderation_api/_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
import io
import os
import pathlib
from typing import overload
from typing_extensions import TypeGuard
from typing import Sequence, cast, overload
from typing_extensions import TypeVar, TypeGuard

import anyio

Expand All @@ -17,7 +17,9 @@
HttpxFileContent,
HttpxRequestFiles,
)
from ._utils import is_tuple_t, is_mapping_t, is_sequence_t
from ._utils import is_list, is_mapping, is_tuple_t, is_mapping_t, is_sequence_t

_T = TypeVar("_T")


def is_base64_file_input(obj: object) -> TypeGuard[Base64FileInput]:
Expand Down Expand Up @@ -121,3 +123,51 @@ async def async_read_file_content(file: FileContent) -> HttpxFileContent:
return await anyio.Path(file).read_bytes()

return file


def deepcopy_with_paths(item: _T, paths: Sequence[Sequence[str]]) -> _T:
    """Selectively copy only the containers that lie along *paths*.

    This guards against mutation by extract_files without paying for a full
    deep copy: just the dicts and lists sitting on one of the given key paths
    are duplicated, while everything off-path is shared by reference.

    For example, with ``paths=[["foo", "files", "file"]]`` and the structure::

        {
            "foo": {
                "bar": {"baz": {}},
                "files": {"file": <content>}
            }
        }

    the root dict, ``"foo"`` and ``"files"`` are copied (they lie on the
    path), while ``"bar"`` and ``"baz"`` are returned by reference (off the
    path).
    """
    return _deepcopy_with_paths(item, paths, 0)


def _deepcopy_with_paths(item: _T, paths: Sequence[Sequence[str]], index: int) -> _T:
    """Recursive worker for ``deepcopy_with_paths``.

    *index* is the depth within each path that corresponds to *item*.
    """
    if not paths:
        return item

    if is_mapping(item):
        # Group the still-active paths by the key they select at this depth.
        grouped: dict[str, list[Sequence[str]]] = {}
        for path in paths:
            if index < len(path):
                grouped.setdefault(path[index], []).append(path)

        # No path descends through this mapping, so nothing inside it will be
        # mutated — returning it by reference avoids a redundant copy.
        if not grouped:
            return item

        copied = dict(item)
        for key, remaining in grouped.items():
            if key in copied:
                copied[key] = _deepcopy_with_paths(copied[key], remaining, index + 1)
        return cast(_T, copied)

    if is_list(item):
        # A list lies on a path only when that path uses the "<array>"
        # wildcard at the current depth.
        wildcard_paths = [p for p in paths if index < len(p) and p[index] == "<array>"]

        # No path expects a list here — safe to share by reference.
        if not wildcard_paths:
            return cast(_T, item)
        return cast(_T, [_deepcopy_with_paths(member, wildcard_paths, index + 1) for member in item])

    return item
1 change: 0 additions & 1 deletion src/moderation_api/_utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
coerce_integer as coerce_integer,
file_from_path as file_from_path,
strip_not_given as strip_not_given,
deepcopy_minimal as deepcopy_minimal,
get_async_library as get_async_library,
maybe_coerce_float as maybe_coerce_float,
get_required_header as get_required_header,
Expand Down
15 changes: 0 additions & 15 deletions src/moderation_api/_utils/_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,21 +177,6 @@ def is_iterable(obj: object) -> TypeGuard[Iterable[object]]:
return isinstance(obj, Iterable)


def deepcopy_minimal(item: _T) -> _T:
    """Recursively copy *item*, duplicating only mappings (e.g. ``dict``) and
    ``list`` instances.

    Unlike ``copy.deepcopy``, every other object is returned by reference,
    which keeps the copy cheap. This is done for performance reasons.
    """
    if is_list(item):
        return cast(_T, [deepcopy_minimal(member) for member in item])
    if is_mapping(item):
        return cast(_T, {key: deepcopy_minimal(value) for key, value in item.items()})
    return item


# copied from https://github.com/Rapptz/RoboDanny
def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str:
size = len(seq)
Expand Down
2 changes: 1 addition & 1 deletion src/moderation_api/_version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

__title__ = "moderation_api"
__version__ = "1.14.2" # x-release-please-version
__version__ = "1.15.0" # x-release-please-version
14 changes: 14 additions & 0 deletions src/moderation_api/types/content_submit_params.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from typing import Dict, Union, Iterable
from typing_extensions import Literal, Required, Annotated, TypeAlias, TypedDict

from .._types import SequenceNotStr
from .._utils import PropertyInfo

__all__ = [
Expand Down Expand Up @@ -418,6 +419,19 @@ class PolicyURLRisk(TypedDict, total=False):

flag: Required[bool]

allowlist_wordlist_ids: Annotated[SequenceNotStr[str], PropertyInfo(alias="allowlistWordlistIds")]
"""IDs of wordlists whose entries are treated as allowed URL domains.

Matches short-circuit the risk model and are never flagged.
"""

blocklist_wordlist_ids: Annotated[SequenceNotStr[str], PropertyInfo(alias="blocklistWordlistIds")]
"""IDs of wordlists whose entries are treated as blocked URL domains.

Matches short-circuit the risk model and are always flagged. Blocklists take
precedence over allowlists.
"""

threshold: float


Expand Down
49 changes: 49 additions & 0 deletions src/moderation_api/types/content_submit_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@
"PolicyClassifierOutputLabel",
"PolicyEntityMatcherOutput",
"PolicyEntityMatcherOutputMatch",
"PolicyEntityMatcherOutputMatchSignals",
"PolicyEntityMatcherOutputMatchSignalsBrandImpersonation",
"Recommendation",
"Error",
]
Expand Down Expand Up @@ -245,13 +247,60 @@ class PolicyClassifierOutput(BaseModel):
labels: Optional[List[PolicyClassifierOutputLabel]] = None


class PolicyEntityMatcherOutputMatchSignalsBrandImpersonation(BaseModel):
    """Brand-impersonation signal attached to a URL match."""

    # Brand that the URL appears to reference — presumably the impersonated
    # brand name; confirm against the API reference.
    brand: str

    # Where the brand token was detected: inside the registered domain or
    # inside a subdomain.
    method: Literal["registered_domain_token", "subdomain_token"]


class PolicyEntityMatcherOutputMatchSignals(BaseModel):
    """Observable properties of a URL (URL Risk only).

    Absent for allow/block list matches.
    """

    # Whether the URL is behind bot protection — TODO confirm exact semantics.
    bot_protection: Optional[bool] = None

    # Present when the URL appears to impersonate a known brand.
    brand_impersonation: Optional[PolicyEntityMatcherOutputMatchSignalsBrandImpersonation] = None

    # Age of the domain in days, when it could be determined.
    domain_age_days: Optional[int] = None

    # URL reached after following redirects — presumably; verify against the
    # API reference.
    final_url: Optional[str] = None

    # Whether the domain has email configuration (e.g. MX records) — TODO confirm.
    has_email_setup: Optional[bool] = None

    has_suspicious_characters: bool

    is_link_shortener: bool

    # NOTE(review): reported to/by whom is not visible here — confirm source.
    is_reported: bool

    # Number of redirects observed, when the URL was followed.
    redirect_count: Optional[int] = None


class PolicyEntityMatcherOutputMatch(BaseModel):
    """A single match produced by an entity-matcher policy."""

    # The matched text.
    match: str

    # Confidence score for the match.
    probability: float

    # Location of the match — presumably [start, end] character offsets;
    # confirm against the API reference.
    span: List[int]

    entity_type: Optional[str] = None
    """Sub-type of the entity match — e.g.

    the NER key (email, phone, name, …) for PII matches. Absent for URL Risk and
    wordlist matches where the type is already encoded in the parent label.
    """

    reasons: Optional[List[str]] = None
    """Stable codes explaining why a URL was flagged (URL Risk only)."""

    signals: Optional[PolicyEntityMatcherOutputMatchSignals] = None
    """Observable properties of a URL (URL Risk only).

    Absent for allow/block list matches.
    """


class PolicyEntityMatcherOutput(BaseModel):
"""Entity matcher policy."""
Expand Down
58 changes: 0 additions & 58 deletions tests/test_deepcopy.py

This file was deleted.

Loading
Loading