Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
46 commits
Select commit Hold shift + click to select a range
de87222
codegen metadata
stainless-app[bot] Apr 17, 2026
3c25919
codegen metadata
stainless-app[bot] Apr 17, 2026
87fe899
perf(client): optimize file structure copying in multipart requests
stainless-app[bot] Apr 17, 2026
2f66372
codegen metadata
stainless-app[bot] Apr 17, 2026
91cf36a
codegen metadata
stainless-app[bot] Apr 18, 2026
3594da6
codegen metadata
stainless-app[bot] Apr 18, 2026
f288a1e
codegen metadata
stainless-app[bot] Apr 18, 2026
ecc5552
codegen metadata
stainless-app[bot] Apr 18, 2026
48fab18
codegen metadata
stainless-app[bot] Apr 18, 2026
b67b84c
codegen metadata
stainless-app[bot] Apr 18, 2026
406a2d1
codegen metadata
stainless-app[bot] Apr 18, 2026
7fa7e89
codegen metadata
stainless-app[bot] Apr 18, 2026
3552087
codegen metadata
stainless-app[bot] Apr 18, 2026
9fc867e
codegen metadata
stainless-app[bot] Apr 18, 2026
1d5dd6b
codegen metadata
stainless-app[bot] Apr 18, 2026
28db15d
codegen metadata
stainless-app[bot] Apr 18, 2026
86767d8
codegen metadata
stainless-app[bot] Apr 18, 2026
8925679
codegen metadata
stainless-app[bot] Apr 18, 2026
f8be558
codegen metadata
stainless-app[bot] Apr 18, 2026
8ce580f
codegen metadata
stainless-app[bot] Apr 18, 2026
c44d7f5
codegen metadata
stainless-app[bot] Apr 19, 2026
89bc853
codegen metadata
stainless-app[bot] Apr 19, 2026
99a4ca2
codegen metadata
stainless-app[bot] Apr 19, 2026
6022bf0
codegen metadata
stainless-app[bot] Apr 19, 2026
84388f9
codegen metadata
stainless-app[bot] Apr 19, 2026
60a33a2
codegen metadata
stainless-app[bot] Apr 19, 2026
daf22e1
codegen metadata
stainless-app[bot] Apr 19, 2026
56f40dd
codegen metadata
stainless-app[bot] Apr 19, 2026
765697c
codegen metadata
stainless-app[bot] Apr 19, 2026
b6281e8
codegen metadata
stainless-app[bot] Apr 19, 2026
45d11c2
codegen metadata
stainless-app[bot] Apr 20, 2026
d6f2b82
codegen metadata
stainless-app[bot] Apr 20, 2026
a3be852
codegen metadata
stainless-app[bot] Apr 20, 2026
ee329ec
codegen metadata
stainless-app[bot] Apr 20, 2026
b56ab8f
codegen metadata
stainless-app[bot] Apr 20, 2026
ae4dddb
codegen metadata
stainless-app[bot] Apr 20, 2026
3c6b2a0
codegen metadata
stainless-app[bot] Apr 20, 2026
b2f9ab4
codegen metadata
stainless-app[bot] Apr 20, 2026
b84f8ed
codegen metadata
stainless-app[bot] Apr 21, 2026
18d352f
codegen metadata
stainless-app[bot] Apr 21, 2026
dda9bdd
codegen metadata
stainless-app[bot] Apr 21, 2026
d5b9945
feat(api): api update
stainless-app[bot] Apr 21, 2026
b59d6d8
fix(adk): fix to queue drain (#327)
levilentz Apr 21, 2026
567984c
codegen metadata
stainless-app[bot] Apr 21, 2026
0a120c7
Add task_id to span creation (#329)
declan-scale Apr 21, 2026
9c7a772
release: 0.10.2
stainless-app[bot] Apr 21, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "0.10.1"
".": "0.10.2"
}
4 changes: 2 additions & 2 deletions .stats.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
configured_endpoints: 45
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/sgp%2Fagentex-sdk-636aa63c588134e6f47fc45212049593d91f810a9c7bd8d7a57810cf1b5ffc92.yml
openapi_spec_hash: c76a42d4510aeafd896bc0d596f17af4
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/sgp%2Fagentex-sdk-eeb5bf63b18d948611eec48d0225e9bba63b170f64eeeb35d91825724b7cf6c3.yml
openapi_spec_hash: 5bbd18a405a11e8497d38a5a88b98018
config_hash: fb079ef7936611b032568661b8165f19
18 changes: 18 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,23 @@
# Changelog

## 0.10.2 (2026-04-21)

Full Changelog: [v0.10.1...v0.10.2](https://github.com/scaleapi/scale-agentex-python/compare/v0.10.1...v0.10.2)

### Features

* **api:** api update ([d5b9945](https://github.com/scaleapi/scale-agentex-python/commit/d5b99455c248a629bb2c56a2b5daf192d9f70db8))


### Bug Fixes

* **adk:** fix to queue drain ([#327](https://github.com/scaleapi/scale-agentex-python/issues/327)) ([b59d6d8](https://github.com/scaleapi/scale-agentex-python/commit/b59d6d8b59cec9548ec468cae3827d785c9f86f7))


### Performance Improvements

* **client:** optimize file structure copying in multipart requests ([87fe899](https://github.com/scaleapi/scale-agentex-python/commit/87fe899713a2ec88f1c32b347a7d5c78124aaf56))

## 0.10.1 (2026-04-17)

Full Changelog: [v0.10.0...v0.10.1](https://github.com/scaleapi/scale-agentex-python/compare/v0.10.0...v0.10.1)
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "agentex-sdk"
version = "0.10.1"
version = "0.10.2"
description = "The official Python library for the agentex API"
dynamic = ["readme"]
license = "Apache-2.0"
Expand Down
56 changes: 53 additions & 3 deletions src/agentex/_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
import io
import os
import pathlib
from typing import overload
from typing_extensions import TypeGuard
from typing import Sequence, cast, overload
from typing_extensions import TypeVar, TypeGuard

import anyio

Expand All @@ -17,7 +17,9 @@
HttpxFileContent,
HttpxRequestFiles,
)
from ._utils import is_tuple_t, is_mapping_t, is_sequence_t
from ._utils import is_list, is_mapping, is_tuple_t, is_mapping_t, is_sequence_t

_T = TypeVar("_T")


def is_base64_file_input(obj: object) -> TypeGuard[Base64FileInput]:
Expand Down Expand Up @@ -121,3 +123,51 @@ async def async_read_file_content(file: FileContent) -> HttpxFileContent:
return await anyio.Path(file).read_bytes()

return file


def deepcopy_with_paths(item: _T, paths: Sequence[Sequence[str]]) -> _T:
    """Selectively copy only the containers that lie along the given paths.

    This guards against mutation by extract_files without paying for a full
    deep copy: just the dicts and lists a path traverses are duplicated,
    while every other value stays shared with the original by reference.

    For example, given paths=[["foo", "files", "file"]] and the structure:
    {
        "foo": {
            "bar": {"baz": {}},
            "files": {"file": <content>}
        }
    }
    The root dict, "foo", and "files" are copied (they lie on the path).
    "bar" and "baz" are returned by reference (off the path).
    """
    # Delegate to the recursive worker, starting at path-segment depth 0.
    return _deepcopy_with_paths(item, paths, 0)


def _deepcopy_with_paths(item: _T, paths: Sequence[Sequence[str]], index: int) -> _T:
    """Recursive worker for deepcopy_with_paths; ``index`` is the current path depth."""
    if not paths:
        return item

    if is_mapping(item):
        # Bucket the surviving paths by the key they name at this depth.
        grouped: dict[str, list[Sequence[str]]] = {}
        for path in paths:
            if index < len(path):
                grouped.setdefault(path[index], []).append(path)

        # No path descends into this mapping, so it cannot be mutated and
        # copying it would be redundant — share it by reference.
        if not grouped:
            return item

        copied = dict(item)
        for key, remaining in grouped.items():
            if key in copied:
                copied[key] = _deepcopy_with_paths(copied[key], remaining, index + 1)
        return cast(_T, copied)

    if is_list(item):
        # A list lies on a path only when the segment at this depth is "<array>".
        survivors = [path for path in paths if index < len(path) and path[index] == "<array>"]

        # If no path expects a list here, nothing inside will be mutated —
        # return the original by reference.
        if not survivors:
            return cast(_T, item)
        return cast(_T, [_deepcopy_with_paths(element, survivors, index + 1) for element in item])

    # Scalars and other non-container values are never copied.
    return item
1 change: 0 additions & 1 deletion src/agentex/_utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
coerce_integer as coerce_integer,
file_from_path as file_from_path,
strip_not_given as strip_not_given,
deepcopy_minimal as deepcopy_minimal,
get_async_library as get_async_library,
maybe_coerce_float as maybe_coerce_float,
get_required_header as get_required_header,
Expand Down
15 changes: 0 additions & 15 deletions src/agentex/_utils/_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,21 +177,6 @@ def is_iterable(obj: object) -> TypeGuard[Iterable[object]]:
return isinstance(obj, Iterable)


def deepcopy_minimal(item: _T) -> _T:
    """Lightweight stand-in for copy.deepcopy() that recursively copies only:

    - mappings, e.g. `dict`
    - list

    Every other object is returned by reference; restricting the copy to
    these container types is a deliberate performance trade-off.
    """
    if is_mapping(item):
        # Rebuild the mapping with each value copied recursively.
        copied_mapping: dict[object, object] = {}
        for key, value in item.items():
            copied_mapping[key] = deepcopy_minimal(value)
        return cast(_T, copied_mapping)
    if is_list(item):
        copied_list = [deepcopy_minimal(element) for element in item]
        return cast(_T, copied_list)
    return item


# copied from https://github.com/Rapptz/RoboDanny
def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str:
size = len(seq)
Expand Down
2 changes: 1 addition & 1 deletion src/agentex/_version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

__title__ = "agentex"
__version__ = "0.10.1" # x-release-please-version
__version__ = "0.10.2" # x-release-please-version
11 changes: 8 additions & 3 deletions src/agentex/lib/adk/_modules/tracing.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,9 +64,7 @@ def _tracing_service(self) -> TracingService:
# Re-create the underlying httpx client when the event loop changes
# (e.g. between HTTP requests in a sync ASGI server) to avoid
# "Event loop is closed" / "bound to a different event loop" errors.
if self._tracing_service_lazy is None or (
loop_id is not None and loop_id != self._bound_loop_id
):
if self._tracing_service_lazy is None or (loop_id is not None and loop_id != self._bound_loop_id):
import httpx

# Disable keepalive so each span HTTP call gets a fresh TCP
Expand All @@ -93,6 +91,7 @@ async def span(
input: list[Any] | dict[str, Any] | BaseModel | None = None,
data: list[Any] | dict[str, Any] | BaseModel | None = None,
parent_id: str | None = None,
task_id: str | None = None,
start_to_close_timeout: timedelta = timedelta(seconds=5),
heartbeat_timeout: timedelta = timedelta(seconds=5),
retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
Expand All @@ -109,6 +108,7 @@ async def span(
input (Union[List, Dict, BaseModel]): The input for the span.
parent_id (Optional[str]): The parent span ID for the span.
data (Optional[Union[List, Dict, BaseModel]]): The data for the span.
task_id (Optional[str]): The task ID this span belongs to.
start_to_close_timeout (timedelta): The start to close timeout for the span.
heartbeat_timeout (timedelta): The heartbeat timeout for the span.
retry_policy (RetryPolicy): The retry policy for the span.
Expand All @@ -126,6 +126,7 @@ async def span(
input=input,
parent_id=parent_id,
data=data,
task_id=task_id,
start_to_close_timeout=start_to_close_timeout,
heartbeat_timeout=heartbeat_timeout,
retry_policy=retry_policy,
Expand All @@ -149,6 +150,7 @@ async def start_span(
input: list[Any] | dict[str, Any] | BaseModel | None = None,
parent_id: str | None = None,
data: list[Any] | dict[str, Any] | BaseModel | None = None,
task_id: str | None = None,
start_to_close_timeout: timedelta = timedelta(seconds=5),
heartbeat_timeout: timedelta = timedelta(seconds=1),
retry_policy: RetryPolicy = DEFAULT_RETRY_POLICY,
Expand All @@ -162,6 +164,7 @@ async def start_span(
input (Union[List, Dict, BaseModel]): The input for the span.
parent_id (Optional[str]): The parent span ID for the span.
data (Optional[Union[List, Dict, BaseModel]]): The data for the span.
task_id (Optional[str]): The task ID this span belongs to.
start_to_close_timeout (timedelta): The start to close timeout for the span.
heartbeat_timeout (timedelta): The heartbeat timeout for the span.
retry_policy (RetryPolicy): The retry policy for the span.
Expand All @@ -175,6 +178,7 @@ async def start_span(
name=name,
input=input,
data=data,
task_id=task_id,
)
if in_temporal_workflow():
return await ActivityHelpers.execute_activity(
Expand All @@ -192,6 +196,7 @@ async def start_span(
input=input,
parent_id=parent_id,
data=data,
task_id=task_id,
)

async def end_span(
Expand Down
2 changes: 2 additions & 0 deletions src/agentex/lib/core/services/adk/tracing.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,13 +22,15 @@ async def start_span(
parent_id: str | None = None,
input: list[Any] | dict[str, Any] | BaseModel | None = None,
data: list[Any] | dict[str, Any] | BaseModel | None = None,
task_id: str | None = None,
) -> Span | None:
trace = self._tracer.trace(trace_id)
span = await trace.start_span(
name=name,
parent_id=parent_id,
input=input or {},
data=data,
task_id=task_id,
)
heartbeat_if_in_workflow("start span")
return span
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ class StartSpanParams(BaseModel):
name: str
input: list[Any] | dict[str, Any] | BaseModel | None = None
data: list[Any] | dict[str, Any] | BaseModel | None = None
task_id: str | None = None


class EndSpanParams(BaseModel):
Expand All @@ -47,6 +48,7 @@ async def start_span(self, params: StartSpanParams) -> Span | None:
name=params.name,
input=params.input,
data=params.data,
task_id=params.task_id,
)

@activity.defn(name=TracingActivityName.END_SPAN)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,9 @@ def __init__(self, config: AgentexTracingProcessorConfig): # noqa: ARG002
),
)

# TODO(AGX1-199): Add batch create/update endpoints to Agentex API and use
# them here instead of one HTTP call per span.
# https://linear.app/scale-epd/issue/AGX1-199/add-agentex-batch-endpoint-for-traces
@override
async def on_span_start(self, span: Span) -> None:
await self.client.spans.create(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,9 @@ async def on_span_start(self, span: Span) -> None:
if self.disabled:
logger.warning("SGP is disabled, skipping span upsert")
return
# TODO(AGX1-198): Batch multiple spans into a single upsert_batch call
# instead of one span per HTTP request.
# https://linear.app/scale-epd/issue/AGX1-198/actually-use-sgp-batching-for-spans
await self.sgp_async_client.spans.upsert_batch( # type: ignore[union-attr]
items=[sgp_span.to_request_params()]
)
Expand All @@ -155,6 +158,7 @@ async def on_span_end(self, span: Span) -> None:
return

self._add_source_to_span(span)
sgp_span.input = span.input # type: ignore[assignment]
sgp_span.output = span.output # type: ignore[assignment]
sgp_span.metadata = span.data # type: ignore[assignment]
sgp_span.end_time = span.end_time.isoformat() # type: ignore[union-attr]
Expand Down
63 changes: 55 additions & 8 deletions src/agentex/lib/core/tracing/span_queue.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@

logger = make_logger(__name__)

_DEFAULT_BATCH_SIZE = 50


class SpanEventType(str, Enum):
START = "start"
Expand All @@ -28,15 +30,18 @@ class _SpanQueueItem:
class AsyncSpanQueue:
"""Background FIFO queue for async span processing.

Span events are enqueued synchronously (non-blocking) and processed
sequentially by a background drain task. This keeps tracing HTTP calls
off the critical request path while preserving start-before-end ordering.
Span events are enqueued synchronously (non-blocking) and drained by a
background task. Items are processed in batches: all START events in a
batch are flushed concurrently, then all END events, so that per-span
start-before-end ordering is preserved while HTTP calls for independent
spans execute in parallel.
"""

def __init__(self, batch_size: int = _DEFAULT_BATCH_SIZE) -> None:
    """Create an idle queue; the drain task is started lazily on first enqueue.

    Args:
        batch_size: Maximum number of queued span events drained and
            processed together in a single batch.
    """
    # NOTE(review): the scraped diff carried two fused `def __init__` signature
    # lines; only the batched signature is kept here.
    # FIFO buffer of pending span events awaiting background processing.
    self._queue: asyncio.Queue[_SpanQueueItem] = asyncio.Queue()
    # Lazily-created background task that drains the queue.
    self._drain_task: asyncio.Task[None] | None = None
    # Set during shutdown so the queue stops accepting/processing new work.
    self._stopping = False
    self._batch_size = batch_size

def enqueue(
self,
Expand All @@ -54,9 +59,45 @@ def _ensure_drain_running(self) -> None:
if self._drain_task is None or self._drain_task.done():
self._drain_task = asyncio.create_task(self._drain_loop())

# ------------------------------------------------------------------
# Drain loop
# ------------------------------------------------------------------

async def _drain_loop(self) -> None:
    """Forever drain the queue in batches, keeping tracing HTTP calls off the hot path.

    Each iteration blocks for one event, opportunistically collects up to
    ``self._batch_size`` ready events without blocking, then flushes all
    START events before all END events so that per-span start-before-end
    ordering is preserved within a batch.
    """
    while True:
        # Block until at least one item is available.
        # (A stale duplicated `await self._queue.get()` line from the diff
        # residue was removed here — it would have consumed and silently
        # dropped one event per iteration.)
        first = await self._queue.get()
        batch: list[_SpanQueueItem] = [first]

        # Opportunistically grab more ready items (non-blocking).
        while len(batch) < self._batch_size:
            try:
                batch.append(self._queue.get_nowait())
            except asyncio.QueueEmpty:
                break

        try:
            # Separate START and END events. Processing all STARTs before
            # ENDs ensures that on_span_start completes before on_span_end
            # for any span whose both events land in the same batch.
            starts = [i for i in batch if i.event_type == SpanEventType.START]
            ends = [i for i in batch if i.event_type == SpanEventType.END]

            if starts:
                await self._process_items(starts)
            if ends:
                await self._process_items(ends)
        finally:
            # Account for every dequeued item so Queue.join() can complete.
            for _ in batch:
                self._queue.task_done()
            # Release span data for GC.
            batch.clear()

@staticmethod
async def _process_items(items: list[_SpanQueueItem]) -> None:
"""Process a list of span events concurrently."""

async def _handle(item: _SpanQueueItem) -> None:
try:
if item.event_type == SpanEventType.START:
coros = [p.on_span_start(item.span) for p in item.processors]
Expand All @@ -72,9 +113,15 @@ async def _drain_loop(self) -> None:
exc_info=result,
)
except Exception:
logger.exception("Unexpected error in span queue drain loop for span %s", item.span.id)
finally:
self._queue.task_done()
logger.exception(
"Unexpected error in span queue for span %s", item.span.id
)

await asyncio.gather(*[_handle(item) for item in items])

# ------------------------------------------------------------------
# Shutdown
# ------------------------------------------------------------------

async def shutdown(self, timeout: float = 30.0) -> None:
self._stopping = True
Expand Down
Loading
Loading