Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions livekit-agents/livekit/agents/voice/agent_activity.py
Original file line number Diff line number Diff line change
Expand Up @@ -326,6 +326,7 @@ def endpointing_opts(self) -> EndpointingOptions:
mode=agent_endpointing.get("mode", session_endpointing["mode"]),
min_delay=agent_endpointing.get("min_delay", session_endpointing["min_delay"]),
max_delay=agent_endpointing.get("max_delay", session_endpointing["max_delay"]),
alpha=agent_endpointing.get("alpha", session_endpointing["alpha"]),
)

@property
Expand Down Expand Up @@ -450,6 +451,7 @@ def update_options(
max_delay=max_endpointing_delay
if is_given(max_endpointing_delay)
else self.endpointing_opts["max_delay"],
alpha=self.endpointing_opts["alpha"],
)

if utils.is_given(tool_choice):
Expand Down
2 changes: 2 additions & 0 deletions livekit-agents/livekit/agents/voice/agent_session.py
Original file line number Diff line number Diff line change
Expand Up @@ -1058,6 +1058,8 @@ def update_options(
self._opts.endpointing["min_delay"] = min_delay
if (max_delay := endpointing_opts.get("max_delay")) is not None:
self._opts.endpointing["max_delay"] = max_delay
if (alpha := endpointing_opts.get("alpha")) is not None:
self._opts.endpointing["alpha"] = alpha

if is_given(turn_detection):
self._turn_detection = turn_detection
Expand Down
6 changes: 6 additions & 0 deletions livekit-agents/livekit/agents/voice/endpointing.py
Original file line number Diff line number Diff line change
Expand Up @@ -290,6 +290,7 @@ def update_options(
*,
min_delay: NotGivenOr[float] = NOT_GIVEN,
max_delay: NotGivenOr[float] = NOT_GIVEN,
alpha: NotGivenOr[float] = NOT_GIVEN,
) -> None:
if is_given(min_delay):
self._min_delay = min_delay
Expand All @@ -301,13 +302,18 @@ def update_options(
self._turn_pause.reset(initial=self._max_delay, max_val=self._max_delay)
self._utterance_pause.reset(max_val=self._max_delay)

if is_given(alpha):
self._utterance_pause.reset(alpha=alpha)
self._turn_pause.reset(alpha=alpha)


def create_endpointing(options: EndpointingOptions) -> BaseEndpointing:
match options.get("mode", "fixed"):
case "dynamic":
return DynamicEndpointing(
min_delay=options["min_delay"],
max_delay=options["max_delay"],
alpha=options["alpha"],
)
case _:
return BaseEndpointing(
Expand Down
5 changes: 5 additions & 0 deletions livekit-agents/livekit/agents/voice/turn.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,12 +60,17 @@ class EndpointingOptions(TypedDict, total=False):
max_delay: float
"""Maximum time (s) the agent waits before terminating the turn.
Defaults to ``3.0``."""
alpha: float
"""Exponential moving average coefficient for dynamic endpointing.
The higher the value, the more weight is given to the history.
Defaults to ``0.9``. Only applies when mode is ``dynamic``."""
Comment thread
devin-ai-integration[bot] marked this conversation as resolved.


# Defaults merged in when a caller omits a key from EndpointingOptions.
# Values match the field docstrings above: fixed-delay mode, 0.5 s minimum
# wait, 3.0 s maximum wait.  "alpha" is the EMA history coefficient and is
# only consulted when mode == "dynamic".
_ENDPOINTING_DEFAULTS: EndpointingOptions = {
    "mode": "fixed",
    "min_delay": 0.5,
    "max_delay": 3.0,
    "alpha": 0.9,
}


Expand Down
1 change: 1 addition & 0 deletions makefile
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,7 @@ unit-tests:
tests/test_interruption/test_overlapping_speech_event.py \
tests/test_tool_search.py \
tests/test_tool_proxy.py \
tests/test_endpointing.py \
tests/test_session_host.py

# ============================================
Expand Down
38 changes: 37 additions & 1 deletion tests/test_endpointing.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import pytest

from livekit.agents.utils.exp_filter import ExpFilter
from livekit.agents.voice.endpointing import DynamicEndpointing
from livekit.agents.voice.endpointing import DynamicEndpointing, create_endpointing


class TestExponentialMovingAverage:
Expand Down Expand Up @@ -270,6 +270,23 @@ def test_update_options_preserves_filter_alpha(self) -> None:
assert ep._utterance_pause._alpha == pytest.approx(0.5, rel=1e-5)
assert ep._turn_pause._alpha == pytest.approx(0.5, rel=1e-5)

def test_update_options_updates_alpha_in_place(self) -> None:
"""update_options(alpha=...) should update both EMA filters without resetting learned state."""
ep = DynamicEndpointing(min_delay=0.3, max_delay=1.0, alpha=0.5)

# Record some history so the filter has a non-initial value.
ep.on_end_of_speech(ended_at=100.0)
ep.on_start_of_speech(started_at=100.2)
ep.on_end_of_speech(ended_at=101.0)
learned_min = ep.min_delay

ep.update_options(alpha=0.2)

assert ep._utterance_pause._alpha == pytest.approx(0.2, rel=1e-5)
assert ep._turn_pause._alpha == pytest.approx(0.2, rel=1e-5)
# Learned value should be preserved — only the coefficient changed.
assert ep.min_delay == pytest.approx(learned_min, rel=1e-5)

def test_update_options_updates_filter_clamp_bounds(self) -> None:
"""Changing delays should propagate into exp-filter min/max clamp limits."""
ep = DynamicEndpointing(min_delay=0.3, max_delay=1.0, alpha=0.5)
Expand Down Expand Up @@ -543,3 +560,22 @@ def test_full_conversation_sequence(self) -> None:
# so between_utterance_delay = 0 → no update
assert ep._speaking is False
assert ep._agent_speech_started_at is None


class TestCreateEndpointing:
    def test_dynamic_mode_wires_alpha(self) -> None:
        """In dynamic mode the factory must forward alpha into both EMA filters."""
        options = {"mode": "dynamic", "min_delay": 0.3, "max_delay": 1.0, "alpha": 0.7}
        endpointing = create_endpointing(options)

        assert isinstance(endpointing, DynamicEndpointing)
        for ema in (endpointing._utterance_pause, endpointing._turn_pause):
            assert ema._alpha == pytest.approx(0.7, rel=1e-5)

    def test_fixed_mode_returns_base_endpointing(self) -> None:
        """In fixed mode the factory yields the base class with the given delays."""
        endpointing = create_endpointing(
            {"mode": "fixed", "min_delay": 0.5, "max_delay": 3.0, "alpha": 0.9}
        )

        assert not isinstance(endpointing, DynamicEndpointing)
        assert endpointing.min_delay == 0.5
        assert endpointing.max_delay == 3.0
1 change: 0 additions & 1 deletion uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading