From 09c5ca7c87db4fea0ea1995a7d2e2affbb107b5a Mon Sep 17 00:00:00 2001 From: Fabian Schindler Date: Thu, 25 Sep 2025 15:26:51 +0200 Subject: [PATCH 01/13] feat(integrations): add litellm integration --- sentry_sdk/integrations/__init__.py | 1 + sentry_sdk/integrations/litellm.py | 281 ++++++++++++++++++++++++++++ 2 files changed, 282 insertions(+) create mode 100644 sentry_sdk/integrations/litellm.py diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index e397c9986a..40530da0db 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -97,6 +97,7 @@ def iter_default_integrations(with_auto_enabling_integrations): "sentry_sdk.integrations.langchain.LangchainIntegration", "sentry_sdk.integrations.langgraph.LanggraphIntegration", "sentry_sdk.integrations.litestar.LitestarIntegration", + "sentry_sdk.integrations.litellm.LiteLLMIntegration", "sentry_sdk.integrations.loguru.LoguruIntegration", "sentry_sdk.integrations.openai.OpenAIIntegration", "sentry_sdk.integrations.pymongo.PyMongoIntegration", diff --git a/sentry_sdk/integrations/litellm.py b/sentry_sdk/integrations/litellm.py new file mode 100644 index 0000000000..81062737bf --- /dev/null +++ b/sentry_sdk/integrations/litellm.py @@ -0,0 +1,281 @@ +from typing import TYPE_CHECKING + +import sentry_sdk +from sentry_sdk import consts +from sentry_sdk.ai.monitoring import record_token_usage +from sentry_sdk.ai.utils import get_start_span_function, set_data_normalized +from sentry_sdk.consts import SPANDATA +from sentry_sdk.integrations import DidNotEnable, Integration +from sentry_sdk.scope import should_send_default_pii +from sentry_sdk.utils import event_from_exception + +if TYPE_CHECKING: + from typing import Any, Dict + from datetime import datetime + +try: + import litellm +except ImportError: + raise DidNotEnable("LiteLLM not installed") + + +def _get_provider_from_model(model): + # type: (str) -> str + """Extract provider name from model 
string using LiteLLM's logic""" + if not model: + return "unknown" + + # Common provider prefixes/patterns + if model.startswith("gpt-") or model.startswith("o1-") or "openai/" in model: + return "openai" + elif model.startswith("claude-") or "anthropic/" in model: + return "anthropic" + elif ( + model.startswith("gemini-") + or "google/" in model + or model.startswith("vertex_ai/") + ): + return "google" + elif "cohere/" in model or model.startswith("command-"): + return "cohere" + elif "azure/" in model: + return "azure" + elif "bedrock/" in model: + return "bedrock" + elif "ollama/" in model: + return "ollama" + else: + # Try to use LiteLLM's internal provider detection if available + try: + if hasattr(litellm, "get_llm_provider"): + provider_info = litellm.get_llm_provider(model) + if isinstance(provider_info, tuple) and len(provider_info) > 1: + return provider_info[1] or "unknown" + return "unknown" + except Exception: + return "unknown" + + +def _input_callback( + kwargs, # type: Dict[str, Any] +): + # type: (...) 
-> None + """Handle the start of a request.""" + integration = sentry_sdk.get_client().get_integration(LiteLLMIntegration) + + if integration is None: + return + + # Get key parameters + model = kwargs.get("model", "") + messages = kwargs.get("messages", []) + operation = "chat" if messages else "embeddings" + + # Start a new span/transaction + span = get_start_span_function()( + op=( + consts.OP.GEN_AI_CHAT + if operation == "chat" + else consts.OP.GEN_AI_EMBEDDINGS + ), + name=f"{operation} {model}", + origin=LiteLLMIntegration.origin, + ) + span.__enter__() + + # Store span for later + kwargs["_sentry_span"] = span + + # Set basic data + set_data_normalized(span, SPANDATA.GEN_AI_SYSTEM, "litellm") + set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, operation) + set_data_normalized( + span, "gen_ai.litellm.provider", _get_provider_from_model(model) + ) + + # Record messages if allowed + if messages and should_send_default_pii() and integration.include_prompts: + set_data_normalized( + span, SPANDATA.GEN_AI_REQUEST_MESSAGES, messages, unpack=False + ) + + # Record other parameters + params = { + "model": SPANDATA.GEN_AI_REQUEST_MODEL, + "stream": SPANDATA.GEN_AI_RESPONSE_STREAMING, + "max_tokens": SPANDATA.GEN_AI_REQUEST_MAX_TOKENS, + "presence_penalty": SPANDATA.GEN_AI_REQUEST_PRESENCE_PENALTY, + "frequency_penalty": SPANDATA.GEN_AI_REQUEST_FREQUENCY_PENALTY, + "temperature": SPANDATA.GEN_AI_REQUEST_TEMPERATURE, + "top_p": SPANDATA.GEN_AI_REQUEST_TOP_P, + } + for key, attribute in params.items(): + value = kwargs.get(key) + if value is not None: + set_data_normalized(span, attribute, value) + + # Record LiteLLM-specific parameters + litellm_params = { + "api_base": kwargs.get("api_base"), + "api_version": kwargs.get("api_version"), + "custom_llm_provider": kwargs.get("custom_llm_provider"), + } + for key, value in litellm_params.items(): + if value is not None: + set_data_normalized(span, f"gen_ai.litellm.{key}", value) + + +def _success_callback( + 
kwargs, # type: Dict[str, Any] + completion_response, # type: Any + start_time, # type: datetime + end_time, # type: datetime +): + # type: (...) -> None + """Handle successful completion.""" + + span = kwargs.get("_sentry_span") + if span is None: + return + + integration = sentry_sdk.get_client().get_integration(LiteLLMIntegration) + if integration is None: + return + + try: + # Record model information + if hasattr(completion_response, "model"): + set_data_normalized( + span, SPANDATA.GEN_AI_RESPONSE_MODEL, completion_response.model + ) + + # Record response content if allowed + if should_send_default_pii() and integration.include_prompts: + if hasattr(completion_response, "choices"): + response_messages = [] + for choice in completion_response.choices: + if hasattr(choice, "message"): + if hasattr(choice.message, "model_dump"): + response_messages.append(choice.message.model_dump()) + elif hasattr(choice.message, "dict"): + response_messages.append(choice.message.dict()) + else: + # Fallback for basic message objects + msg = {} + if hasattr(choice.message, "role"): + msg["role"] = choice.message.role + if hasattr(choice.message, "content"): + msg["content"] = choice.message.content + if hasattr(choice.message, "tool_calls"): + msg["tool_calls"] = choice.message.tool_calls + response_messages.append(msg) + + if response_messages: + set_data_normalized( + span, SPANDATA.GEN_AI_RESPONSE_TEXT, response_messages + ) + + # Record token usage + if hasattr(completion_response, "usage"): + usage = completion_response.usage + record_token_usage( + span, + input_tokens=getattr(usage, "prompt_tokens", None), + output_tokens=getattr(usage, "completion_tokens", None), + total_tokens=getattr(usage, "total_tokens", None), + ) + + finally: + # Always finish the span and clean up + span.__exit__(None, None, None) + + +def _failure_callback( + kwargs, # type: Dict[str, Any] + exception, # type: Exception + start_time, # type: datetime + end_time, # type: datetime +): + # type: 
(...) -> None + """Handle request failure.""" + span = kwargs.get("_sentry_span") + + try: + # Capture the exception + event, hint = event_from_exception( + exception, + client_options=sentry_sdk.get_client().options, + mechanism={"type": "litellm", "handled": False}, + ) + sentry_sdk.capture_event(event, hint=hint) + finally: + # Always finish the span and clean up + span.__exit__(None, None, None) + + +class LiteLLMIntegration(Integration): + """ + LiteLLM integration for Sentry. + + This integration automatically captures LiteLLM API calls and sends them to Sentry + for monitoring and error tracking. It supports all 100+ LLM providers that LiteLLM + supports, including OpenAI, Anthropic, Google, Cohere, and many others. + + Features: + - Automatic exception capture for all LiteLLM calls + - Token usage tracking across all providers + - Provider detection and attribution + - Input/output message capture (configurable) + - Streaming response support + - Cost tracking integration + + Usage: + + ```python + import litellm + import sentry_sdk + + # Initialize Sentry with the LiteLLM integration + sentry_sdk.init( + dsn="your-dsn", + integrations=[ + sentry_sdk.integrations.LiteLLMIntegration( + include_prompts=True # Set to False to exclude message content + ) + ] + ) + + # All LiteLLM calls will now be monitored + response = litellm.completion( + model="gpt-3.5-turbo", + messages=[{"role": "user", "content": "Hello!"}] + ) + ``` + + Configuration: + - include_prompts (bool): Whether to include prompts and responses in spans. + Defaults to True. Set to False to exclude potentially sensitive data. 
+ """ + + identifier = "litellm" + origin = f"auto.ai.{identifier}" + + def __init__(self, include_prompts=True): + # type: (LiteLLMIntegration, bool) -> None + self.include_prompts = include_prompts + + @staticmethod + def setup_once(): + # type: () -> None + """Set up LiteLLM callbacks for monitoring.""" + litellm.input_callback = litellm.input_callback or [] + if _input_callback not in litellm.input_callback: + litellm.input_callback.append(_input_callback) + + litellm.success_callback = litellm.success_callback or [] + if _success_callback not in litellm.success_callback: + litellm.success_callback.append(_success_callback) + + litellm.failure_callback = litellm.failure_callback or [] + if _failure_callback not in litellm.failure_callback: + litellm.failure_callback.append(_failure_callback) From 1f85e016b51757faacc9588f7e4cfebe84e85f7c Mon Sep 17 00:00:00 2001 From: Fabian Schindler Date: Thu, 25 Sep 2025 15:34:27 +0200 Subject: [PATCH 02/13] fix(integrations): early redurn when we don't have a span in failure_callback --- sentry_sdk/integrations/litellm.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sentry_sdk/integrations/litellm.py b/sentry_sdk/integrations/litellm.py index 81062737bf..07382084ac 100644 --- a/sentry_sdk/integrations/litellm.py +++ b/sentry_sdk/integrations/litellm.py @@ -199,6 +199,8 @@ def _failure_callback( # type: (...) 
-> None """Handle request failure.""" span = kwargs.get("_sentry_span") + if span is None: + return try: # Capture the exception From 1458c760d823996ef6d14e5bb063ebfe91bb1267 Mon Sep 17 00:00:00 2001 From: Fabian Schindler Date: Tue, 30 Sep 2025 11:44:16 +0200 Subject: [PATCH 03/13] fix(litellm): store current span under `litellm_params.metadata` --- sentry_sdk/integrations/litellm.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/integrations/litellm.py b/sentry_sdk/integrations/litellm.py index 07382084ac..538cb9d2e6 100644 --- a/sentry_sdk/integrations/litellm.py +++ b/sentry_sdk/integrations/litellm.py @@ -56,6 +56,12 @@ def _get_provider_from_model(model): return "unknown" +def _get_metadata_dict(kwargs): + # type: (Dict[str, Any]) -> Dict[str, Any] + """Get the metadata dictionary from the kwargs.""" + return kwargs.setdefault("litellm_params", {}).setdefault("metadata", {}) + + def _input_callback( kwargs, # type: Dict[str, Any] ): @@ -84,7 +90,7 @@ def _input_callback( span.__enter__() # Store span for later - kwargs["_sentry_span"] = span + _get_metadata_dict(kwargs)["_sentry_span"] = span # Set basic data set_data_normalized(span, SPANDATA.GEN_AI_SYSTEM, "litellm") @@ -134,7 +140,7 @@ def _success_callback( # type: (...) -> None """Handle successful completion.""" - span = kwargs.get("_sentry_span") + span = _get_metadata_dict(kwargs).get("_sentry_span") if span is None: return @@ -198,7 +204,7 @@ def _failure_callback( ): # type: (...) 
-> None """Handle request failure.""" - span = kwargs.get("_sentry_span") + span = _get_metadata_dict(kwargs).get("_sentry_span") if span is None: return @@ -240,6 +246,7 @@ class LiteLLMIntegration(Integration): # Initialize Sentry with the LiteLLM integration sentry_sdk.init( dsn="your-dsn", + send_default_pii=True, integrations=[ sentry_sdk.integrations.LiteLLMIntegration( include_prompts=True # Set to False to exclude message content From fea80090461f2a07ea789d98d56371c4d70d6349 Mon Sep 17 00:00:00 2001 From: Fabian Schindler Date: Tue, 30 Sep 2025 14:09:28 +0200 Subject: [PATCH 04/13] fix(litellm): removing LiteLLM from default enabled integration list --- sentry_sdk/integrations/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index 40530da0db..e397c9986a 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -97,7 +97,6 @@ def iter_default_integrations(with_auto_enabling_integrations): "sentry_sdk.integrations.langchain.LangchainIntegration", "sentry_sdk.integrations.langgraph.LanggraphIntegration", "sentry_sdk.integrations.litestar.LitestarIntegration", - "sentry_sdk.integrations.litellm.LiteLLMIntegration", "sentry_sdk.integrations.loguru.LoguruIntegration", "sentry_sdk.integrations.openai.OpenAIIntegration", "sentry_sdk.integrations.pymongo.PyMongoIntegration", From 2b8a00744257a79ecabb3cfb69da22b115ff8b6f Mon Sep 17 00:00:00 2001 From: Fabian Schindler Date: Tue, 30 Sep 2025 14:09:59 +0200 Subject: [PATCH 05/13] fix(litellm): simplify model/provider extraction and improve metadata handling --- sentry_sdk/integrations/litellm.py | 83 ++++++++---------------------- 1 file changed, 22 insertions(+), 61 deletions(-) diff --git a/sentry_sdk/integrations/litellm.py b/sentry_sdk/integrations/litellm.py index 538cb9d2e6..7b2a483927 100644 --- a/sentry_sdk/integrations/litellm.py +++ b/sentry_sdk/integrations/litellm.py @@ -19,53 +19,21 @@
raise DidNotEnable("LiteLLM not installed") -def _get_provider_from_model(model): - # type: (str) -> str - """Extract provider name from model string using LiteLLM's logic""" - if not model: - return "unknown" - - # Common provider prefixes/patterns - if model.startswith("gpt-") or model.startswith("o1-") or "openai/" in model: - return "openai" - elif model.startswith("claude-") or "anthropic/" in model: - return "anthropic" - elif ( - model.startswith("gemini-") - or "google/" in model - or model.startswith("vertex_ai/") - ): - return "google" - elif "cohere/" in model or model.startswith("command-"): - return "cohere" - elif "azure/" in model: - return "azure" - elif "bedrock/" in model: - return "bedrock" - elif "ollama/" in model: - return "ollama" - else: - # Try to use LiteLLM's internal provider detection if available - try: - if hasattr(litellm, "get_llm_provider"): - provider_info = litellm.get_llm_provider(model) - if isinstance(provider_info, tuple) and len(provider_info) > 1: - return provider_info[1] or "unknown" - return "unknown" - except Exception: - return "unknown" - - def _get_metadata_dict(kwargs): # type: (Dict[str, Any]) -> Dict[str, Any] """Get the metadata dictionary from the kwargs.""" - return kwargs.setdefault("litellm_params", {}).setdefault("metadata", {}) + litellm_params = kwargs.setdefault("litellm_params", {}) + + # we need this weird little dance, as metadata might be set but may be None initially + metadata = litellm_params.get("metadata") + if metadata is None: + metadata = {} + litellm_params["metadata"] = metadata + return metadata -def _input_callback( - kwargs, # type: Dict[str, Any] -): - # type: (...) 
-> None +def _input_callback(kwargs): + # type: (Dict[str, Any]) -> None """Handle the start of a request.""" integration = sentry_sdk.get_client().get_integration(LiteLLMIntegration) @@ -73,7 +41,13 @@ def _input_callback( return # Get key parameters - model = kwargs.get("model", "") + full_model = kwargs.get("model", "") + try: + model, provider, _, _ = litellm.get_llm_provider(full_model) + except Exception: + model = full_model + provider = "unknown" + messages = kwargs.get("messages", []) operation = "chat" if messages else "embeddings" @@ -93,11 +67,8 @@ def _input_callback( _get_metadata_dict(kwargs)["_sentry_span"] = span # Set basic data - set_data_normalized(span, SPANDATA.GEN_AI_SYSTEM, "litellm") + set_data_normalized(span, SPANDATA.GEN_AI_SYSTEM, provider) set_data_normalized(span, SPANDATA.GEN_AI_OPERATION_NAME, operation) - set_data_normalized( - span, "gen_ai.litellm.provider", _get_provider_from_model(model) - ) # Record messages if allowed if messages and should_send_default_pii() and integration.include_prompts: @@ -131,13 +102,8 @@ def _input_callback( set_data_normalized(span, f"gen_ai.litellm.{key}", value) -def _success_callback( - kwargs, # type: Dict[str, Any] - completion_response, # type: Any - start_time, # type: datetime - end_time, # type: datetime -): - # type: (...) -> None +def _success_callback(kwargs, completion_response, start_time, end_time): + # type: (Dict[str, Any], Any, datetime, datetime) -> None """Handle successful completion.""" span = _get_metadata_dict(kwargs).get("_sentry_span") @@ -196,13 +162,8 @@ def _success_callback( span.__exit__(None, None, None) -def _failure_callback( - kwargs, # type: Dict[str, Any] - exception, # type: Exception - start_time, # type: datetime - end_time, # type: datetime -): - # type: (...) 
-> None +def _failure_callback(kwargs, exception, start_time, end_time): + # type: (Dict[str, Any], Exception, datetime, datetime) -> None """Handle request failure.""" span = _get_metadata_dict(kwargs).get("_sentry_span") if span is None: From 1ecd559392f9526c884e3cec12a152c65f098965 Mon Sep 17 00:00:00 2001 From: Fabian Schindler Date: Tue, 30 Sep 2025 14:27:40 +0200 Subject: [PATCH 06/13] fix(litellm): properly propagating exception in error callback --- sentry_sdk/integrations/litellm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/litellm.py b/sentry_sdk/integrations/litellm.py index 7b2a483927..261a8d029a 100644 --- a/sentry_sdk/integrations/litellm.py +++ b/sentry_sdk/integrations/litellm.py @@ -179,7 +179,7 @@ def _failure_callback(kwargs, exception, start_time, end_time): sentry_sdk.capture_event(event, hint=hint) finally: # Always finish the span and clean up - span.__exit__(None, None, None) + span.__exit__(type(exception), exception, None) class LiteLLMIntegration(Integration): From fbc78176304549fe11fecd67acc191562b2804d6 Mon Sep 17 00:00:00 2001 From: Fabian Schindler Date: Thu, 2 Oct 2025 13:30:11 +0200 Subject: [PATCH 07/13] fix(litellm): add litellm as an extra dependency --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 7119e20e90..6629726798 100644 --- a/setup.py +++ b/setup.py @@ -65,6 +65,7 @@ def get_file_text(file_name): "langchain": ["langchain>=0.0.210"], "langgraph": ["langgraph>=0.6.6"], "launchdarkly": ["launchdarkly-server-sdk>=9.8.0"], + "litellm": ["litellm>=1.77.5"], "litestar": ["litestar>=2.0.0"], "loguru": ["loguru>=0.5"], "openai": ["openai>=1.0.0", "tiktoken>=0.3.0"], From 891b9510ae620023f7f3c300a739c76af968597f Mon Sep 17 00:00:00 2001 From: Fabian Schindler Date: Thu, 2 Oct 2025 15:32:49 +0200 Subject: [PATCH 08/13] feat(tests): add tests for LiteLLM integration --- tests/integrations/litellm/__init__.py | 0 
tests/integrations/litellm/test_litellm.py | 547 +++++++++++++++++++++ 2 files changed, 547 insertions(+) create mode 100644 tests/integrations/litellm/__init__.py create mode 100644 tests/integrations/litellm/test_litellm.py diff --git a/tests/integrations/litellm/__init__.py b/tests/integrations/litellm/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/integrations/litellm/test_litellm.py b/tests/integrations/litellm/test_litellm.py new file mode 100644 index 0000000000..b600c32905 --- /dev/null +++ b/tests/integrations/litellm/test_litellm.py @@ -0,0 +1,547 @@ +import pytest +from unittest import mock +from datetime import datetime + +try: + from unittest.mock import AsyncMock +except ImportError: + + class AsyncMock(mock.MagicMock): + async def __call__(self, *args, **kwargs): + return super(AsyncMock, self).__call__(*args, **kwargs) + + +try: + import litellm +except ImportError: + pytest.skip("litellm not installed", allow_module_level=True) + +from sentry_sdk import start_transaction +from sentry_sdk.consts import OP, SPANDATA +from sentry_sdk.integrations.litellm import ( + LiteLLMIntegration, + _input_callback, + _success_callback, + _failure_callback, +) +from sentry_sdk.utils import package_version + + +LITELLM_VERSION = package_version("litellm") + + +# Mock response objects +class MockMessage: + def __init__(self, role="assistant", content="Test response"): + self.role = role + self.content = content + self.tool_calls = None + + def model_dump(self): + return {"role": self.role, "content": self.content} + + +class MockChoice: + def __init__(self, message=None): + self.message = message or MockMessage() + self.index = 0 + self.finish_reason = "stop" + + +class MockUsage: + def __init__(self, prompt_tokens=10, completion_tokens=20, total_tokens=30): + self.prompt_tokens = prompt_tokens + self.completion_tokens = completion_tokens + self.total_tokens = total_tokens + + +class MockCompletionResponse: + def __init__( + self, + 
model="gpt-3.5-turbo", + choices=None, + usage=None, + ): + self.id = "chatcmpl-test" + self.model = model + self.choices = choices or [MockChoice()] + self.usage = usage or MockUsage() + self.object = "chat.completion" + self.created = 1234567890 + + +class MockEmbeddingData: + def __init__(self, embedding=None): + self.embedding = embedding or [0.1, 0.2, 0.3] + self.index = 0 + self.object = "embedding" + + +class MockEmbeddingResponse: + def __init__(self, model="text-embedding-ada-002", data=None, usage=None): + self.model = model + self.data = data or [MockEmbeddingData()] + self.usage = usage or MockUsage( + prompt_tokens=5, completion_tokens=0, total_tokens=5 + ) + self.object = "list" + + +@pytest.mark.parametrize( + "send_default_pii, include_prompts", + [ + (True, True), + (True, False), + (False, True), + (False, False), + ], +) +def test_nonstreaming_chat_completion( + sentry_init, capture_events, send_default_pii, include_prompts +): + sentry_init( + integrations=[LiteLLMIntegration(include_prompts=include_prompts)], + traces_sample_rate=1.0, + send_default_pii=send_default_pii, + ) + events = capture_events() + + messages = [{"role": "user", "content": "Hello!"}] + mock_response = MockCompletionResponse() + + with start_transaction(name="litellm test"): + # Simulate what litellm does: call input callback, then success callback + kwargs = { + "model": "gpt-3.5-turbo", + "messages": messages, + } + + _input_callback(kwargs) + _success_callback( + kwargs, + mock_response, + datetime.now(), + datetime.now(), + ) + + assert len(events) == 1 + (event,) = events + + assert event["type"] == "transaction" + assert event["transaction"] == "litellm test" + + assert len(event["spans"]) == 1 + (span,) = event["spans"] + + assert span["op"] == OP.GEN_AI_CHAT + assert span["description"] == "chat gpt-3.5-turbo" + assert span["data"][SPANDATA.GEN_AI_REQUEST_MODEL] == "gpt-3.5-turbo" + assert span["data"][SPANDATA.GEN_AI_RESPONSE_MODEL] == "gpt-3.5-turbo" + assert 
span["data"][SPANDATA.GEN_AI_SYSTEM] == "openai" + assert span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "chat" + + if send_default_pii and include_prompts: + assert SPANDATA.GEN_AI_REQUEST_MESSAGES in span["data"] + assert SPANDATA.GEN_AI_RESPONSE_TEXT in span["data"] + else: + assert SPANDATA.GEN_AI_REQUEST_MESSAGES not in span["data"] + assert SPANDATA.GEN_AI_RESPONSE_TEXT not in span["data"] + + assert span["data"][SPANDATA.GEN_AI_USAGE_INPUT_TOKENS] == 10 + assert span["data"][SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS] == 20 + assert span["data"][SPANDATA.GEN_AI_USAGE_TOTAL_TOKENS] == 30 + + +@pytest.mark.parametrize( + "send_default_pii, include_prompts", + [ + (True, True), + (True, False), + (False, True), + (False, False), + ], +) +def test_streaming_chat_completion( + sentry_init, capture_events, send_default_pii, include_prompts +): + sentry_init( + integrations=[LiteLLMIntegration(include_prompts=include_prompts)], + traces_sample_rate=1.0, + send_default_pii=send_default_pii, + ) + events = capture_events() + + messages = [{"role": "user", "content": "Hello!"}] + mock_response = MockCompletionResponse() + + with start_transaction(name="litellm test"): + kwargs = { + "model": "gpt-3.5-turbo", + "messages": messages, + "stream": True, + } + + _input_callback(kwargs) + _success_callback( + kwargs, + mock_response, + datetime.now(), + datetime.now(), + ) + + assert len(events) == 1 + (event,) = events + + assert event["type"] == "transaction" + assert len(event["spans"]) == 1 + (span,) = event["spans"] + + assert span["op"] == OP.GEN_AI_CHAT + assert span["data"][SPANDATA.GEN_AI_RESPONSE_STREAMING] is True + + +def test_embeddings_create(sentry_init, capture_events): + sentry_init( + integrations=[LiteLLMIntegration(include_prompts=True)], + traces_sample_rate=1.0, + send_default_pii=True, + ) + events = capture_events() + + mock_response = MockEmbeddingResponse() + + with start_transaction(name="litellm test"): + # For embeddings, messages would be empty + 
kwargs = { + "model": "text-embedding-ada-002", + "input": "Hello!", + "messages": [], # Empty for embeddings + } + + _input_callback(kwargs) + _success_callback( + kwargs, + mock_response, + datetime.now(), + datetime.now(), + ) + + assert len(events) == 1 + (event,) = events + + assert event["type"] == "transaction" + assert len(event["spans"]) == 1 + (span,) = event["spans"] + + assert span["op"] == OP.GEN_AI_EMBEDDINGS + assert span["description"] == "embeddings text-embedding-ada-002" + assert span["data"][SPANDATA.GEN_AI_OPERATION_NAME] == "embeddings" + assert span["data"][SPANDATA.GEN_AI_USAGE_INPUT_TOKENS] == 5 + + +def test_exception_handling(sentry_init, capture_events): + sentry_init( + integrations=[LiteLLMIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + messages = [{"role": "user", "content": "Hello!"}] + + with start_transaction(name="litellm test"): + kwargs = { + "model": "gpt-3.5-turbo", + "messages": messages, + } + + _input_callback(kwargs) + _failure_callback( + kwargs, + Exception("API rate limit reached"), + datetime.now(), + datetime.now(), + ) + + # Should have error event and transaction + assert len(events) >= 1 + # Find the error event + error_events = [e for e in events if e.get("level") == "error"] + assert len(error_events) == 1 + + +def test_span_origin(sentry_init, capture_events): + sentry_init( + integrations=[LiteLLMIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + messages = [{"role": "user", "content": "Hello!"}] + mock_response = MockCompletionResponse() + + with start_transaction(name="litellm test"): + kwargs = { + "model": "gpt-3.5-turbo", + "messages": messages, + } + + _input_callback(kwargs) + _success_callback( + kwargs, + mock_response, + datetime.now(), + datetime.now(), + ) + + (event,) = events + + assert event["contexts"]["trace"]["origin"] == "manual" + assert event["spans"][0]["origin"] == "auto.ai.litellm" + + +def test_multiple_providers(sentry_init, 
capture_events): + """Test that the integration correctly identifies different providers.""" + sentry_init( + integrations=[LiteLLMIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + messages = [{"role": "user", "content": "Hello!"}] + + # Test with different model prefixes + test_cases = [ + ("gpt-3.5-turbo", "openai"), + ("claude-3-opus-20240229", "anthropic"), + ("gemini/gemini-pro", "gemini"), + ] + + for model, _ in test_cases: + mock_response = MockCompletionResponse(model=model) + with start_transaction(name=f"test {model}"): + kwargs = { + "model": model, + "messages": messages, + } + + _input_callback(kwargs) + _success_callback( + kwargs, + mock_response, + datetime.now(), + datetime.now(), + ) + + assert len(events) == len(test_cases) + + for i in range(len(test_cases)): + span = events[i]["spans"][0] + # The provider should be detected by litellm.get_llm_provider + assert SPANDATA.GEN_AI_SYSTEM in span["data"] + + +def test_additional_parameters(sentry_init, capture_events): + """Test that additional parameters are captured.""" + sentry_init( + integrations=[LiteLLMIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + messages = [{"role": "user", "content": "Hello!"}] + mock_response = MockCompletionResponse() + + with start_transaction(name="litellm test"): + kwargs = { + "model": "gpt-3.5-turbo", + "messages": messages, + "temperature": 0.7, + "max_tokens": 100, + "top_p": 0.9, + "frequency_penalty": 0.5, + "presence_penalty": 0.5, + } + + _input_callback(kwargs) + _success_callback( + kwargs, + mock_response, + datetime.now(), + datetime.now(), + ) + + (event,) = events + (span,) = event["spans"] + + assert span["data"][SPANDATA.GEN_AI_REQUEST_TEMPERATURE] == 0.7 + assert span["data"][SPANDATA.GEN_AI_REQUEST_MAX_TOKENS] == 100 + assert span["data"][SPANDATA.GEN_AI_REQUEST_TOP_P] == 0.9 + assert span["data"][SPANDATA.GEN_AI_REQUEST_FREQUENCY_PENALTY] == 0.5 + assert 
span["data"][SPANDATA.GEN_AI_REQUEST_PRESENCE_PENALTY] == 0.5 + + +def test_litellm_specific_parameters(sentry_init, capture_events): + """Test that LiteLLM-specific parameters are captured.""" + sentry_init( + integrations=[LiteLLMIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + messages = [{"role": "user", "content": "Hello!"}] + mock_response = MockCompletionResponse() + + with start_transaction(name="litellm test"): + kwargs = { + "model": "gpt-3.5-turbo", + "messages": messages, + "api_base": "https://custom-api.example.com", + "api_version": "2023-01-01", + "custom_llm_provider": "custom_provider", + } + + _input_callback(kwargs) + _success_callback( + kwargs, + mock_response, + datetime.now(), + datetime.now(), + ) + + (event,) = events + (span,) = event["spans"] + + assert span["data"]["gen_ai.litellm.api_base"] == "https://custom-api.example.com" + assert span["data"]["gen_ai.litellm.api_version"] == "2023-01-01" + assert span["data"]["gen_ai.litellm.custom_llm_provider"] == "custom_provider" + + +def test_no_integration(sentry_init, capture_events): + """Test that when integration is not enabled, callbacks don't break.""" + sentry_init( + traces_sample_rate=1.0, + ) + events = capture_events() + + messages = [{"role": "user", "content": "Hello!"}] + mock_response = MockCompletionResponse() + + with start_transaction(name="litellm test"): + # When the integration isn't enabled, the callbacks should exit early + kwargs = { + "model": "gpt-3.5-turbo", + "messages": messages, + } + + # These should not crash, just do nothing + _input_callback(kwargs) + _success_callback( + kwargs, + mock_response, + datetime.now(), + datetime.now(), + ) + + (event,) = events + # Should still have the transaction, but no child spans since integration is off + assert event["type"] == "transaction" + assert len(event.get("spans", [])) == 0 + + +def test_response_without_usage(sentry_init, capture_events): + """Test handling of responses without usage 
information.""" + sentry_init( + integrations=[LiteLLMIntegration()], + traces_sample_rate=1.0, + ) + events = capture_events() + + messages = [{"role": "user", "content": "Hello!"}] + + # Create a mock response without usage + mock_response = type( + "obj", + (object,), + { + "model": "gpt-3.5-turbo", + "choices": [MockChoice()], + }, + )() + + with start_transaction(name="litellm test"): + kwargs = { + "model": "gpt-3.5-turbo", + "messages": messages, + } + + _input_callback(kwargs) + _success_callback( + kwargs, + mock_response, + datetime.now(), + datetime.now(), + ) + + (event,) = events + (span,) = event["spans"] + + # Span should still be created even without usage info + assert span["op"] == OP.GEN_AI_CHAT + assert span["description"] == "chat gpt-3.5-turbo" + + +def test_integration_setup(sentry_init): + """Test that the integration sets up the callbacks correctly.""" + sentry_init( + integrations=[LiteLLMIntegration()], + traces_sample_rate=1.0, + ) + + # Check that callbacks are registered + assert _input_callback in (litellm.input_callback or []) + assert _success_callback in (litellm.success_callback or []) + assert _failure_callback in (litellm.failure_callback or []) + + +def test_message_dict_extraction(sentry_init, capture_events): + """Test that response messages are properly extracted with dict() fallback.""" + sentry_init( + integrations=[LiteLLMIntegration(include_prompts=True)], + traces_sample_rate=1.0, + send_default_pii=True, + ) + events = capture_events() + + messages = [{"role": "user", "content": "Hello!"}] + + # Create a message that has dict() method instead of model_dump() + class DictMessage: + def __init__(self): + self.role = "assistant" + self.content = "Response" + self.tool_calls = None + + def dict(self): + return {"role": self.role, "content": self.content} + + mock_response = MockCompletionResponse(choices=[MockChoice(message=DictMessage())]) + + with start_transaction(name="litellm test"): + kwargs = { + "model": 
"gpt-3.5-turbo", + "messages": messages, + } + + _input_callback(kwargs) + _success_callback( + kwargs, + mock_response, + datetime.now(), + datetime.now(), + ) + + (event,) = events + (span,) = event["spans"] + + # Should have extracted the response message + assert SPANDATA.GEN_AI_RESPONSE_TEXT in span["data"] From 24214220456b6f713fa417bc75e4c46502abfca2 Mon Sep 17 00:00:00 2001 From: Fabian Schindler Date: Thu, 2 Oct 2025 15:47:29 +0200 Subject: [PATCH 09/13] feat(integrations): add minimum version for litellm integration --- sentry_sdk/integrations/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index e397c9986a..c34c18ad83 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -146,6 +146,7 @@ def iter_default_integrations(with_auto_enabling_integrations): "langchain": (0, 1, 0), "langgraph": (0, 6, 6), "launchdarkly": (9, 8, 0), + "litellm": (1, 77, 2), "loguru": (0, 7, 0), "openai": (1, 0, 0), "openai_agents": (0, 0, 19), From 29b2e351a1154110e9f0121398be05b6addc6851 Mon Sep 17 00:00:00 2001 From: Fabian Schindler Date: Thu, 2 Oct 2025 15:50:16 +0200 Subject: [PATCH 10/13] fix(integrations): update minimum version for litellm integration to 1.77.5 --- sentry_sdk/integrations/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py index c34c18ad83..3f71f0f4ba 100644 --- a/sentry_sdk/integrations/__init__.py +++ b/sentry_sdk/integrations/__init__.py @@ -146,7 +146,7 @@ def iter_default_integrations(with_auto_enabling_integrations): "langchain": (0, 1, 0), "langgraph": (0, 6, 6), "launchdarkly": (9, 8, 0), - "litellm": (1, 77, 2), + "litellm": (1, 77, 5), "loguru": (0, 7, 0), "openai": (1, 0, 0), "openai_agents": (0, 0, 19), From a8156f525c5cd94ac860618e827c9251c9cc2811 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 3 Oct 2025 
10:09:38 +0200 Subject: [PATCH 11/13] Add litellm to test setup and generate test matrix --- .github/workflows/test-integrations-ai.yml | 4 ++++ scripts/populate_tox/config.py | 3 +++ scripts/populate_tox/releases.jsonl | 7 ++++--- .../split_tox_gh_actions.py | 1 + tox.ini | 21 ++++++++++++------- 5 files changed, 25 insertions(+), 11 deletions(-) diff --git a/.github/workflows/test-integrations-ai.yml b/.github/workflows/test-integrations-ai.yml index cf21720ff1..fcbb464078 100644 --- a/.github/workflows/test-integrations-ai.yml +++ b/.github/workflows/test-integrations-ai.yml @@ -66,6 +66,10 @@ jobs: run: | set -x # print commands that are executed ./scripts/runtox.sh "py${{ matrix.python-version }}-langchain-notiktoken" + - name: Test litellm + run: | + set -x # print commands that are executed + ./scripts/runtox.sh "py${{ matrix.python-version }}-litellm" - name: Test openai-base run: | set -x # print commands that are executed diff --git a/scripts/populate_tox/config.py b/scripts/populate_tox/config.py index 34ae680fad..f69e5f2f90 100644 --- a/scripts/populate_tox/config.py +++ b/scripts/populate_tox/config.py @@ -214,6 +214,9 @@ "package": "launchdarkly-server-sdk", "num_versions": 2, }, + "litellm": { + "package": "litellm", + }, "litestar": { "package": "litestar", "deps": { diff --git a/scripts/populate_tox/releases.jsonl b/scripts/populate_tox/releases.jsonl index afbb5aef09..b7cca55815 100644 --- a/scripts/populate_tox/releases.jsonl +++ b/scripts/populate_tox/releases.jsonl @@ -46,7 +46,7 @@ {"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Natural Language :: English", "Programming Language :: Python", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 
3.7"], "name": "boto3", "requires_python": "", "version": "1.12.49", "yanked": false}} {"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Natural Language :: English", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9"], "name": "boto3", "requires_python": ">= 3.6", "version": "1.20.54", "yanked": false}} {"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Natural Language :: English", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9"], "name": "boto3", "requires_python": ">= 3.7", "version": "1.28.85", "yanked": false}} -{"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Natural Language :: English", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.9"], "name": "boto3", "requires_python": ">=3.9", "version": "1.40.43", "yanked": false}} +{"info": {"classifiers": ["Development Status :: 5 - 
Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Natural Language :: English", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.14", "Programming Language :: Python :: 3.9"], "name": "boto3", "requires_python": ">=3.9", "version": "1.40.44", "yanked": false}} {"info": {"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 2.5", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries", "Topic :: Internet :: WWW/HTTP :: HTTP Servers", "Topic :: Internet :: WWW/HTTP :: WSGI", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", "Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware", "Topic :: Internet :: WWW/HTTP :: WSGI :: Server", "Topic :: Software Development :: Libraries :: Application Frameworks"], "name": "bottle", "requires_python": "", "version": "0.12.25", "yanked": false}} {"info": {"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 
3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries", "Topic :: Internet :: WWW/HTTP :: HTTP Servers", "Topic :: Internet :: WWW/HTTP :: WSGI", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", "Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware", "Topic :: Internet :: WWW/HTTP :: WSGI :: Server", "Topic :: Software Development :: Libraries :: Application Frameworks"], "name": "bottle", "requires_python": null, "version": "0.13.4", "yanked": false}} {"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Software Development :: Object Brokering", "Topic :: System :: Distributed Computing"], "name": "celery", "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*", "version": "4.4.7", "yanked": false}} @@ -101,6 +101,7 @@ {"info": {"classifiers": [], "name": "langgraph", "requires_python": ">=3.10", "version": "1.0.0a4", "yanked": false}} {"info": {"classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.14", "Programming Language :: 
Python :: 3.9", "Topic :: Software Development", "Topic :: Software Development :: Libraries"], "name": "launchdarkly-server-sdk", "requires_python": ">=3.9", "version": "9.12.1", "yanked": false}} {"info": {"classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Topic :: Software Development", "Topic :: Software Development :: Libraries"], "name": "launchdarkly-server-sdk", "requires_python": ">=3.8", "version": "9.8.1", "yanked": false}} +{"info": {"classifiers": ["License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.9"], "name": "litellm", "requires_python": "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8", "version": "1.77.5", "yanked": false}} {"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Framework :: AsyncIO", "Framework :: Pydantic", "Framework :: Pydantic :: 1", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Intended Audience :: System Administrators", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Topic :: Internet", "Topic :: 
Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: HTTP Servers", "Topic :: Software Development", "Topic :: Software Development :: Libraries", "Topic :: Software Development :: Libraries :: Application Frameworks", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed"], "name": "litestar", "requires_python": ">=3.8,<4.0", "version": "2.0.1", "yanked": false}} {"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Intended Audience :: System Administrators", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Topic :: Internet", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: HTTP Servers", "Topic :: Software Development", "Topic :: Software Development :: Libraries", "Topic :: Software Development :: Libraries :: Application Frameworks", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed"], "name": "litestar", "requires_python": "<4.0,>=3.8", "version": "2.12.1", "yanked": false}} {"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Intended Audience :: System Administrators", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 
3.8", "Programming Language :: Python :: 3.9", "Topic :: Internet", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: HTTP Servers", "Topic :: Software Development", "Topic :: Software Development :: Libraries", "Topic :: Software Development :: Libraries :: Application Frameworks", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed"], "name": "litestar", "requires_python": "<4.0,>=3.8", "version": "2.17.0", "yanked": false}} @@ -108,7 +109,7 @@ {"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: System :: Logging"], "name": "loguru", "requires_python": "<4.0,>=3.5", "version": "0.7.3", "yanked": false}} {"info": {"classifiers": ["Intended Audience :: Developers", "Operating System :: MacOS", "Operating System :: Microsoft :: Windows", "Operating System :: OS Independent", "Operating System :: POSIX", "Operating System :: POSIX :: Linux", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Topic :: Software Development :: Libraries :: Python 
Modules", "Typing :: Typed"], "name": "openai", "requires_python": ">=3.7.1", "version": "1.0.1", "yanked": false}} {"info": {"classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS", "Operating System :: Microsoft :: Windows", "Operating System :: OS Independent", "Operating System :: POSIX", "Operating System :: POSIX :: Linux", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed"], "name": "openai", "requires_python": ">=3.8", "version": "1.109.1", "yanked": false}} -{"info": {"classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS", "Operating System :: Microsoft :: Windows", "Operating System :: OS Independent", "Operating System :: POSIX", "Operating System :: POSIX :: Linux", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed"], "name": "openai", "requires_python": ">=3.8", "version": "2.0.1", "yanked": false}} +{"info": {"classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS", "Operating System :: Microsoft :: Windows", "Operating System :: OS Independent", "Operating System :: POSIX", "Operating System :: POSIX :: Linux", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 
3.13", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed"], "name": "openai", "requires_python": ">=3.8", "version": "2.1.0", "yanked": false}} {"info": {"classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.9", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed"], "name": "openai-agents", "requires_python": ">=3.9", "version": "0.0.19", "yanked": false}} {"info": {"classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.9", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed"], "name": "openai-agents", "requires_python": ">=3.9", "version": "0.1.0", "yanked": false}} {"info": {"classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.9", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed"], "name": "openai-agents", "requires_python": ">=3.9", "version": "0.2.11", "yanked": false}} @@ -200,6 +201,6 @@ {"info": {"classifiers": ["Development Status :: 5 - 
Production/Stable", "Environment :: Console", "Environment :: No Input/Output (Daemon)", "Framework :: Tryton", "Intended Audience :: Developers", "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", "Natural Language :: Bulgarian", "Natural Language :: Catalan", "Natural Language :: Chinese (Simplified)", "Natural Language :: Czech", "Natural Language :: Dutch", "Natural Language :: English", "Natural Language :: Finnish", "Natural Language :: French", "Natural Language :: German", "Natural Language :: Hungarian", "Natural Language :: Indonesian", "Natural Language :: Italian", "Natural Language :: Persian", "Natural Language :: Polish", "Natural Language :: Portuguese (Brazilian)", "Natural Language :: Russian", "Natural Language :: Slovenian", "Natural Language :: Spanish", "Natural Language :: Turkish", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Software Development :: Libraries :: Application Frameworks"], "name": "trytond", "requires_python": ">=3.6", "version": "5.8.16", "yanked": false}} {"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Console", "Environment :: No Input/Output (Daemon)", "Framework :: Tryton", "Intended Audience :: Developers", "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", "Natural Language :: Bulgarian", "Natural Language :: Catalan", "Natural Language :: Chinese (Simplified)", "Natural Language :: Czech", "Natural Language :: Dutch", "Natural Language :: English", "Natural Language :: Finnish", "Natural Language :: French", "Natural Language :: German", "Natural Language :: Hungarian", "Natural Language :: Indonesian", "Natural Language :: Italian", "Natural 
Language :: Persian", "Natural Language :: Polish", "Natural Language :: Portuguese (Brazilian)", "Natural Language :: Romanian", "Natural Language :: Russian", "Natural Language :: Slovenian", "Natural Language :: Spanish", "Natural Language :: Turkish", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Software Development :: Libraries :: Application Frameworks"], "name": "trytond", "requires_python": ">=3.6", "version": "6.2.14", "yanked": false}} {"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Console", "Environment :: No Input/Output (Daemon)", "Framework :: Tryton", "Intended Audience :: Developers", "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", "Natural Language :: Bulgarian", "Natural Language :: Catalan", "Natural Language :: Chinese (Simplified)", "Natural Language :: Czech", "Natural Language :: Dutch", "Natural Language :: English", "Natural Language :: Finnish", "Natural Language :: French", "Natural Language :: German", "Natural Language :: Hungarian", "Natural Language :: Indonesian", "Natural Language :: Italian", "Natural Language :: Persian", "Natural Language :: Polish", "Natural Language :: Portuguese (Brazilian)", "Natural Language :: Romanian", "Natural Language :: Russian", "Natural Language :: Slovenian", "Natural Language :: Spanish", "Natural Language :: Turkish", "Natural Language :: Ukrainian", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming 
Language :: Python :: Implementation :: CPython", "Topic :: Software Development :: Libraries :: Application Frameworks"], "name": "trytond", "requires_python": ">=3.8", "version": "6.8.17", "yanked": false}} -{"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Console", "Environment :: No Input/Output (Daemon)", "Framework :: Tryton", "Intended Audience :: Developers", "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", "Natural Language :: Bulgarian", "Natural Language :: Catalan", "Natural Language :: Chinese (Simplified)", "Natural Language :: Czech", "Natural Language :: Dutch", "Natural Language :: English", "Natural Language :: Finnish", "Natural Language :: French", "Natural Language :: German", "Natural Language :: Hungarian", "Natural Language :: Indonesian", "Natural Language :: Italian", "Natural Language :: Persian", "Natural Language :: Polish", "Natural Language :: Portuguese (Brazilian)", "Natural Language :: Romanian", "Natural Language :: Russian", "Natural Language :: Slovenian", "Natural Language :: Spanish", "Natural Language :: Turkish", "Natural Language :: Ukrainian", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: Implementation :: CPython", "Topic :: Software Development :: Libraries :: Application Frameworks"], "name": "trytond", "requires_python": ">=3.9", "version": "7.6.7", "yanked": false}} +{"info": {"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Console", "Environment :: No Input/Output (Daemon)", "Framework :: Tryton", "Intended Audience :: Developers", "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", "Natural Language :: Bulgarian", "Natural 
Language :: Catalan", "Natural Language :: Chinese (Simplified)", "Natural Language :: Czech", "Natural Language :: Dutch", "Natural Language :: English", "Natural Language :: Finnish", "Natural Language :: French", "Natural Language :: German", "Natural Language :: Hungarian", "Natural Language :: Indonesian", "Natural Language :: Italian", "Natural Language :: Persian", "Natural Language :: Polish", "Natural Language :: Portuguese (Brazilian)", "Natural Language :: Romanian", "Natural Language :: Russian", "Natural Language :: Slovenian", "Natural Language :: Spanish", "Natural Language :: Turkish", "Natural Language :: Ukrainian", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: Implementation :: CPython", "Topic :: Software Development :: Libraries :: Application Frameworks"], "name": "trytond", "requires_python": ">=3.9", "version": "7.6.8", "yanked": false}} {"info": {"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Intended Audience :: System Administrators", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Topic :: Software Development", "Topic :: Software Development :: Libraries", "Topic :: Software Development :: Libraries :: Application Frameworks", "Topic :: Software 
Development :: Libraries :: Python Modules", "Typing :: Typed"], "name": "typer", "requires_python": ">=3.7", "version": "0.15.4", "yanked": false}} {"info": {"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Intended Audience :: System Administrators", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Topic :: Software Development", "Topic :: Software Development :: Libraries", "Topic :: Software Development :: Libraries :: Application Frameworks", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed"], "name": "typer", "requires_python": ">=3.8", "version": "0.19.2", "yanked": false}} diff --git a/scripts/split_tox_gh_actions/split_tox_gh_actions.py b/scripts/split_tox_gh_actions/split_tox_gh_actions.py index a7b7c394b1..81f887ad4f 100755 --- a/scripts/split_tox_gh_actions/split_tox_gh_actions.py +++ b/scripts/split_tox_gh_actions/split_tox_gh_actions.py @@ -74,6 +74,7 @@ "cohere", "langchain-base", "langchain-notiktoken", + "litellm", "openai-base", "openai-notiktoken", "langgraph", diff --git a/tox.ini b/tox.ini index 1bca280a11..c74755f206 100644 --- a/tox.ini +++ b/tox.ini @@ -63,13 +63,15 @@ envlist = {py3.9,py3.11,py3.12}-langchain-notiktoken-v0.2.17 {py3.9,py3.12,py3.13}-langchain-notiktoken-v0.3.27 + {py3.9,py3.12,py3.13}-litellm-v1.77.5 + {py3.8,py3.11,py3.12}-openai-base-v1.0.1 {py3.8,py3.12,py3.13}-openai-base-v1.109.1 - {py3.8,py3.12,py3.13}-openai-base-v2.0.1 + {py3.8,py3.12,py3.13}-openai-base-v2.1.0 {py3.8,py3.11,py3.12}-openai-notiktoken-v1.0.1 
{py3.8,py3.12,py3.13}-openai-notiktoken-v1.109.1 - {py3.8,py3.12,py3.13}-openai-notiktoken-v2.0.1 + {py3.8,py3.12,py3.13}-openai-notiktoken-v2.1.0 {py3.9,py3.12,py3.13}-langgraph-v0.6.8 {py3.10,py3.12,py3.13}-langgraph-v1.0.0a4 @@ -89,7 +91,7 @@ envlist = {py3.6,py3.7}-boto3-v1.12.49 {py3.6,py3.9,py3.10}-boto3-v1.20.54 {py3.7,py3.11,py3.12}-boto3-v1.28.85 - {py3.9,py3.12,py3.13}-boto3-v1.40.43 + {py3.9,py3.12,py3.13}-boto3-v1.40.44 {py3.6,py3.7,py3.8}-chalice-v1.16.0 {py3.9,py3.12,py3.13}-chalice-v1.32.0 @@ -267,7 +269,7 @@ envlist = {py3.6}-trytond-v4.8.18 {py3.6,py3.7,py3.8}-trytond-v5.8.16 {py3.8,py3.10,py3.11}-trytond-v6.8.17 - {py3.9,py3.12,py3.13}-trytond-v7.6.7 + {py3.9,py3.12,py3.13}-trytond-v7.6.8 {py3.7,py3.12,py3.13}-typer-v0.15.4 {py3.8,py3.12,py3.13}-typer-v0.19.2 @@ -357,16 +359,18 @@ deps = langchain-notiktoken: langchain-openai langchain-notiktoken-v0.3.27: langchain-community + litellm-v1.77.5: litellm==1.77.5 + openai-base-v1.0.1: openai==1.0.1 openai-base-v1.109.1: openai==1.109.1 - openai-base-v2.0.1: openai==2.0.1 + openai-base-v2.1.0: openai==2.1.0 openai-base: pytest-asyncio openai-base: tiktoken openai-base-v1.0.1: httpx<0.28 openai-notiktoken-v1.0.1: openai==1.0.1 openai-notiktoken-v1.109.1: openai==1.109.1 - openai-notiktoken-v2.0.1: openai==2.0.1 + openai-notiktoken-v2.1.0: openai==2.1.0 openai-notiktoken: pytest-asyncio openai-notiktoken-v1.0.1: httpx<0.28 @@ -390,7 +394,7 @@ deps = boto3-v1.12.49: boto3==1.12.49 boto3-v1.20.54: boto3==1.20.54 boto3-v1.28.85: boto3==1.28.85 - boto3-v1.40.43: boto3==1.40.43 + boto3-v1.40.44: boto3==1.40.44 {py3.7,py3.8}-boto3: urllib3<2.0.0 chalice-v1.16.0: chalice==1.16.0 @@ -688,7 +692,7 @@ deps = trytond-v4.8.18: trytond==4.8.18 trytond-v5.8.16: trytond==5.8.16 trytond-v6.8.17: trytond==6.8.17 - trytond-v7.6.7: trytond==7.6.7 + trytond-v7.6.8: trytond==7.6.8 trytond: werkzeug trytond-v4.6.22: werkzeug<1.0 trytond-v4.8.18: werkzeug<1.0 @@ -746,6 +750,7 @@ setenv = langchain-notiktoken: 
TESTPATH=tests/integrations/langchain langgraph: TESTPATH=tests/integrations/langgraph launchdarkly: TESTPATH=tests/integrations/launchdarkly + litellm: TESTPATH=tests/integrations/litellm litestar: TESTPATH=tests/integrations/litestar loguru: TESTPATH=tests/integrations/loguru openai-base: TESTPATH=tests/integrations/openai From 2ec4a1afd191bac162d4e3eaa173b2cefcf2e868 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 3 Oct 2025 10:13:58 +0200 Subject: [PATCH 12/13] add litellm to lint reqs --- requirements-linting.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements-linting.txt b/requirements-linting.txt index 1cc8274795..9d8e027321 100644 --- a/requirements-linting.txt +++ b/requirements-linting.txt @@ -18,3 +18,4 @@ statsig UnleashClient typer strawberry-graphql +litellm From d1d1b2a30a8a7c4c723d0b40f97cb7a853450cd1 Mon Sep 17 00:00:00 2001 From: Ivana Kellyer Date: Fri, 3 Oct 2025 10:16:31 +0200 Subject: [PATCH 13/13] ignore litellm in mypy instead --- requirements-linting.txt | 1 - sentry_sdk/integrations/litellm.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/requirements-linting.txt b/requirements-linting.txt index 9d8e027321..1cc8274795 100644 --- a/requirements-linting.txt +++ b/requirements-linting.txt @@ -18,4 +18,3 @@ statsig UnleashClient typer strawberry-graphql -litellm diff --git a/sentry_sdk/integrations/litellm.py b/sentry_sdk/integrations/litellm.py index 261a8d029a..2582c2bc05 100644 --- a/sentry_sdk/integrations/litellm.py +++ b/sentry_sdk/integrations/litellm.py @@ -14,7 +14,7 @@ from datetime import datetime try: - import litellm + import litellm # type: ignore[import-not-found] except ImportError: raise DidNotEnable("LiteLLM not installed")