From a3056ce58095dd36073c3a13e7e2cdde66fdd8c1 Mon Sep 17 00:00:00 2001
From: Ali Waleed <134522290+alizenhom@users.noreply.github.com>
Date: Wed, 24 Jul 2024 19:45:53 +0300
Subject: [PATCH 1/3] disable completions and prompts

---
 .../instrumentation/anthropic/patch.py      |  5 +--
 .../instrumentation/cohere/patch.py         |  6 ++--
 .../instrumentation/groq/patch.py           | 18 +++++------
 .../instrumentation/ollama/patch.py         | 16 +++-------
 src/langtrace_python_sdk/utils/__init__.py  | 21 ++++++++----
 src/langtrace_python_sdk/utils/llm.py       | 32 ++++++++-----------
 src/langtrace_python_sdk/version.py         |  2 +-
 7 files changed, 46 insertions(+), 54 deletions(-)

diff --git a/src/langtrace_python_sdk/instrumentation/anthropic/patch.py b/src/langtrace_python_sdk/instrumentation/anthropic/patch.py
index dbaade3e..6ae00f4c 100644
--- a/src/langtrace_python_sdk/instrumentation/anthropic/patch.py
+++ b/src/langtrace_python_sdk/instrumentation/anthropic/patch.py
@@ -119,10 +119,7 @@ def handle_streaming_response(result, span):
             # Assuming span.add_event is part of a larger logging or event system
             # Add event for each chunk of content
             if content:
-                span.add_event(
-                    Event.STREAM_OUTPUT.value,
-                    {SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: "".join(content)},
-                )
+                set_event_completion_chunk(span, "".join(content))
 
             # Assuming this is part of a generator, yield chunk or aggregated content
             yield content
diff --git a/src/langtrace_python_sdk/instrumentation/cohere/patch.py b/src/langtrace_python_sdk/instrumentation/cohere/patch.py
index 7fe72376..8413835a 100644
--- a/src/langtrace_python_sdk/instrumentation/cohere/patch.py
+++ b/src/langtrace_python_sdk/instrumentation/cohere/patch.py
@@ -22,6 +22,7 @@
     get_extra_attributes,
     get_llm_url,
     set_event_completion,
+    set_event_completion_chunk,
     set_usage_attributes,
 )
 from langtrace.trace_attributes import Event, LLMSpanAttributes
@@ -403,10 +404,7 @@ def traced_method(wrapped, instance, args, kwargs):
                     content = event.text
                 else:
                     content = ""
-                span.add_event(
-                    Event.STREAM_OUTPUT.value,
-                    {SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: "".join(content)},
-                )
+                set_event_completion_chunk(span, "".join(content))
 
                 if (
                     hasattr(event, "finish_reason")
diff --git a/src/langtrace_python_sdk/instrumentation/groq/patch.py b/src/langtrace_python_sdk/instrumentation/groq/patch.py
index 9e19e51e..11c89a88 100644
--- a/src/langtrace_python_sdk/instrumentation/groq/patch.py
+++ b/src/langtrace_python_sdk/instrumentation/groq/patch.py
@@ -30,6 +30,7 @@
     get_llm_url,
     get_langtrace_attributes,
     set_event_completion,
+    set_event_completion_chunk,
     set_usage_attributes,
 )
 from langtrace_python_sdk.constants.instrumentation.common import (
@@ -242,15 +243,14 @@ def handle_streaming_response(
                 content = content + []
             else:
                 content = []
-            span.add_event(
-                Event.STREAM_OUTPUT.value,
-                {
-                    SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: (
-                        "".join(content)
-                        if len(content) > 0 and content[0] is not None
-                        else ""
-                    )
-                },
+
+            set_event_completion_chunk(
+                span,
+                (
+                    "".join(content)
+                    if len(content) > 0 and content[0] is not None
+                    else ""
+                ),
             )
             result_content.append(content[0] if len(content) > 0 else "")
             yield chunk
diff --git a/src/langtrace_python_sdk/instrumentation/ollama/patch.py b/src/langtrace_python_sdk/instrumentation/ollama/patch.py
index 584320e3..9c13073a 100644
--- a/src/langtrace_python_sdk/instrumentation/ollama/patch.py
+++ b/src/langtrace_python_sdk/instrumentation/ollama/patch.py
@@ -6,6 +6,7 @@
     get_llm_request_attributes,
     get_llm_url,
     set_event_completion,
+    set_event_completion_chunk,
 )
 from langtrace_python_sdk.utils.silently_fail import silently_fail
 from langtrace_python_sdk.constants.instrumentation.common import SERVICE_PROVIDERS
@@ -177,12 +178,8 @@ def _handle_streaming_response(span, response, api):
             if api == "generate":
                 accumulated_tokens["response"] += chunk["response"]
 
-            span.add_event(
-                Event.STREAM_OUTPUT.value,
-                {
-                    SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: chunk.get("response")
-                    or chunk.get("message").get("content"),
-                },
+            set_event_completion_chunk(
+                span, chunk.get("response") or chunk.get("message").get("content")
             )
 
             _set_response_attributes(span, chunk | accumulated_tokens)
@@ -211,12 +208,7 @@ async def _ahandle_streaming_response(span, response, api):
             if api == "generate":
                 accumulated_tokens["response"] += chunk["response"]
 
-            span.add_event(
-                Event.STREAM_OUTPUT.value,
-                {
-                    SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: json.dumps(chunk),
-                },
-            )
+            set_event_completion_chunk(span, chunk)
             _set_response_attributes(span, chunk | accumulated_tokens)
     finally:
         # Finalize span after processing all chunks
diff --git a/src/langtrace_python_sdk/utils/__init__.py b/src/langtrace_python_sdk/utils/__init__.py
index 5f23d227..fd506c3b 100644
--- a/src/langtrace_python_sdk/utils/__init__.py
+++ b/src/langtrace_python_sdk/utils/__init__.py
@@ -2,23 +2,32 @@
 from .sdk_version_checker import SDKVersionChecker
 from opentelemetry.trace import Span
 from langtrace.trace_attributes import SpanAttributes
+import os
 
 
 def set_span_attribute(span: Span, name, value):
     if value is not None:
         if value != "" or value != NOT_GIVEN:
             if name == SpanAttributes.LLM_PROMPTS:
-                span.add_event(
-                    name=SpanAttributes.LLM_CONTENT_PROMPT,
-                    attributes={
-                        SpanAttributes.LLM_PROMPTS: value,
-                    },
-                )
+                set_event_prompt(span, value)
             else:
                 span.set_attribute(name, value)
     return
 
 
+def set_event_prompt(span: Span, prompt):
+    disabled = os.environ.get("HIDE_SENSITIVE_DATA", False)
+    if disabled:
+        return
+
+    span.add_event(
+        name=SpanAttributes.LLM_CONTENT_PROMPT,
+        attributes={
+            SpanAttributes.LLM_PROMPTS: prompt,
+        },
+    )
+
+
 def check_if_sdk_is_outdated():
     SDKVersionChecker().check()
     return
diff --git a/src/langtrace_python_sdk/utils/llm.py b/src/langtrace_python_sdk/utils/llm.py
index 4aefc912..a43c69d9 100644
--- a/src/langtrace_python_sdk/utils/llm.py
+++ b/src/langtrace_python_sdk/utils/llm.py
@@ -30,6 +30,7 @@
 from opentelemetry import baggage
 from opentelemetry.trace import Span
 from opentelemetry.trace.status import StatusCode
+import os
 
 
 def estimate_tokens(prompt):
@@ -42,6 +43,9 @@ def estimate_tokens(prompt):
 
 
 def set_event_completion_chunk(span: Span, chunk):
+    disabled = os.environ.get("HIDE_SENSITIVE_DATA", False)
+    if disabled:
+        return
     span.add_event(
         name=SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK,
         attributes={
@@ -202,6 +206,9 @@ def get_tool_calls(item):
 
 
 def set_event_completion(span: Span, result_content):
+    disabled = os.environ.get("HIDE_SENSITIVE_DATA", False)
+    if disabled:
+        return
 
     span.add_event(
         name=SpanAttributes.LLM_CONTENT_COMPLETION,
@@ -351,15 +358,9 @@ def process_chunk(self, chunk):
                     )
                     self.completion_tokens += token_counts
                     content.append(tool_call.function.arguments)
-            self.span.add_event(
-                Event.STREAM_OUTPUT.value,
-                {
-                    SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: (
-                        "".join(content)
-                        if len(content) > 0 and content[0] is not None
-                        else ""
-                    )
-                },
+            set_event_completion_chunk(
+                self.span,
+                "".join(content) if len(content) > 0 and content[0] is not None else "",
             )
             if content:
                 self.result_content.append(content[0])
@@ -368,16 +369,11 @@ def process_chunk(self, chunk):
             token_counts = estimate_tokens(chunk.text)
             self.completion_tokens += token_counts
             content = [chunk.text]
-            self.span.add_event(
-                Event.STREAM_OUTPUT.value,
-                {
-                    SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: (
-                        "".join(content)
-                        if len(content) > 0 and content[0] is not None
-                        else ""
-                    )
-                },
+            set_event_completion_chunk(
+                self.span,
+                "".join(content) if len(content) > 0 and content[0] is not None else "",
             )
+
             if content:
                 self.result_content.append(content[0])
 
diff --git a/src/langtrace_python_sdk/version.py b/src/langtrace_python_sdk/version.py
index a7ecb802..90a1f38f 100644
--- a/src/langtrace_python_sdk/version.py
+++ b/src/langtrace_python_sdk/version.py
@@ -1 +1 @@
-__version__ = "2.2.6"
+__version__ = "2.2.7"

From 35ee3d33290b9522533ff0038b89de4ca0503512 Mon Sep 17 00:00:00 2001
From: Ali Waleed <134522290+alizenhom@users.noreply.github.com>
Date: Thu, 25 Jul 2024 00:48:19 +0300
Subject: [PATCH 2/3] fix import

---
 src/langtrace_python_sdk/instrumentation/anthropic/patch.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/langtrace_python_sdk/instrumentation/anthropic/patch.py b/src/langtrace_python_sdk/instrumentation/anthropic/patch.py
index 6ae00f4c..69618667 100644
--- a/src/langtrace_python_sdk/instrumentation/anthropic/patch.py
+++ b/src/langtrace_python_sdk/instrumentation/anthropic/patch.py
@@ -25,6 +25,7 @@
     get_llm_url,
     is_streaming,
     set_event_completion,
+    set_event_completion_chunk,
     set_usage_attributes,
 )
 from opentelemetry.trace import SpanKind

From cd0c73e46d9b80d84d764a8f1ad74ab27b366a39 Mon Sep 17 00:00:00 2001
From: Karthik Kalyanaraman
Date: Wed, 24 Jul 2024 21:27:20 -0700
Subject: [PATCH 3/3] Bump version

---
 README.md                                  | 5 +++++
 src/langtrace_python_sdk/utils/__init__.py | 4 ++--
 src/langtrace_python_sdk/utils/llm.py      | 8 ++++----
 src/langtrace_python_sdk/version.py        | 2 +-
 4 files changed, 12 insertions(+), 7 deletions(-)

diff --git a/README.md b/README.md
index 4c0af2b8..b6068619 100644
--- a/README.md
+++ b/README.md
@@ -228,6 +228,11 @@ from langtrace_python_sdk import get_prompt_from_registry
 
 prompt = get_prompt_from_registry(, options={"prompt_version": 1, "variables": {"foo": "bar"} })
 ```
+### Opt out of tracing prompt and completion data
+By default, prompt and completion data are captured. To opt out, set the following environment variable:
+
+`TRACE_PROMPT_COMPLETION_DATA=false`
+
 ## Supported integrations
 
 Langtrace automatically captures traces from the following vendors:
diff --git a/src/langtrace_python_sdk/utils/__init__.py b/src/langtrace_python_sdk/utils/__init__.py
index fd506c3b..bcda19ce 100644
--- a/src/langtrace_python_sdk/utils/__init__.py
+++ b/src/langtrace_python_sdk/utils/__init__.py
@@ -16,8 +16,8 @@ def set_span_attribute(span: Span, name, value):
 
 
 def set_event_prompt(span: Span, prompt):
-    disabled = os.environ.get("HIDE_SENSITIVE_DATA", False)
-    if disabled:
+    enabled = os.environ.get("TRACE_PROMPT_COMPLETION_DATA", "true")
+    if enabled.lower() == "false":
         return
 
     span.add_event(
diff --git a/src/langtrace_python_sdk/utils/llm.py b/src/langtrace_python_sdk/utils/llm.py
index 7046b505..49352ede 100644
--- a/src/langtrace_python_sdk/utils/llm.py
+++ b/src/langtrace_python_sdk/utils/llm.py
@@ -43,8 +43,8 @@ def estimate_tokens(prompt):
 
 def set_event_completion_chunk(span: Span, chunk):
-    disabled = os.environ.get("HIDE_SENSITIVE_DATA", False)
-    if disabled:
+    enabled = os.environ.get("TRACE_PROMPT_COMPLETION_DATA", "true")
+    if enabled.lower() == "false":
         return
     span.add_event(
         name=SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK,
         attributes={
@@ -207,8 +207,8 @@ def get_tool_calls(item):
 
 
 def set_event_completion(span: Span, result_content):
-    disabled = os.environ.get("HIDE_SENSITIVE_DATA", False)
-    if disabled:
+    enabled = os.environ.get("TRACE_PROMPT_COMPLETION_DATA", "true")
+    if enabled.lower() == "false":
         return
 
     span.add_event(
diff --git a/src/langtrace_python_sdk/version.py b/src/langtrace_python_sdk/version.py
index 23bc6ef5..73b4b053 100644
--- a/src/langtrace_python_sdk/version.py
+++ b/src/langtrace_python_sdk/version.py
@@ -1 +1 @@
-__version__ = "2.2.8"
+__version__ = "2.2.9"
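
Note on the behavior change across this series: PATCH 1/3 gates the events on `os.environ.get("HIDE_SENSITIVE_DATA", False)`, but `os.environ.get` returns a string whenever the variable is set, so any value, including `HIDE_SENSITIVE_DATA=false`, would have disabled capture. PATCH 3/3 therefore switches to `TRACE_PROMPT_COMPLETION_DATA` with an explicit string comparison. Below is a minimal sketch of the final guard; `_capture_enabled` is a hypothetical helper name for illustration, since the SDK inlines this check directly in `set_event_prompt`, `set_event_completion`, and `set_event_completion_chunk`:

```python
import os

def _capture_enabled() -> bool:
    # Same check the patched helpers perform: capture prompt/completion
    # data unless TRACE_PROMPT_COMPLETION_DATA is explicitly "false".
    return os.environ.get("TRACE_PROMPT_COMPLETION_DATA", "true").lower() != "false"

# Opt out of prompt/completion capture for this process.
os.environ["TRACE_PROMPT_COMPLETION_DATA"] = "false"
print(_capture_enabled())  # False: the set_event_* helpers return before add_event
```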