Merged
29 commits
4d717b1
Merge branch 'development' into release
karthikscale3 Apr 24, 2024
0233826
Merge branch 'main' of github.com:Scale3-Labs/langtrace-python-sdk in…
karthikscale3 Apr 28, 2024
7f4e951
Merge branch 'development' into release
karthikscale3 Apr 28, 2024
81a6ca0
Merge
karthikscale3 Jun 13, 2024
0c19f77
Merge branch 'development' into release
karthikscale3 Jun 13, 2024
c3a6ccf
remove logs
karthikscale3 Jun 13, 2024
a99cf10
remove requirements
karthikscale3 Jun 13, 2024
1379b27
Merge branch 'main' of github.com:Scale3-Labs/langtrace-python-sdk in…
karthikscale3 Jun 17, 2024
dae04e7
Merge branch 'development' into release
karthikscale3 Jun 17, 2024
129e927
Merge branch 'main' of github.com:Scale3-Labs/langtrace-python-sdk in…
karthikscale3 Jun 24, 2024
16e67f9
Merge branch 'development' into release
karthikscale3 Jun 24, 2024
e604e93
Bump version
karthikscale3 Jun 24, 2024
7e00473
Merge branch 'main' of github.com:Scale3-Labs/langtrace-python-sdk in…
karthikscale3 Jun 24, 2024
6ac71aa
Merge branch 'development' into release
karthikscale3 Jun 24, 2024
c39bf01
Merge branch 'main' of github.com:Scale3-Labs/langtrace-python-sdk in…
karthikscale3 Jun 24, 2024
f89e38c
Merge branch 'development' into release
karthikscale3 Jun 24, 2024
e95e743
Merge branch 'main' of github.com:Scale3-Labs/langtrace-python-sdk in…
karthikscale3 Jul 19, 2024
390039d
Add OTLP example
karthikscale3 Jul 24, 2024
6623f20
Add OTLP example (#259)
karthikscale3 Jul 24, 2024
ae0655b
Merge commit
karthikscale3 Jul 24, 2024
379e22d
Adhere to otel span attributes (#257)
alizenhom Jul 24, 2024
e0e3944
Merge branch 'development' of github.com:Scale3-Labs/langtrace-python…
karthikscale3 Jul 24, 2024
3897fa3
Bugfixes to sending user feedback and consolidating env var (#261)
karthikscale3 Jul 25, 2024
85f6cef
Merge
karthikscale3 Jul 25, 2024
655fe76
disable completions and prompts (#258)
alizenhom Jul 25, 2024
e3c2b24
Merge branch 'development' of github.com:Scale3-Labs/langtrace-python…
karthikscale3 Jul 25, 2024
c62e803
Squash
karthikscale3 Jul 25, 2024
d7fd3fb
Merge
karthikscale3 Jul 25, 2024
c4ea507
Merge branch 'development' into release
karthikscale3 Jul 25, 2024
5 changes: 5 additions & 0 deletions README.md
@@ -228,6 +228,11 @@ from langtrace_python_sdk import get_prompt_from_registry
 prompt = get_prompt_from_registry(<Registry ID>, options={"prompt_version": 1, "variables": {"foo": "bar"} })
 ```
 
+### Opt out of tracing prompt and completion data
+By default, prompt and completion data are captured. To opt out, set the following environment variable:
+
+`TRACE_PROMPT_COMPLETION_DATA=false`
+
 ## Supported integrations
 
 Langtrace automatically captures traces from the following vendors:
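The README snippet above only names the flag; as a minimal sketch of using it (assuming the `langtrace.init()` entrypoint documented elsewhere in this README), the variable just needs to be set before any instrumented call runs:

```python
import os

# Opt out of capturing prompt/completion payloads. The SDK re-reads this
# variable each time it records a prompt or completion event and lower-cases
# it, so any casing of "false" disables the capture.
os.environ["TRACE_PROMPT_COMPLETION_DATA"] = "false"

from langtrace_python_sdk import langtrace  # assumed import path per the README

langtrace.init()  # spans are still emitted; only prompt/completion events are dropped
```

Setting `export TRACE_PROMPT_COMPLETION_DATA=false` in the shell achieves the same thing without touching application code.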
6 changes: 2 additions & 4 deletions src/langtrace_python_sdk/instrumentation/anthropic/patch.py
@@ -25,6 +25,7 @@
     get_llm_url,
     is_streaming,
     set_event_completion,
+    set_event_completion_chunk,
     set_usage_attributes,
 )
 from opentelemetry.trace import SpanKind
@@ -119,10 +120,7 @@ def handle_streaming_response(result, span):
         # Assuming span.add_event is part of a larger logging or event system
         # Add event for each chunk of content
         if content:
-            span.add_event(
-                Event.STREAM_OUTPUT.value,
-                {SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: "".join(content)},
-            )
+            set_event_completion_chunk(span, "".join(content))
 
         # Assuming this is part of a generator, yield chunk or aggregated content
         yield content
6 changes: 2 additions & 4 deletions src/langtrace_python_sdk/instrumentation/cohere/patch.py
@@ -22,6 +22,7 @@
     get_extra_attributes,
     get_llm_url,
     set_event_completion,
+    set_event_completion_chunk,
     set_usage_attributes,
 )
 from langtrace.trace_attributes import Event, LLMSpanAttributes
@@ -403,10 +404,7 @@ def traced_method(wrapped, instance, args, kwargs):
                     content = event.text
                 else:
                     content = ""
-                span.add_event(
-                    Event.STREAM_OUTPUT.value,
-                    {SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: "".join(content)},
-                )
+                set_event_completion_chunk(span, "".join(content))
 
                 if (
                     hasattr(event, "finish_reason")
18 changes: 9 additions & 9 deletions src/langtrace_python_sdk/instrumentation/groq/patch.py
@@ -30,6 +30,7 @@
     get_llm_url,
     get_langtrace_attributes,
     set_event_completion,
+    set_event_completion_chunk,
     set_usage_attributes,
 )
 from langtrace_python_sdk.constants.instrumentation.common import (
@@ -242,15 +243,14 @@ def handle_streaming_response(
                     content = content + []
                 else:
                     content = []
-                span.add_event(
-                    Event.STREAM_OUTPUT.value,
-                    {
-                        SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: (
-                            "".join(content)
-                            if len(content) > 0 and content[0] is not None
-                            else ""
-                        )
-                    },
+
+                set_event_completion_chunk(
+                    span,
+                    (
+                        "".join(content)
+                        if len(content) > 0 and content[0] is not None
+                        else ""
+                    ),
                 )
                 result_content.append(content[0] if len(content) > 0 else "")
                 yield chunk
16 changes: 4 additions & 12 deletions src/langtrace_python_sdk/instrumentation/ollama/patch.py
@@ -6,6 +6,7 @@
     get_llm_request_attributes,
     get_llm_url,
     set_event_completion,
+    set_event_completion_chunk,
 )
 from langtrace_python_sdk.utils.silently_fail import silently_fail
 from langtrace_python_sdk.constants.instrumentation.common import SERVICE_PROVIDERS
@@ -177,12 +178,8 @@ def _handle_streaming_response(span, response, api):
             if api == "generate":
                 accumulated_tokens["response"] += chunk["response"]
 
-            span.add_event(
-                Event.STREAM_OUTPUT.value,
-                {
-                    SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: chunk.get("response")
-                    or chunk.get("message").get("content"),
-                },
+            set_event_completion_chunk(
+                span, chunk.get("response") or chunk.get("message").get("content")
             )
 
             _set_response_attributes(span, chunk | accumulated_tokens)
@@ -211,12 +208,7 @@ async def _ahandle_streaming_response(span, response, api):
             if api == "generate":
                 accumulated_tokens["response"] += chunk["response"]
 
-            span.add_event(
-                Event.STREAM_OUTPUT.value,
-                {
-                    SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: json.dumps(chunk),
-                },
-            )
+            set_event_completion_chunk(span, chunk)
             _set_response_attributes(span, chunk | accumulated_tokens)
         finally:
             # Finalize span after processing all chunks
21 changes: 15 additions & 6 deletions src/langtrace_python_sdk/utils/__init__.py
@@ -2,23 +2,32 @@
 from .sdk_version_checker import SDKVersionChecker
 from opentelemetry.trace import Span
 from langtrace.trace_attributes import SpanAttributes
+import os
 
 
 def set_span_attribute(span: Span, name, value):
     if value is not None:
         if value != "" or value != NOT_GIVEN:
             if name == SpanAttributes.LLM_PROMPTS:
-                span.add_event(
-                    name=SpanAttributes.LLM_CONTENT_PROMPT,
-                    attributes={
-                        SpanAttributes.LLM_PROMPTS: value,
-                    },
-                )
+                set_event_prompt(span, value)
             else:
                 span.set_attribute(name, value)
     return
 
 
+def set_event_prompt(span: Span, prompt):
+    enabled = os.environ.get("TRACE_PROMPT_COMPLETION_DATA", "true")
+    if enabled.lower() == "false":
+        return
+
+    span.add_event(
+        name=SpanAttributes.LLM_CONTENT_PROMPT,
+        attributes={
+            SpanAttributes.LLM_PROMPTS: prompt,
+        },
+    )
+
+
 def check_if_sdk_is_outdated():
     SDKVersionChecker().check()
     return
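A minimal sketch of the new `set_event_prompt` gate in action (the helper itself comes from this diff; the tracer setup around it is assumed, and without a configured `TracerProvider` the span is a no-op anyway):

```python
import os

from opentelemetry import trace
from langtrace_python_sdk.utils import set_event_prompt

os.environ["TRACE_PROMPT_COMPLETION_DATA"] = "false"

tracer = trace.get_tracer(__name__)
with tracer.start_as_current_span("llm.request") as span:
    # With the flag set to "false" the helper returns early, so no
    # LLM_CONTENT_PROMPT event is attached to the span.
    set_event_prompt(span, '[{"role": "user", "content": "hello"}]')
```

Because the environment is re-read on every call, the flag can also be flipped at runtime rather than only at process start.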
32 changes: 14 additions & 18 deletions src/langtrace_python_sdk/utils/llm.py
@@ -30,6 +30,7 @@
 from opentelemetry import baggage
 from opentelemetry.trace import Span
 from opentelemetry.trace.status import StatusCode
+import os
 
 
 def estimate_tokens(prompt):
@@ -42,6 +43,9 @@ def estimate_tokens(prompt):
 
 
 def set_event_completion_chunk(span: Span, chunk):
+    enabled = os.environ.get("TRACE_PROMPT_COMPLETION_DATA", "true")
+    if enabled.lower() == "false":
+        return
     span.add_event(
         name=SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK,
         attributes={
@@ -203,6 +207,9 @@ def get_tool_calls(item):
 
 
 def set_event_completion(span: Span, result_content):
+    enabled = os.environ.get("TRACE_PROMPT_COMPLETION_DATA", "true")
+    if enabled.lower() == "false":
+        return
 
     span.add_event(
         name=SpanAttributes.LLM_CONTENT_COMPLETION,
@@ -352,15 +359,9 @@ def process_chunk(self, chunk):
                     )
                     self.completion_tokens += token_counts
                     content.append(tool_call.function.arguments)
-            self.span.add_event(
-                Event.STREAM_OUTPUT.value,
-                {
-                    SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: (
-                        "".join(content)
-                        if len(content) > 0 and content[0] is not None
-                        else ""
-                    )
-                },
+            set_event_completion_chunk(
+                self.span,
+                "".join(content) if len(content) > 0 and content[0] is not None else "",
             )
             if content:
                 self.result_content.append(content[0])
@@ -369,16 +370,11 @@ def process_chunk(self, chunk):
             token_counts = estimate_tokens(chunk.text)
             self.completion_tokens += token_counts
             content = [chunk.text]
-            self.span.add_event(
-                Event.STREAM_OUTPUT.value,
-                {
-                    SpanAttributes.LLM_CONTENT_COMPLETION_CHUNK: (
-                        "".join(content)
-                        if len(content) > 0 and content[0] is not None
-                        else ""
-                    )
-                },
+            set_event_completion_chunk(
+                self.span,
+                "".join(content) if len(content) > 0 and content[0] is not None else "",
             )
+
             if content:
                 self.result_content.append(content[0])
 
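The two completion-side helpers above share the same gate; a hedged sketch under the same assumptions as the prompt example:

```python
import os

from opentelemetry import trace
from langtrace_python_sdk.utils.llm import (
    set_event_completion,
    set_event_completion_chunk,
)

os.environ["TRACE_PROMPT_COMPLETION_DATA"] = "false"

with trace.get_tracer(__name__).start_as_current_span("llm.completion") as span:
    # Both helpers check the flag before calling span.add_event, so neither
    # streamed chunks nor the final completion payload is recorded.
    set_event_completion_chunk(span, "partial text")
    set_event_completion(span, [{"role": "assistant", "content": "full text"}])
```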
2 changes: 1 addition & 1 deletion src/langtrace_python_sdk/version.py
@@ -1 +1 @@
-__version__ = "2.2.8"
+__version__ = "2.2.9"