60 changes: 27 additions & 33 deletions pyproject.toml
@@ -12,10 +12,10 @@ description = "Python SDK for LangTrace"
readme = "README.md"
authors = [{ name = "Scale3 Labs", email = "engineering@scale3labs.com" }]
license = "Apache-2.0"
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
classifiers = [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
]
dependencies = [
'trace-attributes==7.0.4',
@@ -28,40 +28,36 @@ dependencies = [
'tiktoken>=0.1.1',
'colorama>=0.4.6',
'sqlalchemy',
'fsspec>=2024.6.0'
'fsspec>=2024.6.0',
"transformers>=4.11.3",
]

requires-python = ">=3.9"

[project.optional-dependencies]
dev = [
"openai==1.30.1",
"anthropic",
"chromadb",
"qdrant-client",
"python-dotenv",
"pinecone-client",
"langchain",
"langchain-community",
"langchain-openai",
"langchain-openai",
"chromadb",
"cohere",
"qdrant_client",
"weaviate-client",
"ollama",
"groq",
"google-generativeai",
"google-cloud-aiplatform",
"mistralai"
]

test = [
"pytest",
"pytest-vcr",
"pytest-asyncio",
"openai==1.30.1",
"anthropic",
"chromadb",
"qdrant-client",
"python-dotenv",
"pinecone-client",
"langchain",
"langchain-community",
"langchain-openai",
"langchain-openai",
"chromadb",
"cohere",
"qdrant_client",
"weaviate-client",
"ollama",
"groq",
"google-generativeai",
"google-cloud-aiplatform",
"mistralai",
]

test = ["pytest", "pytest-vcr", "pytest-asyncio"]


[project.urls]
@@ -72,9 +68,7 @@ Homepage = "https://github.com/Scale3-Labs/langtrace-python-sdk"
path = "src/langtrace_python_sdk/version.py"

[tool.hatch.build.targets.sdist]
include = [
"/src",
]
include = ["/src"]

[tool.hatch.build.targets.wheel]
packages = ["src/langtrace_python_sdk", "src/examples", "src/tests"]
@@ -23,6 +23,7 @@

from opentelemetry.trace import SpanKind
from opentelemetry.trace.status import Status, StatusCode
from langtrace.trace_attributes import SpanAttributes

from langtrace_python_sdk.constants.instrumentation.common import (
LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY,
@@ -71,6 +72,16 @@ def traced_method(wrapped, instance, args, kwargs):
if trace_output:
span.set_attribute("langchain.outputs", to_json_string(result))

prompt_tokens = instance.get_num_tokens(args[0])
completion_tokens = instance.get_num_tokens(result)
if hasattr(result, 'usage'):
prompt_tokens = result.usage.prompt_tokens
completion_tokens = result.usage.completion_tokens

span.set_attribute(SpanAttributes.LLM_USAGE_PROMPT_TOKENS, prompt_tokens)
span.set_attribute(SpanAttributes.LLM_USAGE_COMPLETION_TOKENS, completion_tokens)

span.set_status(StatusCode.OK)
return result
except Exception as err:
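For context, the new token-accounting lines above follow one rule: trust the provider-reported `usage` block when the result carries one, and fall back to counting tokens locally otherwise. A minimal standalone sketch of that rule, assuming a hypothetical `resolve_usage` helper and tiktoken's `cl100k_base` encoding purely for illustration (the patched code itself relies on `instance.get_num_tokens`):

```python
from typing import Tuple

import tiktoken  # already a dependency of the SDK ('tiktoken>=0.1.1')


def count_tokens(text: str) -> int:
    """Local fallback count, standing in for instance.get_num_tokens."""
    encoding = tiktoken.get_encoding("cl100k_base")
    return len(encoding.encode(text))


def resolve_usage(prompt: str, result) -> Tuple[int, int]:
    """Return (prompt_tokens, completion_tokens), preferring reported usage."""
    usage = getattr(result, "usage", None)
    if usage is not None:
        # The provider reported exact usage; use it verbatim.
        return usage.prompt_tokens, usage.completion_tokens
    # No usage block on the result: count both sides locally.
    return count_tokens(prompt), count_tokens(str(result))
```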
16 changes: 16 additions & 0 deletions src/langtrace_python_sdk/instrumentation/langchain_core/patch.py
@@ -30,6 +30,7 @@
from importlib_metadata import version as v

from langtrace_python_sdk.constants import LANGTRACE_SDK_NAME
from langtrace.trace_attributes import SpanAttributes


def generic_patch(
@@ -78,8 +79,22 @@ def traced_method(wrapped, instance, args, kwargs):
try:
# Attempt to call the original method
result = wrapped(*args, **kwargs)

if trace_output:
span.set_attribute("langchain.outputs", to_json_string(result))
if hasattr(result, 'usage'):
prompt_tokens = result.usage.prompt_tokens
completion_tokens = result.usage.completion_tokens
span.set_attribute(SpanAttributes.LLM_USAGE_PROMPT_TOKENS, prompt_tokens)
span.set_attribute(SpanAttributes.LLM_USAGE_COMPLETION_TOKENS, completion_tokens)

elif result.generations[0][0].text:
span.set_attribute(SpanAttributes.LLM_USAGE_COMPLETION_TOKENS, instance.get_num_tokens(result.generations[0][0].text))
elif isinstance(args[0][0], str):
span.set_attribute(SpanAttributes.LLM_USAGE_PROMPT_TOKENS, instance.get_num_tokens(args[0][0]))

else:
span.set_attribute(SpanAttributes.LLM_USAGE_PROMPT_TOKENS, instance.get_num_tokens(args[0][0].text))

span.set_status(StatusCode.OK)
return result
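Read top to bottom, the branch above records whichever signal is available first: the provider's `usage` block, then the generated text, then the prompt argument. A compact sketch of that precedence, with a hypothetical `record_tokens` helper, placeholder attribute keys, and `count`/`set_attr` standing in for `instance.get_num_tokens` and `span.set_attribute`:

```python
from typing import Any, Callable, Sequence


def record_tokens(
    result: Any,
    args: Sequence[Any],
    count: Callable[[str], int],
    set_attr: Callable[[str, int], None],
) -> None:
    """Mirror the fallback order used in the hunk above (illustration only)."""
    if hasattr(result, "usage"):
        # Best case: the provider reported both counts.
        set_attr("prompt_tokens", result.usage.prompt_tokens)
        set_attr("completion_tokens", result.usage.completion_tokens)
    elif getattr(result, "generations", None) and result.generations[0][0].text:
        # No usage block: count the generated text locally.
        set_attr("completion_tokens", count(result.generations[0][0].text))
    elif isinstance(args[0][0], str):
        # Plain string prompt: count it as prompt tokens.
        set_attr("prompt_tokens", count(args[0][0]))
    else:
        # Prompt object exposing `.text` (e.g. a message-like input).
        set_attr("prompt_tokens", count(args[0][0].text))
```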
@@ -156,6 +171,7 @@ def traced_method(wrapped, instance, args, kwargs):
try:
# Attempt to call the original method
result = wrapped(*args, **kwargs)

if trace_output:
outputs = {}
if isinstance(result, dict):
2 changes: 1 addition & 1 deletion src/langtrace_python_sdk/instrumentation/openai/patch.py
@@ -44,7 +44,7 @@
StreamWrapper,
set_span_attributes,
)
from openai._types import NOT_GIVEN
from langtrace_python_sdk.types import NOT_GIVEN


def images_generate(original_method, version, tracer):
31 changes: 30 additions & 1 deletion src/langtrace_python_sdk/types/__init__.py
@@ -1,4 +1,4 @@
from typing import List, Literal, TypedDict
from typing import List, Literal, TypeVar, TypedDict, Union
from enum import Enum


@@ -111,3 +111,32 @@ class InstrumentationMethods(TypedDict):
anthropic: List[VendorMethods.AnthropicMethods]
cohere: List[VendorMethods.CohereMethods]
weaviate: List[str]

_T = TypeVar("_T")
class NotGiven:
"""
A sentinel singleton class used to distinguish omitted keyword arguments
from those passed in with the value None (which may have different behavior).

For example:

```py
def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response:
...


get(timeout=1) # 1s timeout
get(timeout=None) # No timeout
get() # Default timeout behavior, which may not be statically known at the method definition.
```
"""

def __bool__(self) -> Literal[False]:
return False

def __repr__(self) -> str:
return "NOT_GIVEN"


NotGivenOr = Union[_T, NotGiven]
NOT_GIVEN = NotGiven()
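Vendoring the sentinel keeps the openai-style distinction between "argument omitted" and "argument explicitly None" without importing `openai` itself (see the import changes below). A small usage sketch, with a hypothetical `build_params` helper for illustration:

```python
from typing import List, Optional, Union

from langtrace_python_sdk.types import NOT_GIVEN, NotGiven


def build_params(
    temperature: Union[float, NotGiven] = NOT_GIVEN,
    stop: Union[Optional[List[str]], NotGiven] = NOT_GIVEN,
) -> dict:
    """Collect only the arguments the caller actually supplied."""
    params = {"temperature": temperature, "stop": stop}
    # Omitted arguments still hold the NOT_GIVEN sentinel and are dropped,
    # while an explicit None survives the filter.
    return {k: v for k, v in params.items() if not isinstance(v, NotGiven)}


print(build_params(temperature=0.2))  # {'temperature': 0.2}
print(build_params(stop=None))        # {'stop': None}  -- explicit None is kept
print(build_params())                 # {}  -- nothing was supplied
```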
2 changes: 1 addition & 1 deletion src/langtrace_python_sdk/utils/__init__.py
@@ -1,4 +1,4 @@
from openai import NOT_GIVEN
from langtrace_python_sdk.types import NOT_GIVEN
from .sdk_version_checker import SDKVersionChecker
from opentelemetry.trace import Span
from langtrace.trace_attributes import SpanAttributes
2 changes: 1 addition & 1 deletion src/langtrace_python_sdk/utils/llm.py
@@ -16,7 +16,7 @@

from langtrace_python_sdk.constants import LANGTRACE_SDK_NAME
from langtrace_python_sdk.utils import set_span_attribute
from openai import NOT_GIVEN
from langtrace_python_sdk.types import NOT_GIVEN
from tiktoken import get_encoding
from tiktoken import get_encoding, list_encoding_names

2 changes: 1 addition & 1 deletion src/langtrace_python_sdk/version.py
@@ -1 +1 @@
__version__ = "2.2.29"
__version__ = "2.2.30"