Release 2.1.24 (#220)
* remove logs

* remove requirements

* Bump version

* Account for NOT_GIVEN sentinel type (#219)

* Account for NOT_GIVEN sentinel type

* Bump version
karthikscale3 committed Jun 24, 2024
1 parent a130443 commit 8dcdcce
Showing 2 changed files with 18 additions and 17 deletions.
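
Background on the NOT_GIVEN change: the openai Python SDK marks omitted optional parameters with the NOT_GIVEN sentinel from openai._types, which is a real object rather than None, so a bare "is not None" check still passes when a caller forwards the sentinel and the instrumentation would record NOT_GIVEN as an attribute value. A minimal sketch of the behaviour the patched checks guard against (assuming the openai package is installed; the kwargs dict below is illustrative only, not taken from the commit):

from openai._types import NOT_GIVEN

# Simulate a call where the caller forwarded the SDK's "omitted" sentinel.
kwargs = {"temperature": NOT_GIVEN}
value = kwargs.get("temperature")

print(value is not None)                         # True  -- the old check alone accepts the sentinel
print(value is not None and value != NOT_GIVEN)  # False -- the patched two-clause check skips it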
33 changes: 17 additions & 16 deletions src/langtrace_python_sdk/instrumentation/openai/patch.py
@@ -29,6 +29,7 @@
)
from langtrace_python_sdk.constants.instrumentation.openai import APIS
from langtrace_python_sdk.utils.llm import calculate_prompt_tokens, estimate_tokens
+ from openai._types import NOT_GIVEN


def images_generate(original_method, version, tracer):
@@ -470,16 +471,16 @@ def traced_method(wrapped, instance, args, kwargs):
attributes = LLMSpanAttributes(**span_attributes)

tools = []
if kwargs.get("temperature") is not None:
if kwargs.get("temperature") is not None and kwargs.get("temperature") != NOT_GIVEN:
attributes.llm_temperature = kwargs.get("temperature")
if kwargs.get("top_p") is not None:
if kwargs.get("top_p") is not None and kwargs.get("top_p") != NOT_GIVEN:
attributes.llm_top_p = kwargs.get("top_p")
if kwargs.get("user") is not None:
if kwargs.get("user") is not None and kwargs.get("user") != NOT_GIVEN:
attributes.llm_user = kwargs.get("user")
if kwargs.get("functions") is not None:
if kwargs.get("functions") is not None and kwargs.get("functions") != NOT_GIVEN:
for function in kwargs.get("functions"):
tools.append(json.dumps({"type": "function", "function": function}))
if kwargs.get("tools") is not None:
if kwargs.get("tools") is not None and kwargs.get("tools") != NOT_GIVEN:
tools.append(json.dumps(kwargs.get("tools")))
if len(tools) > 0:
attributes.llm_tools = json.dumps(tools)
@@ -498,7 +499,7 @@ def traced_method(wrapped, instance, args, kwargs):
try:
# Attempt to call the original method
result = wrapped(*args, **kwargs)
if kwargs.get("stream") is False or kwargs.get("stream") is None:
if kwargs.get("stream") is False or kwargs.get("stream") is None or kwargs.get("stream") == NOT_GIVEN:
span.set_attribute("llm.model", result.model)
if hasattr(result, "choices") and result.choices is not None:
responses = [
@@ -527,7 +528,7 @@ def traced_method(wrapped, instance, args, kwargs):
span.set_attribute("llm.responses", json.dumps(responses))
if (
hasattr(result, "system_fingerprint")
- and result.system_fingerprint is not None
+ and result.system_fingerprint is not None and result.system_fingerprint != NOT_GIVEN
):
span.set_attribute(
"llm.system.fingerprint", result.system_fingerprint
@@ -554,7 +555,7 @@ def traced_method(wrapped, instance, args, kwargs):
)

# iterate over kwargs.get("functions") and calculate the prompt tokens
if kwargs.get("functions") is not None:
if kwargs.get("functions") is not None and kwargs.get("functions") != NOT_GIVEN:
for function in kwargs.get("functions"):
prompt_tokens += calculate_prompt_tokens(
json.dumps(function), kwargs.get("model")
@@ -640,16 +641,16 @@ async def traced_method(wrapped, instance, args, kwargs):
attributes = LLMSpanAttributes(**span_attributes)

tools = []
if kwargs.get("temperature") is not None:
if kwargs.get("temperature") is not None and kwargs.get("temperature") != NOT_GIVEN:
attributes.llm_temperature = kwargs.get("temperature")
if kwargs.get("top_p") is not None:
if kwargs.get("top_p") is not None and kwargs.get("top_p") != NOT_GIVEN:
attributes.llm_top_p = kwargs.get("top_p")
if kwargs.get("user") is not None:
if kwargs.get("user") is not None and kwargs.get("user") != NOT_GIVEN:
attributes.llm_user = kwargs.get("user")
if kwargs.get("functions") is not None:
if kwargs.get("functions") is not None and kwargs.get("functions") != NOT_GIVEN:
for function in kwargs.get("functions"):
tools.append(json.dumps({"type": "function", "function": function}))
if kwargs.get("tools") is not None:
if kwargs.get("tools") is not None and kwargs.get("tools") != NOT_GIVEN:
tools.append(json.dumps(kwargs.get("tools")))
if len(tools) > 0:
attributes.llm_tools = json.dumps(tools)
@@ -666,7 +667,7 @@ async def traced_method(wrapped, instance, args, kwargs):
try:
# Attempt to call the original method
result = await wrapped(*args, **kwargs)
if kwargs.get("stream") is False or kwargs.get("stream") is None:
if kwargs.get("stream") is False or kwargs.get("stream") is None or kwargs.get("stream") == NOT_GIVEN:
span.set_attribute("llm.model", result.model)
if hasattr(result, "choices") and result.choices is not None:
responses = [
@@ -695,7 +696,7 @@ async def traced_method(wrapped, instance, args, kwargs):
span.set_attribute("llm.responses", json.dumps(responses))
if (
hasattr(result, "system_fingerprint")
- and result.system_fingerprint is not None
+ and result.system_fingerprint is not None and result.system_fingerprint != NOT_GIVEN
):
span.set_attribute(
"llm.system.fingerprint", result.system_fingerprint
@@ -722,7 +723,7 @@ async def traced_method(wrapped, instance, args, kwargs):
)

# iterate over kwargs.get("functions") and calculate the prompt tokens
if kwargs.get("functions") is not None:
if kwargs.get("functions") is not None and kwargs.get("functions") != NOT_GIVEN:
for function in kwargs.get("functions"):
prompt_tokens += calculate_prompt_tokens(
json.dumps(function), kwargs.get("model")
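The same two-clause guard (not None and not NOT_GIVEN) is repeated for every recorded parameter in both the sync and async traced methods. A small, hypothetical consolidation (not part of this commit; the helper name is invented for illustration) could look like:

from openai._types import NOT_GIVEN


def _is_given(value):
    # Treat both None and the openai NOT_GIVEN sentinel as "unset".
    return value is not None and value != NOT_GIVEN


# Usage equivalent to the patched checks, for example:
# if _is_given(kwargs.get("temperature")):
#     attributes.llm_temperature = kwargs.get("temperature")
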
2 changes: 1 addition & 1 deletion src/langtrace_python_sdk/version.py
@@ -1 +1 @@
- __version__ = "2.1.23"
+ __version__ = "2.1.24"
