73 changes: 73 additions & 0 deletions src/examples/openai_example/chat_completion_tool_choice.py
@@ -0,0 +1,73 @@
# Example taken from https://platform.openai.com/docs/guides/function-calling
import json

from dotenv import find_dotenv, load_dotenv
from openai import OpenAI

from langtrace_python_sdk import langtrace

_ = load_dotenv(find_dotenv())

langtrace.init(
    write_spans_to_console=True,
)

client = OpenAI()


# Example dummy function hard coded to return the same weather
# In production, this could be your backend API or an external API
def get_current_weather(location, unit="fahrenheit"):
"""Get the current weather in a given location"""
if "tokyo" in location.lower():
return json.dumps({"location": "Tokyo", "temperature": "10", "unit": unit})
elif "san francisco" in location.lower():
return json.dumps(
{"location": "San Francisco", "temperature": "72", "unit": unit}
)
elif "paris" in location.lower():
return json.dumps({"location": "Paris", "temperature": "22", "unit": unit})
else:
return json.dumps({"location": location, "temperature": "unknown"})


def run_conversation():
    # Step 1: send the conversation and available functions to the model
    messages = [
        {
            "role": "user",
            "content": "What's the weather like in San Francisco, Tokyo, and Paris?",
        }
    ]
    tools = [
        {
            "type": "function",
            "function": {
                "name": "get_current_weather",
                "description": "Get the current weather in a given location",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "The city and state, e.g. San Francisco, CA",
                        },
                        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                    },
                    "required": ["location"],
                },
            },
        }
    ]
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=messages,
        tools=tools,
tool_choice="required", # auto is default, but we'll be explicit
    )
    response_message = response.choices[0].message
    tool_calls = response_message.tool_calls
    return tool_calls


print(run_conversation())
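
The example stops after the first request and surfaces the tool calls the model asked for. For reference, the upstream OpenAI function-calling guide continues with a second step that runs the requested tools and sends their output back to the model. Below is a minimal sketch of that continuation, reusing the names defined above; the helper name `answer_with_tool_results` is illustrative, not part of the guide or this PR.

```python
# Sketch only: continues the upstream function-calling example; assumes the
# client, get_current_weather, messages, response_message, and tool_calls above.
def answer_with_tool_results(messages, response_message, tool_calls):
    messages.append(response_message)  # assistant turn that requested the tool calls
    for tool_call in tool_calls:
        arguments = json.loads(tool_call.function.arguments)
        result = get_current_weather(
            location=arguments.get("location"),
            unit=arguments.get("unit", "fahrenheit"),
        )
        # Each tool result goes back as a "tool" message tied to its call id.
        messages.append(
            {
                "tool_call_id": tool_call.id,
                "role": "tool",
                "name": tool_call.function.name,
                "content": result,
            }
        )
    # Second request: the model answers in natural language using the tool output.
    second_response = client.chat.completions.create(model="gpt-4o", messages=messages)
    return second_response.choices[0].message.content
```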
4 changes: 3 additions & 1 deletion src/langtrace_python_sdk/instrumentation/anthropic/patch.py
@@ -48,7 +48,9 @@ def traced_method(wrapped, instance, args, kwargs):
         prompts = kwargs.get("messages", [])
         system = kwargs.get("system")
         if system:
-            prompts = [{"role": "system", "content": system}] + kwargs.get("messages", [])
+            prompts = [{"role": "system", "content": system}] + kwargs.get(
+                "messages", []
+            )
 
         span_attributes = {
             **get_langtrace_attributes(version, service_provider),
5 changes: 4 additions & 1 deletion src/langtrace_python_sdk/instrumentation/gemini/patch.py
@@ -110,7 +110,10 @@ def get_llm_model(instance):
 
 def serialize_prompts(args, kwargs, instance):
     prompts = []
-    if hasattr(instance, "_system_instruction") and instance._system_instruction is not None:
+    if (
+        hasattr(instance, "_system_instruction")
+        and instance._system_instruction is not None
+    ):
         system_prompt = {
             "role": "system",
             "content": instance._system_instruction.__dict__["_pb"].parts[0].text,
1 change: 1 addition & 0 deletions src/langtrace_python_sdk/utils/llm.py
@@ -123,6 +123,7 @@ def get_llm_request_attributes(kwargs, prompts=None, model=None):
         SpanAttributes.LLM_FREQUENCY_PENALTY: kwargs.get("frequency_penalty"),
         SpanAttributes.LLM_REQUEST_SEED: kwargs.get("seed"),
         SpanAttributes.LLM_TOOLS: json.dumps(tools) if tools else None,
+        SpanAttributes.LLM_TOOL_CHOICE: kwargs.get("tool_choice"),
         SpanAttributes.LLM_REQUEST_LOGPROPS: kwargs.get("logprobs"),
         SpanAttributes.LLM_REQUEST_LOGITBIAS: kwargs.get("logit_bias"),
         SpanAttributes.LLM_REQUEST_TOP_LOGPROPS: kwargs.get("top_logprobs"),
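
The one-line change to `utils/llm.py` is what makes the new example observable: the request's `tool_choice` value is now recorded on the span alongside `tools`, `seed`, and the other request attributes. A rough illustration of the effect, assuming `get_llm_request_attributes` (whose signature appears in the hunk header above) can be called directly with a plain kwargs dict and is importable from `langtrace_python_sdk.utils.llm`:

```python
# Illustration only, not part of this diff; behavior of the helper beyond the
# lines shown in the hunk above is assumed.
from langtrace_python_sdk.utils.llm import get_llm_request_attributes

request_kwargs = {
    "model": "gpt-4o",
    "temperature": 0.0,
    "tool_choice": "required",  # now captured under SpanAttributes.LLM_TOOL_CHOICE
}
attributes = get_llm_request_attributes(request_kwargs)

# Drop unset (None) options purely for readability before printing.
print({key: value for key, value in attributes.items() if value is not None})
```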