9 changes: 9 additions & 0 deletions src/examples/azureopenai_example/__init__.py
@@ -0,0 +1,9 @@
from examples.azureopenai_example.completion import chat_completion
from langtrace_python_sdk import with_langtrace_root_span, langtrace

langtrace.init()

class AzureOpenAIRunner:
    @with_langtrace_root_span("AzureOpenAI")
    def run(self):
        chat_completion()
22 changes: 22 additions & 0 deletions src/examples/azureopenai_example/completion.py
@@ -0,0 +1,22 @@
import os
from langchain_openai import AzureChatOpenAI

from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

model = AzureChatOpenAI(
    azure_endpoint=os.environ['AZURE_OPENAI_ENDPOINT'],
    azure_deployment=os.environ['AZURE_OPENAI_DEPLOYMENT_NAME'],
    openai_api_version=os.environ['AZURE_OPENAI_API_VERSION'],
)

@with_langtrace_root_span()
def chat_completion():
    messages = [
        (
            "system",
            "You are a helpful assistant that translates English to French. Translate the user sentence.",
        ),
        ("human", "I love programming."),
    ]
    result = model.invoke(messages)
    print(result)
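
The example reads its Azure configuration from environment variables at import time, so they need to be set before the module is loaded. A minimal sketch of that setup follows; the three variables below are the ones the example reads, while AZURE_OPENAI_API_KEY is an assumption (langchain_openai's AzureChatOpenAI typically expects it unless Azure AD authentication is configured), and all values are placeholders.

import os

# Placeholder values; substitute your own Azure OpenAI resource details.
os.environ["AZURE_OPENAI_ENDPOINT"] = "https://<your-resource>.openai.azure.com/"
os.environ["AZURE_OPENAI_DEPLOYMENT_NAME"] = "<your-deployment>"
os.environ["AZURE_OPENAI_API_VERSION"] = "2024-02-01"
# Assumed requirement: the API key, unless Azure AD token auth is used instead.
os.environ["AZURE_OPENAI_API_KEY"] = "<your-api-key>"
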
2 changes: 1 addition & 1 deletion src/langtrace_python_sdk/utils/llm.py
@@ -126,7 +126,7 @@ def get_llm_request_attributes(kwargs, prompts=None, model=None, operation_name=
    tools = kwargs.get("tools", None)
    return {
        SpanAttributes.LLM_OPERATION_NAME: operation_name,
-        SpanAttributes.LLM_REQUEST_MODEL: model or kwargs.get("model"),
+        SpanAttributes.LLM_REQUEST_MODEL: model or kwargs.get("model") or "gpt-3.5-turbo",
        SpanAttributes.LLM_IS_STREAMING: kwargs.get("stream"),
        SpanAttributes.LLM_REQUEST_TEMPERATURE: kwargs.get("temperature"),
        SpanAttributes.LLM_TOP_K: top_k,
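
The changed request attribute above resolves the model name through a short fallback chain: the explicit model argument first, then the "model" key in the request kwargs, then a hard-coded default. A minimal standalone sketch of that resolution order (the helper name here is illustrative, not part of the SDK):

def resolve_request_model(kwargs, model=None):
    # Explicit argument wins, then the "model" key in the request kwargs, then the default.
    return model or kwargs.get("model") or "gpt-3.5-turbo"

assert resolve_request_model({}, model="gpt-4o") == "gpt-4o"
assert resolve_request_model({"model": "gpt-4o-mini"}) == "gpt-4o-mini"
assert resolve_request_model({}) == "gpt-3.5-turbo"
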
2 changes: 1 addition & 1 deletion src/langtrace_python_sdk/version.py
@@ -1 +1 @@
__version__ = "2.3.5"
__version__ = "2.3.6"
9 changes: 8 additions & 1 deletion src/run_example.py
@@ -2,13 +2,14 @@

ENABLED_EXAMPLES = {
    "anthropic": False,
+    "azureopenai": True,
    "chroma": False,
    "cohere": False,
    "fastapi": False,
    "langchain": False,
    "llamaindex": False,
    "hiveagent": False,
-    "openai": True,
+    "openai": False,
    "perplexity": False,
    "pinecone": False,
    "qdrant": False,
@@ -110,3 +111,9 @@

    print(Fore.BLUE + "Running Mistral example" + Fore.RESET)
    MistralRunner().run()
+
+if ENABLED_EXAMPLES["azureopenai"]:
+    from examples.azureopenai_example import AzureOpenAIRunner
+
+    print(Fore.BLUE + "Running Azure OpenAI example" + Fore.RESET)
+    AzureOpenAIRunner().run()
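
For reference, the new runner can also be exercised directly, assuming the environment variables shown earlier are set and src/ is on the Python path:

from examples.azureopenai_example import AzureOpenAIRunner

# Importing the package calls langtrace.init(); run() then issues a traced chat completion.
AzureOpenAIRunner().run()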