Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 5 additions & 5 deletions src/examples/crewai_example/trip_planner/agents.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@ class TravelAgents:
def __init__(self):
self.OpenAIGPT35 = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0.7)
self.OpenAIGPT4 = ChatOpenAI(model_name="gpt-4", temperature=0.7)
self.Ollama = ChatOllama(model="openhermes")
self.Ollama = ChatOllama(model="llama3")
self.Cohere = ChatCohere(model="command-r")
self.Anthropic = ChatAnthropic(model="claude-3-5-sonnet")
self.Anthropic = ChatAnthropic(model="claude-3-5-sonnet-20240620")

def expert_travel_agent(self):
return Agent(
Expand All @@ -28,7 +28,7 @@ def expert_travel_agent(self):
# tools=[tool_1, tool_2],
allow_delegation=False,
verbose=True,
llm=self.OpenAIGPT4,
llm=self.Cohere,
)

def city_selection_expert(self):
Expand All @@ -39,7 +39,7 @@ def city_selection_expert(self):
# tools=[tool_1, tool_2],
allow_delegation=False,
verbose=True,
llm=self.OpenAIGPT4,
llm=self.Cohere,
)

def local_tour_guide(self):
Expand All @@ -50,5 +50,5 @@ def local_tour_guide(self):
# tools=[tool_1, tool_2],
allow_delegation=False,
verbose=True,
llm=self.OpenAIGPT4,
llm=self.Cohere,
)
98 changes: 98 additions & 0 deletions src/examples/litellm_example/basic.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
from langtrace_python_sdk import with_langtrace_root_span, langtrace
from dotenv import load_dotenv
from litellm import completion, acompletion
import litellm
import asyncio

# Load API keys (OPENAI_API_KEY, ANTHROPIC_API_KEY, COHERE_API_KEY, ...)
# from a local .env file before any provider call is made.
load_dotenv()


# Register langtrace as a litellm success callback so completed calls are
# exported as spans; init() must run before the first completion() call.
litellm.success_callback = ["langtrace"]
langtrace.init()
# Keep litellm's own debug logging quiet; tracing still happens via the callback.
litellm.set_verbose = False


@with_langtrace_root_span("Litellm Example OpenAI")
def openAI(streaming=False):
    """Send a demo chat completion to OpenAI gpt-3.5-turbo via litellm.

    Args:
        streaming: when True, stream the response, drain the chunks, and
            return None; when False, return the full response object.

    Returns:
        The litellm completion response, or None in streaming mode.
    """
    # stream_options is only accepted by the API when stream=True; sending it
    # with stream=False causes a request error, so attach it conditionally.
    extra = {"stream_options": {"include_usage": True}} if streaming else {}
    response = completion(
        model="gpt-3.5-turbo",
        messages=[
            {"content": "respond only in Yoda speak.", "role": "system"},
            {"content": "Hello, how are you?", "role": "user"},
        ],
        stream=streaming,
        **extra,
    )
    if streaming:
        # Drain the stream so the request (and the tracing callback) completes.
        for _ in response:
            pass
        return None
    return response


# @with_langtrace_root_span("Litellm Example Anthropic Completion")
def anthropic(streaming=False):
    """Send a demo chat completion to Anthropic claude-2.1 via litellm.

    Args:
        streaming: when True, stream the response, drain the chunks, and
            return None; when False, return the full response object.

    Returns:
        The litellm completion response, None in streaming mode, or None if
        the request raised (the error is printed, not re-raised).
    """
    # stream_options is only valid together with stream=True.
    extra = {"stream_options": {"include_usage": True}} if streaming else {}
    try:
        response = completion(
            model="claude-2.1",
            messages=[
                {"content": "respond only in Yoda speak.", "role": "system"},
                {"content": "what is 2 + 2?", "role": "user"},
            ],
            temperature=0.5,
            top_p=0.5,
            n=1,
            stream=streaming,
            **extra,
        )
        if streaming:
            for _ in response:
                pass
            return None
        return response
    except Exception as e:
        # Best-effort demo: report and swallow API errors (e.g. missing key).
        print("ERROR", e)


# @with_langtrace_root_span("Litellm Example OpenAI Async Streaming")
async def async_anthropic(streaming=False):
    """Async demo chat completion to Anthropic claude-2.1 via litellm.

    Args:
        streaming: when True, stream the response, drain the chunks, and
            return None; when False, return the full response object.

    Returns:
        The litellm completion response, or None in streaming mode.
    """
    # stream_options is only valid together with stream=True.
    extra = {"stream_options": {"include_usage": True}} if streaming else {}
    response = await acompletion(
        model="claude-2.1",
        messages=[{"content": "Hello, how are you?", "role": "user"}],
        stream=streaming,
        temperature=0.5,
        top_p=0.5,
        n=1,
        **extra,
    )
    if streaming:
        # Drain the async stream so the request completes.
        async for _ in response:
            pass
        return None
    return response


def cohere(streaming=False):
    """Send a demo chat completion to Cohere command-r via litellm.

    Args:
        streaming: when True, stream the response, drain the chunks, and
            return None; when False, return the full response object.

    Returns:
        The litellm completion response, or None in streaming mode.
    """
    # stream_options is only valid together with stream=True.
    extra = {"stream_options": {"include_usage": True}} if streaming else {}
    response = completion(
        model="command-r",
        messages=[
            {"content": "respond only in Yoda speak.", "role": "system"},
            {"content": "Hello, how are you?", "role": "user"},
        ],
        stream=streaming,
        **extra,
    )
    if streaming:
        # Drain the stream so the request (and the tracing callback) completes.
        for _ in response:
            pass
        return None
    return response


if __name__ == "__main__":
    # Demo entry point: exercise a couple of providers. Uncomment the other
    # calls to try OpenAI or the async Anthropic variant.
    # openAI()
    anthropic(streaming=False)
    cohere(streaming=True)
    # asyncio.run(async_anthropic(streaming=True))
6 changes: 4 additions & 2 deletions src/langtrace_python_sdk/langtrace.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def init(
disable_instrumentations: Optional[DisableInstrumentations] = None,
disable_tracing_for_functions: Optional[InstrumentationMethods] = None,
service_name: Optional[str] = None,
disable_logging = False
disable_logging=False,
):
if disable_logging:
sys.stdout = open(os.devnull, "w")
Expand All @@ -93,7 +93,9 @@ def init(
provider = TracerProvider(resource=resource, sampler=sampler)

remote_write_exporter = (
LangTraceExporter(api_key=api_key, api_host=host, disable_logging=disable_logging)
LangTraceExporter(
api_key=api_key, api_host=host, disable_logging=disable_logging
)
if custom_remote_exporter is None
else custom_remote_exporter
)
Expand Down