Merged
Changes from all commits
29 commits
4d717b1
Merge branch 'development' into release
karthikscale3 Apr 24, 2024
0233826
Merge branch 'main' of github.com:Scale3-Labs/langtrace-python-sdk in…
karthikscale3 Apr 28, 2024
7f4e951
Merge branch 'development' into release
karthikscale3 Apr 28, 2024
81a6ca0
Merge
karthikscale3 Jun 13, 2024
0c19f77
Merge branch 'development' into release
karthikscale3 Jun 13, 2024
c3a6ccf
remove logs
karthikscale3 Jun 13, 2024
a99cf10
remove requirements
karthikscale3 Jun 13, 2024
1379b27
Merge branch 'main' of github.com:Scale3-Labs/langtrace-python-sdk in…
karthikscale3 Jun 17, 2024
dae04e7
Merge branch 'development' into release
karthikscale3 Jun 17, 2024
129e927
Merge branch 'main' of github.com:Scale3-Labs/langtrace-python-sdk in…
karthikscale3 Jun 24, 2024
16e67f9
Merge branch 'development' into release
karthikscale3 Jun 24, 2024
e604e93
Bump version
karthikscale3 Jun 24, 2024
7e00473
Merge branch 'main' of github.com:Scale3-Labs/langtrace-python-sdk in…
karthikscale3 Jun 24, 2024
6ac71aa
Merge branch 'development' into release
karthikscale3 Jun 24, 2024
c39bf01
Merge branch 'main' of github.com:Scale3-Labs/langtrace-python-sdk in…
karthikscale3 Jun 24, 2024
f89e38c
Merge branch 'development' into release
karthikscale3 Jun 24, 2024
a9d3400
DSPy - Bugfixes and update to dspy-ai (#246)
karthikscale3 Jul 19, 2024
e95e743
Merge branch 'main' of github.com:Scale3-Labs/langtrace-python-sdk in…
karthikscale3 Jul 19, 2024
9ebbe17
Merge branch 'development' into release
karthikscale3 Jul 19, 2024
e6d8542
chore: add back openai tool choice arg (#245)
darshit-s3 Jul 22, 2024
5af542f
Merge branch 'main' of github.com:Scale3-Labs/langtrace-python-sdk in…
karthikscale3 Jul 23, 2024
8b41bf9
Allow DSPy span naming (#249)
karthikscale3 Jul 23, 2024
16b9d46
Merge branch 'development' into release
karthikscale3 Jul 23, 2024
a820d79
Merge branch 'development' into release
karthikscale3 Jul 23, 2024
4fd269e
Bump trace attributes
karthikscale3 Jul 24, 2024
68ee1b5
Bump version
karthikscale3 Jul 24, 2024
3db2ea3
Fix SpanKind
karthikscale3 Jul 24, 2024
c2ed5c2
Merge branch 'main' of github.com:Scale3-Labs/langtrace-python-sdk in…
karthikscale3 Jul 24, 2024
166d5e1
Bump version
karthikscale3 Jul 24, 2024
2 changes: 1 addition & 1 deletion src/examples/dspy_example/math_problems_cot_parallel.py
@@ -5,7 +5,7 @@
from concurrent.futures import ThreadPoolExecutor

# flake8: noqa
from langtrace_python_sdk import langtrace, with_langtrace_root_span
from langtrace_python_sdk import langtrace, with_langtrace_root_span, inject_additional_attributes

langtrace.init()

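The change above only adds inject_additional_attributes to the import; the rest of the DSPy example is unchanged. As a rough sketch of how the helper is typically used, assuming it takes a zero-argument callable plus a dict of extra span attributes and returns the callable's result (the attribute key and the run_program helper below are illustrative, not part of this diff):

from langtrace_python_sdk import langtrace, inject_additional_attributes

langtrace.init()


def run_program():
    # Stand-in for the DSPy chain-of-thought call in this example (hypothetical).
    return "42"


# The attributes passed here are attached to spans created while the callable runs.
result = inject_additional_attributes(run_program, {"user.id": "demo-user"})
print(result)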
24 changes: 16 additions & 8 deletions src/examples/inspect_ai_example/basic_eval.py
@@ -1,24 +1,32 @@
# langtrace.init(write_spans_to_console=True)
import fsspec
from inspect_ai import Task, task
from inspect_ai.dataset import csv_dataset
from inspect_ai.dataset import csv_dataset, Sample
from inspect_ai.scorer import model_graded_qa
from inspect_ai.solver import chain_of_thought, generate, self_critique

from inspect_ai.solver import chain_of_thought, self_critique
from langtrace_python_sdk.extensions.langtrace_filesystem import LangTraceFileSystem

# from langtrace_python_sdk import langtrace


# Manually register the filesystem with fsspec
# Note: This is only necessary because the filesystem is not registered.
fsspec.register_implementation(LangTraceFileSystem.protocol, LangTraceFileSystem)

question = "What is the price?"


def hydrate_with_question(record):
# add context to input
record["input"] = f"Context: {record['input']}\n question: {question}"

return Sample(
input=record["input"],
target=record["target"],
)


@task
def security_guide():
def pricing_question():
return Task(
dataset=csv_dataset("langtracefs://clxc2mxu6000lpc7ntsvcjvp9"),
dataset=csv_dataset("langtracefs://clyythmcs0001145cuvi426zi", hydrate_with_question),
plan=[chain_of_thought(), self_critique()],
scorer=model_graded_qa(),
)
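For context on the new hydration step: hydrate_with_question is passed as the second argument to csv_dataset, which uses it to map each CSV record to a Sample, prepending the question to the row's input. A small sketch with a made-up row (the row contents below are hypothetical):

from inspect_ai.dataset import Sample

question = "What is the price?"
record = {"input": "The standard plan is $20 per month.", "target": "$20 per month"}

# Same mapping as hydrate_with_question above, shown on a single record.
sample = Sample(
    input=f"Context: {record['input']}\n question: {question}",
    target=record["target"],
)
print(sample.input)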
34 changes: 34 additions & 0 deletions src/examples/ollama_example/basic_example_2.py
@@ -0,0 +1,34 @@
from langtrace_python_sdk import langtrace
from openai import OpenAI
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter

service_name = "langtrace-python-ollama"
otlp_endpoint = "http://localhost:4318/v1/traces"
otlp_exporter = OTLPSpanExporter(
endpoint=otlp_endpoint,
headers=(("Content-Type", "application/json"),))
langtrace.init(custom_remote_exporter=otlp_exporter, batch=False)


def chat_with_ollama():
# Use the OpenAI endpoint, not the Ollama API.
base_url = "http://localhost:11434/v1"
client = OpenAI(base_url=base_url, api_key="unused")
messages = [
{
"role": "user",
"content": "Hello, I'm a human.",
},
]
chat_completion = client.chat.completions.create(
model="llama3", messages=messages
)
print(chat_completion.choices[0].message.content)


def main():
chat_with_ollama()


if __name__ == "__main__":
main()
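This example drives Ollama through its OpenAI-compatible endpoint rather than the native Ollama API, so it assumes a local Ollama server is running and the llama3 model has already been pulled. A quick sanity check along the same lines (the API key value is ignored by Ollama):

from openai import OpenAI

# Assumes `ollama serve` is running locally and `ollama pull llama3` has completed.
client = OpenAI(base_url="http://localhost:11434/v1", api_key="unused")
for model in client.models.list().data:
    print(model.id)  # "llama3" should be listed if the model is available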
43 changes: 43 additions & 0 deletions src/examples/otlp_example/otlp_basic.py
@@ -0,0 +1,43 @@
# Instructions
# 1. Run the OpenTelemetry Collector with the OTLP receiver enabled
# Create otel-config.yaml with the following content:
# receivers:
# otlp:
# protocols:
# grpc:
# endpoint: "0.0.0.0:4317"
# http:
# endpoint: "0.0.0.0:4318"

# exporters:
# logging:
# loglevel: debug

# service:
# pipelines:
# traces:
# receivers: [otlp]
# exporters: [logging]
# docker pull otel/opentelemetry-collector:latest
# docker run --rm -p 4317:4317 -p 4318:4318 -v $(pwd)/otel-config.yaml:/otel-config.yaml otel/opentelemetry-collector --config otel-config.yaml
# 2. Run the following code

from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter

# Set up the tracer provider
trace.set_tracer_provider(TracerProvider())
tracer = trace.get_tracer(__name__)

# Set up the OTLP exporter
otlp_exporter = OTLPSpanExporter(endpoint="http://localhost:4317")

# Set up a span processor and add it to the tracer provider
span_processor = BatchSpanProcessor(otlp_exporter)
trace.get_tracer_provider().add_span_processor(span_processor)

# Create a span
with tracer.start_as_current_span("example-span"):
print("Hello, World!")
8 changes: 4 additions & 4 deletions src/langtrace_python_sdk/instrumentation/groq/patch.py
@@ -104,10 +104,10 @@ def traced_method(wrapped, instance, args, kwargs):

# TODO(Karthik): Gotta figure out how to handle streaming with context
# with tracer.start_as_current_span(APIS["CHAT_COMPLETION"]["METHOD"],
# kind=SpanKind.CLIENT.value) as span:
# kind=SpanKind.CLIENT) as span:
span = tracer.start_span(
APIS["CHAT_COMPLETION"]["METHOD"],
kind=SpanKind.CLIENT.value,
kind=SpanKind.CLIENT,
context=set_span_in_context(trace.get_current_span()),
)
for field, value in attributes.model_dump(by_alias=True).items():
@@ -333,9 +333,9 @@ async def traced_method(wrapped, instance, args, kwargs):

# TODO(Karthik): Gotta figure out how to handle streaming with context
# with tracer.start_as_current_span(APIS["CHAT_COMPLETION"]["METHOD"],
# kind=SpanKind.CLIENT.value) as span:
# kind=SpanKind.CLIENT) as span:
span = tracer.start_span(
APIS["CHAT_COMPLETION"]["METHOD"], kind=SpanKind.CLIENT.value
APIS["CHAT_COMPLETION"]["METHOD"], kind=SpanKind.CLIENT
)
for field, value in attributes.model_dump(by_alias=True).items():
set_span_attribute(span, field, value)
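On the SpanKind.CLIENT.value to SpanKind.CLIENT change (the "Fix SpanKind" commit): the OpenTelemetry Python tracer expects the kind argument to be a SpanKind enum member, whereas .value passes its bare integer. A minimal sketch of the corrected usage, with illustrative tracer and span names:

from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.trace import SpanKind

trace.set_tracer_provider(TracerProvider())
tracer = trace.get_tracer("example")

# Pass the enum member itself, not SpanKind.CLIENT.value (an int).
with tracer.start_as_current_span("chat.completion", kind=SpanKind.CLIENT):
    pass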
14 changes: 7 additions & 7 deletions src/langtrace_python_sdk/instrumentation/openai/patch.py
@@ -65,7 +65,7 @@ def traced_method(wrapped, instance, args, kwargs):

with tracer.start_as_current_span(
APIS["IMAGES_GENERATION"]["METHOD"],
kind=SpanKind.CLIENT.value,
kind=SpanKind.CLIENT,
context=set_span_in_context(trace.get_current_span()),
) as span:
set_span_attributes(span, attributes)
@@ -128,7 +128,7 @@ async def traced_method(wrapped, instance, args, kwargs):

with tracer.start_as_current_span(
APIS["IMAGES_GENERATION"]["METHOD"],
kind=SpanKind.CLIENT.value,
kind=SpanKind.CLIENT,
context=set_span_in_context(trace.get_current_span()),
) as span:
set_span_attributes(span, attributes)
@@ -193,7 +193,7 @@ def traced_method(wrapped, instance, args, kwargs):

with tracer.start_as_current_span(
APIS["IMAGES_EDIT"]["METHOD"],
kind=SpanKind.CLIENT.value,
kind=SpanKind.CLIENT,
context=set_span_in_context(trace.get_current_span()),
) as span:
set_span_attributes(span, attributes)
@@ -283,7 +283,7 @@ def traced_method(wrapped, instance, args, kwargs):

span = tracer.start_span(
APIS["CHAT_COMPLETION"]["METHOD"],
kind=SpanKind.CLIENT.value,
kind=SpanKind.CLIENT,
context=set_span_in_context(trace.get_current_span()),
)
_set_input_attributes(span, kwargs, attributes)
@@ -377,7 +377,7 @@ async def traced_method(wrapped, instance, args, kwargs):

span = tracer.start_span(
APIS["CHAT_COMPLETION"]["METHOD"],
kind=SpanKind.CLIENT.value,
kind=SpanKind.CLIENT,
context=set_span_in_context(trace.get_current_span()),
)
_set_input_attributes(span, kwargs, attributes)
@@ -456,7 +456,7 @@ def traced_method(wrapped, instance, args, kwargs):

with tracer.start_as_current_span(
APIS["EMBEDDINGS_CREATE"]["METHOD"],
kind=SpanKind.CLIENT.value,
kind=SpanKind.CLIENT,
context=set_span_in_context(trace.get_current_span()),
) as span:

@@ -513,7 +513,7 @@ async def traced_method(wrapped, instance, args, kwargs):

with tracer.start_as_current_span(
APIS["EMBEDDINGS_CREATE"]["METHOD"],
kind=SpanKind.CLIENT.value,
kind=SpanKind.CLIENT,
context=set_span_in_context(trace.get_current_span()),
) as span:

2 changes: 1 addition & 1 deletion src/langtrace_python_sdk/version.py
@@ -1 +1 @@
__version__ = "2.2.4"
__version__ = "2.2.5"