From a5224f0aab3d72d3dbe1de5e120b686fc2c42920 Mon Sep 17 00:00:00 2001 From: Karthik Kalyanaraman Date: Wed, 24 Jul 2024 20:30:24 -0700 Subject: [PATCH 1/3] ENV var bugfixes, send user feedback fixes --- .github/workflows/release.yml | 57 +++++++++++++++--- pyproject.toml | 2 +- .../math_problems_cot_parallel.py | 22 +++---- src/examples/inspect_ai_example/basic_eval.py | 26 +++++--- .../openai_example/send_user_feedback.py | 40 +++++++++++++ src/examples/otlp_example/otlp_basic.py | 43 ++++++++++++++ .../otlp_example/otlp_with_langtrace.py | 59 +++++++++++++++++++ src/examples/routellm_example/basic.py | 41 +++++++++++++ .../extensions/langtrace_filesystem.py | 32 ++++++++-- .../instrumentation/cohere/patch.py | 4 +- .../instrumentation/crewai/instrumentation.py | 33 ++++++----- .../instrumentation/dspy/patch.py | 24 +++++++- .../instrumentation/groq/patch.py | 8 +-- .../instrumentation/openai/patch.py | 24 ++++---- src/langtrace_python_sdk/utils/llm.py | 4 +- .../utils/with_root_span.py | 18 +++++- src/langtrace_python_sdk/version.py | 2 +- 17 files changed, 364 insertions(+), 75 deletions(-) create mode 100644 src/examples/openai_example/send_user_feedback.py create mode 100644 src/examples/otlp_example/otlp_basic.py create mode 100644 src/examples/otlp_example/otlp_with_langtrace.py create mode 100644 src/examples/routellm_example/basic.py diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 235bbe88..7204fe4b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -21,17 +21,17 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: '3.x' + python-version: "3.x" - name: Install hatch - run: | + run: | pip install hatch - + - name: Get Version id: version - run: | + run: | echo "version=$(hatch version)" >> $GITHUB_OUTPUT - + publish: runs-on: ubuntu-latest needs: @@ -54,17 +54,58 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: '3.x' + python-version: "3.x" - name: Install hatch - run: | + run: | pip install hatch - name: Build SDK - run: | + run: | hatch build - name: Publish Python 🐍 distributions 📦 to PyPI uses: pypa/gh-action-pypi-publish@master with: password: ${{ secrets.PYPI_TOKEN }} + + post-release: + name: Post Release Actions + + runs-on: ubuntu-latest + if: ${{ always() }} + needs: + - generate-version + - publish + + steps: + - name: Checkout main branch + uses: actions/checkout@v4.1.7 + with: + ref: main + + - name: Slack - Success Message + uses: DSdatsme/slack-github-action@env_support + if: ${{ success() && needs.publish.result == 'success' }} + with: + channel-id: ${{ vars.SLACK_CHANNEL_ID }} + payload-file-path: ./.github/resources/slack-payloads/slack-message-template.json + env: + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + RELEASETAG: ${{ needs.generate-version.outputs.new_version }} + PRE_TEXT_MESSAGE: "Workflow Passed! :successkid:" + FALLBACK_MESSAGE: "Workflow Passed!" + COLOR: "good" + + - name: Slack - Failure Message + uses: DSdatsme/slack-github-action@env_support + if: ${{ failure() || needs.publish.result != 'success' }} + with: + channel-id: ${{ vars.SLACK_CHANNEL_ID }} + payload-file-path: ./.github/resources/slack-payloads/slack-message-template.json + env: + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + RELEASETAG: ${{ needs.generate-version.outputs.new_version }} + PRE_TEXT_MESSAGE: " Workflow Failed! :x:" + FALLBACK_MESSAGE: "Workflow Failed!" 
+ COLOR: "danger" diff --git a/pyproject.toml b/pyproject.toml index d833506f..f5ea9d76 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ classifiers=[ "Operating System :: OS Independent", ] dependencies = [ - 'trace-attributes>=6.0.0,<7.0.0', + 'trace-attributes==7.0.0', 'opentelemetry-api>=1.25.0', 'opentelemetry-sdk>=1.25.0', 'opentelemetry-instrumentation>=0.46b0', diff --git a/src/examples/dspy_example/math_problems_cot_parallel.py b/src/examples/dspy_example/math_problems_cot_parallel.py index 8c5fabf7..690683e8 100644 --- a/src/examples/dspy_example/math_problems_cot_parallel.py +++ b/src/examples/dspy_example/math_problems_cot_parallel.py @@ -1,11 +1,11 @@ +import contextvars import dspy from dspy.datasets.gsm8k import GSM8K, gsm8k_metric from dspy.teleprompt import BootstrapFewShot from concurrent.futures import ThreadPoolExecutor -from opentelemetry.context import get_current, attach, detach # flake8: noqa -from langtrace_python_sdk import langtrace, with_langtrace_root_span +from langtrace_python_sdk import langtrace, with_langtrace_root_span, inject_additional_attributes langtrace.init() @@ -22,7 +22,8 @@ def __init__(self): self.prog = dspy.ChainOfThought("question -> answer") def forward(self, question): - return self.prog(question=question) + result = inject_additional_attributes(lambda: self.prog(question=question), {'langtrace.span.name': 'MathProblemsCotParallel'}) + return result @with_langtrace_root_span(name="parallel_example") def example(): @@ -34,21 +35,12 @@ def example(): optimized_cot = teleprompter.compile(CoT(), trainset=gsm8k_trainset) questions = [ - "What is the cosine of 0?", - "What is the tangent of 0?", + "What is the sine of 0?", + "What is the tangent of 100?", ] - current_context = get_current() - - def run_with_context(context, func, *args, **kwargs): - token = attach(context) - try: - return func(*args, **kwargs) - finally: - detach(token) - with ThreadPoolExecutor(max_workers=2) as executor: - futures = [executor.submit(run_with_context, current_context, optimized_cot, question=q) for q in questions] + futures = [executor.submit(contextvars.copy_context().run, optimized_cot, question=q) for q in questions] for future in futures: ans = future.result() diff --git a/src/examples/inspect_ai_example/basic_eval.py b/src/examples/inspect_ai_example/basic_eval.py index be509111..6cc9e7ab 100644 --- a/src/examples/inspect_ai_example/basic_eval.py +++ b/src/examples/inspect_ai_example/basic_eval.py @@ -1,24 +1,34 @@ -# langtrace.init(write_spans_to_console=True) import fsspec +from dotenv import find_dotenv, load_dotenv from inspect_ai import Task, task -from inspect_ai.dataset import csv_dataset +from inspect_ai.dataset import csv_dataset, Sample from inspect_ai.scorer import model_graded_qa -from inspect_ai.solver import chain_of_thought, generate, self_critique - +from inspect_ai.solver import chain_of_thought, self_critique from langtrace_python_sdk.extensions.langtrace_filesystem import LangTraceFileSystem -# from langtrace_python_sdk import langtrace - +_ = load_dotenv(find_dotenv()) # Manually register the filesystem with fsspec # Note: This is only necessary because the filesystem is not registered. fsspec.register_implementation(LangTraceFileSystem.protocol, LangTraceFileSystem) +question = "What is the price?" 
+ + +def hydrate_with_question(record): + # add context to input + record["input"] = f"Context: {record['input']}\n question: {question}" + + return Sample( + input=record["input"], + target=record["target"], + ) + @task -def security_guide(): +def basic_eval(): return Task( - dataset=csv_dataset("langtracefs://clxc2mxu6000lpc7ntsvcjvp9"), + dataset=csv_dataset("langtracefs://clz0p4i1t000fwv0xjtlvkxyx"), plan=[chain_of_thought(), self_critique()], scorer=model_graded_qa(), ) diff --git a/src/examples/openai_example/send_user_feedback.py b/src/examples/openai_example/send_user_feedback.py new file mode 100644 index 00000000..a29198b8 --- /dev/null +++ b/src/examples/openai_example/send_user_feedback.py @@ -0,0 +1,40 @@ +from dotenv import find_dotenv, load_dotenv +from openai import OpenAI +from langtrace_python_sdk import langtrace, with_langtrace_root_span, SendUserFeedback + +_ = load_dotenv(find_dotenv()) + +# Initialize Langtrace SDK +langtrace.init() +client = OpenAI() + + +def api(span_id, trace_id): + response = client.chat.completions.create( + model="gpt-4o-mini", + messages=[ + {"role": "user", "content": "What is the best place to live in the US?"}, + ], + stream=False, + ) + + # Collect user feedback and send it to Langtrace + user_score = 1 # Example user score + user_id = 'user_1234' # Example user ID + data = { + "userScore": user_score, + "userId": user_id, + "spanId": span_id, + "traceId": trace_id + } + SendUserFeedback().evaluate(data=data) + + # Return the response + return response.choices[0].message.content + + +# wrap the API call with the Langtrace root span +wrapped_api = with_langtrace_root_span()(api) + +# Call the wrapped API +wrapped_api() diff --git a/src/examples/otlp_example/otlp_basic.py b/src/examples/otlp_example/otlp_basic.py new file mode 100644 index 00000000..48da9129 --- /dev/null +++ b/src/examples/otlp_example/otlp_basic.py @@ -0,0 +1,43 @@ +# Instructions +# 1. Run the OpenTelemetry Collector with the OTLP receiver enabled +# Create otel-config.yaml with the following content: +# receivers: +# otlp: +# protocols: +# grpc: +# endpoint: "0.0.0.0:4317" +# http: +# endpoint: "0.0.0.0:4318" + +# exporters: +# logging: +# loglevel: debug + +# service: +# pipelines: +# traces: +# receivers: [otlp] +# exporters: [logging] +# docker pull otel/opentelemetry-collector:latest +# docker run --rm -p 4317:4317 -p 4318:4318 -v $(pwd)/otel-config.yaml:/otel-config.yaml otel/opentelemetry-collector --config otel-config.yaml +# 2. Run the following code + +from opentelemetry import trace +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter + +# Set up the tracer provider +trace.set_tracer_provider(TracerProvider()) +tracer = trace.get_tracer(__name__) + +# Set up the OTLP exporter +otlp_exporter = OTLPSpanExporter(endpoint="http://localhost:4317") + +# Set up a span processor and add it to the tracer provider +span_processor = BatchSpanProcessor(otlp_exporter) +trace.get_tracer_provider().add_span_processor(span_processor) + +# Create a span +with tracer.start_as_current_span("example-span"): + print("Hello, World!") diff --git a/src/examples/otlp_example/otlp_with_langtrace.py b/src/examples/otlp_example/otlp_with_langtrace.py new file mode 100644 index 00000000..52851dfa --- /dev/null +++ b/src/examples/otlp_example/otlp_with_langtrace.py @@ -0,0 +1,59 @@ +# Instructions +# 1. 
Run the OpenTelemetry Collector with the OTLP receiver enabled +# Create otel-config.yaml with the following content: +# receivers: +# otlp: +# protocols: +# grpc: +# endpoint: "0.0.0.0:4317" +# http: +# endpoint: "0.0.0.0:4318" + +# exporters: +# logging: +# loglevel: debug + +# service: +# pipelines: +# traces: +# receivers: [otlp] +# exporters: [logging] +# docker pull otel/opentelemetry-collector:latest +# docker run --rm -p 4317:4317 -p 4318:4318 -v $(pwd)/otel-config.yaml:/otel-config.yaml otel/opentelemetry-collector --config otel-config.yaml +# 2. Run the following code + +from langtrace_python_sdk import langtrace +from openai import OpenAI +from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter + + +# Configure the OTLP exporter to use the correct endpoint and API key +otlp_endpoint = "http://localhost:4318/v1/traces" +otlp_exporter = OTLPSpanExporter( + endpoint=otlp_endpoint, + headers=(("Content-Type", "application/json"),)) +langtrace.init(custom_remote_exporter=otlp_exporter, batch=False) + + +def chat_with_openai(): + client = OpenAI() + messages = [ + { + "role": "user", + "content": "Hello, I'm a human.", + }, + ] + chat_completion = client.chat.completions.create( + messages=messages, + stream=False, + model="gpt-3.5-turbo", + ) + print(chat_completion.choices[0].message.content) + + +def main(): + chat_with_openai() + + +if __name__ == "__main__": + main() diff --git a/src/examples/routellm_example/basic.py b/src/examples/routellm_example/basic.py new file mode 100644 index 00000000..9f908fb0 --- /dev/null +++ b/src/examples/routellm_example/basic.py @@ -0,0 +1,41 @@ +import sys + +sys.path.insert(0, "/Users/karthikkalyanaraman/work/langtrace/langtrace-python-sdk/src") + +from langtrace_python_sdk import langtrace +from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span +from routellm.controller import Controller +from dotenv import load_dotenv + +load_dotenv() + +langtrace.init() + +# litellm.set_verbose=True +client = Controller( + routers=["mf"], + strong_model="claude-3-opus-20240229", + weak_model="claude-3-opus-20240229", +) + + +@with_langtrace_root_span("Routellm") +def Routellm(prompt): + try: + + response = client.chat.completions.create( + model="router-mf-0.11593", messages=[{"role": "user", "content": prompt}] + ) + + for chunk in response: + if hasattr(chunk, "choices"): + print(chunk.choices[0].delta.content or "", end="") + else: + print(chunk) + + except Exception as e: + print(f"An error occurred: {e}") + + +Routellm("what is the square root of 12182382932.99") +Routellm("Write me a short story") diff --git a/src/langtrace_python_sdk/extensions/langtrace_filesystem.py b/src/langtrace_python_sdk/extensions/langtrace_filesystem.py index 6506be40..c89f75aa 100644 --- a/src/langtrace_python_sdk/extensions/langtrace_filesystem.py +++ b/src/langtrace_python_sdk/extensions/langtrace_filesystem.py @@ -27,13 +27,25 @@ def __new__(cls, value): class LangTraceFile(io.BytesIO): - _host: str = os.environ.get("LANGTRACE_API_HOST", None) or LANGTRACE_REMOTE_URL def __init__(self, fs: "LangTraceFileSystem", path: str, mode: OpenMode): super().__init__() self.fs = fs self.path = path self.mode = mode + self._host: str = os.environ.get("LANGTRACE_API_HOST", LANGTRACE_REMOTE_URL) + self._api_key: str = os.environ.get("LANGTRACE_API_KEY", None) + if self._host.endswith("/api/trace"): + self._host = self._host.replace("/api/trace", "") + + if self._api_key is None: + print(Fore.RED) + print( + f"Missing Langtrace API key, 
proceed to {self._host} to create one" + ) + print("Set the API key as an environment variable LANGTRACE_API_KEY") + print(Fore.RESET) + return def close(self) -> None: if not self.closed: @@ -71,7 +83,7 @@ def upload_to_server(self, file_data: bytes) -> None: data=json.dumps(data), headers={ "Content-Type": "application/json", - "x-api-key": os.environ.get("LANGTRACE_API_KEY"), + "x-api-key": self._api_key, }, timeout=20, ) @@ -82,7 +94,6 @@ def upload_to_server(self, file_data: bytes) -> None: class LangTraceFileSystem(AbstractFileSystem): - _host: str = os.environ.get("LANGTRACE_API_HOST", None) or LANGTRACE_REMOTE_URL protocol = "langtracefs" sep = "/" @@ -90,6 +101,19 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.files = {} self.dirs = set() + self._host: str = os.environ.get("LANGTRACE_API_HOST", LANGTRACE_REMOTE_URL) + self._api_key: str = os.environ.get("LANGTRACE_API_KEY", None) + if self._host.endswith("/api/trace"): + self._host = self._host.replace("/api/trace", "") + + if self._api_key is None: + print(Fore.RED) + print( + f"Missing Langtrace API key, proceed to {self._host} to create one" + ) + print("Set the API key as an environment variable LANGTRACE_API_KEY") + print(Fore.RESET) + return def open( self, @@ -118,7 +142,7 @@ def fetch_file_from_api(self, dataset_id: str) -> bytes: url=f"{self._host}/api/dataset/download?id={dataset_id}", headers={ "Content-Type": "application/json", - "x-api-key": os.environ.get("LANGTRACE_API_KEY"), + "x-api-key": self._api_key, }, timeout=20, ) diff --git a/src/langtrace_python_sdk/instrumentation/cohere/patch.py b/src/langtrace_python_sdk/instrumentation/cohere/patch.py index 7fe72376..e3e26dc1 100644 --- a/src/langtrace_python_sdk/instrumentation/cohere/patch.py +++ b/src/langtrace_python_sdk/instrumentation/cohere/patch.py @@ -44,7 +44,7 @@ def traced_method(wrapped, instance, args, kwargs): span_attributes = { **get_langtrace_attributes(version, service_provider), - **get_llm_request_attributes(kwargs), + **get_llm_request_attributes(kwargs, operation_name="rerank"), **get_llm_url(instance), SpanAttributes.LLM_REQUEST_MODEL: kwargs.get("model") or "command-r-plus", SpanAttributes.LLM_URL: APIS["RERANK"]["URL"], @@ -121,7 +121,7 @@ def traced_method(wrapped, instance, args, kwargs): span_attributes = { **get_langtrace_attributes(version, service_provider), - **get_llm_request_attributes(kwargs), + **get_llm_request_attributes(kwargs, operation_name="embed"), **get_llm_url(instance), SpanAttributes.LLM_URL: APIS["EMBED"]["URL"], SpanAttributes.LLM_PATH: APIS["EMBED"]["ENDPOINT"], diff --git a/src/langtrace_python_sdk/instrumentation/crewai/instrumentation.py b/src/langtrace_python_sdk/instrumentation/crewai/instrumentation.py index e648b671..78cb1f87 100644 --- a/src/langtrace_python_sdk/instrumentation/crewai/instrumentation.py +++ b/src/langtrace_python_sdk/instrumentation/crewai/instrumentation.py @@ -33,21 +33,24 @@ def _instrument(self, **kwargs): tracer_provider = kwargs.get("tracer_provider") tracer = get_tracer(__name__, "", tracer_provider) version = v("crewai") - _W( - "crewai.crew", - "Crew.kickoff", - patch_crew("Crew.kickoff", version, tracer), - ) - _W( - "crewai.agent", - "Agent.execute_task", - patch_crew("Agent.execute_task", version, tracer), - ) - _W( - "crewai.task", - "Task.execute", - patch_crew("Task.execute", version, tracer), - ) + try: + _W( + "crewai.crew", + "Crew.kickoff", + patch_crew("Crew.kickoff", version, tracer), + ) + _W( + "crewai.agent", + "Agent.execute_task", + 
patch_crew("Agent.execute_task", version, tracer), + ) + _W( + "crewai.task", + "Task.execute", + patch_crew("Task.execute", version, tracer), + ) + except Exception as e: + pass def _uninstrument(self, **kwargs): pass diff --git a/src/langtrace_python_sdk/instrumentation/dspy/patch.py b/src/langtrace_python_sdk/instrumentation/dspy/patch.py index 181b276d..4b57fe16 100644 --- a/src/langtrace_python_sdk/instrumentation/dspy/patch.py +++ b/src/langtrace_python_sdk/instrumentation/dspy/patch.py @@ -61,8 +61,14 @@ def traced_method(wrapped, instance, args, kwargs): if config and len(config) > 0: span_attributes["dspy.optimizer.config"] = json.dumps(config) + # passed operation name + opname = operation_name + if extra_attributes is not None and "langtrace.span.name" in extra_attributes: + # append the operation name to the span name + opname = f"{operation_name}-{extra_attributes['langtrace.span.name']}" + attributes = FrameworkSpanAttributes(**span_attributes) - with tracer.start_as_current_span(operation_name, kind=SpanKind.CLIENT) as span: + with tracer.start_as_current_span(opname, kind=SpanKind.CLIENT) as span: _set_input_attributes(span, kwargs, attributes) try: @@ -100,6 +106,12 @@ def traced_method(wrapped, instance, args, kwargs): **(extra_attributes if extra_attributes is not None else {}), } + # passed operation name + opname = operation_name + if extra_attributes is not None and "langtrace.span.name" in extra_attributes: + # append the operation name to the span name + opname = f"{operation_name}-{extra_attributes['langtrace.span.name']}" + if instance.__class__.__name__: span_attributes["dspy.signature.name"] = instance.__class__.__name__ span_attributes["dspy.signature"] = str(instance) @@ -108,7 +120,7 @@ def traced_method(wrapped, instance, args, kwargs): span_attributes["dspy.signature.args"] = str(kwargs) attributes = FrameworkSpanAttributes(**span_attributes) - with tracer.start_as_current_span(operation_name, kind=SpanKind.CLIENT) as span: + with tracer.start_as_current_span(opname, kind=SpanKind.CLIENT) as span: _set_input_attributes(span, kwargs, attributes) try: @@ -147,6 +159,12 @@ def traced_method(wrapped, instance, args, kwargs): **(extra_attributes if extra_attributes is not None else {}), } + # passed operation name + opname = operation_name + if extra_attributes is not None and "langtrace.span.name" in extra_attributes: + # append the operation name to the span name + opname = f"{operation_name}-{extra_attributes['langtrace.span.name']}" + if hasattr(instance, "devset"): span_attributes["dspy.evaluate.devset"] = str(getattr(instance, "devset")) if hasattr(instance, "trainset"): @@ -175,7 +193,7 @@ def traced_method(wrapped, instance, args, kwargs): span_attributes["dspy.evaluate.args"] = str(args) attributes = FrameworkSpanAttributes(**span_attributes) - with tracer.start_as_current_span(operation_name, kind=SpanKind.CLIENT) as span: + with tracer.start_as_current_span(opname, kind=SpanKind.CLIENT) as span: _set_input_attributes(span, kwargs, attributes) try: diff --git a/src/langtrace_python_sdk/instrumentation/groq/patch.py b/src/langtrace_python_sdk/instrumentation/groq/patch.py index 82f345e6..9e19e51e 100644 --- a/src/langtrace_python_sdk/instrumentation/groq/patch.py +++ b/src/langtrace_python_sdk/instrumentation/groq/patch.py @@ -104,10 +104,10 @@ def traced_method(wrapped, instance, args, kwargs): # TODO(Karthik): Gotta figure out how to handle streaming with context # with tracer.start_as_current_span(APIS["CHAT_COMPLETION"]["METHOD"], - # 
kind=SpanKind.CLIENT.value) as span: + # kind=SpanKind.CLIENT) as span: span = tracer.start_span( APIS["CHAT_COMPLETION"]["METHOD"], - kind=SpanKind.CLIENT.value, + kind=SpanKind.CLIENT, context=set_span_in_context(trace.get_current_span()), ) for field, value in attributes.model_dump(by_alias=True).items(): @@ -333,9 +333,9 @@ async def traced_method(wrapped, instance, args, kwargs): # TODO(Karthik): Gotta figure out how to handle streaming with context # with tracer.start_as_current_span(APIS["CHAT_COMPLETION"]["METHOD"], - # kind=SpanKind.CLIENT.value) as span: + # kind=SpanKind.CLIENT) as span: span = tracer.start_span( - APIS["CHAT_COMPLETION"]["METHOD"], kind=SpanKind.CLIENT.value + APIS["CHAT_COMPLETION"]["METHOD"], kind=SpanKind.CLIENT ) for field, value in attributes.model_dump(by_alias=True).items(): set_span_attribute(span, field, value) diff --git a/src/langtrace_python_sdk/instrumentation/openai/patch.py b/src/langtrace_python_sdk/instrumentation/openai/patch.py index e72e0441..a70ca630 100644 --- a/src/langtrace_python_sdk/instrumentation/openai/patch.py +++ b/src/langtrace_python_sdk/instrumentation/openai/patch.py @@ -55,7 +55,7 @@ def traced_method(wrapped, instance, args, kwargs): service_provider = SERVICE_PROVIDERS["OPENAI"] span_attributes = { **get_langtrace_attributes(version, service_provider, vendor_type="llm"), - **get_llm_request_attributes(kwargs), + **get_llm_request_attributes(kwargs, operation_name="images_generate"), **get_llm_url(instance), SpanAttributes.LLM_PATH: APIS["IMAGES_GENERATION"]["ENDPOINT"], **get_extra_attributes(), @@ -65,7 +65,7 @@ def traced_method(wrapped, instance, args, kwargs): with tracer.start_as_current_span( APIS["IMAGES_GENERATION"]["METHOD"], - kind=SpanKind.CLIENT.value, + kind=SpanKind.CLIENT, context=set_span_in_context(trace.get_current_span()), ) as span: set_span_attributes(span, attributes) @@ -118,7 +118,7 @@ async def traced_method(wrapped, instance, args, kwargs): span_attributes = { **get_langtrace_attributes(version, service_provider, vendor_type="llm"), - **get_llm_request_attributes(kwargs), + **get_llm_request_attributes(kwargs, operation_name="images_generate"), **get_llm_url(instance), SpanAttributes.LLM_PATH: APIS["IMAGES_GENERATION"]["ENDPOINT"], **get_extra_attributes(), @@ -128,7 +128,7 @@ async def traced_method(wrapped, instance, args, kwargs): with tracer.start_as_current_span( APIS["IMAGES_GENERATION"]["METHOD"], - kind=SpanKind.CLIENT.value, + kind=SpanKind.CLIENT, context=set_span_in_context(trace.get_current_span()), ) as span: set_span_attributes(span, attributes) @@ -181,7 +181,7 @@ def traced_method(wrapped, instance, args, kwargs): span_attributes = { **get_langtrace_attributes(version, service_provider, vendor_type="llm"), - **get_llm_request_attributes(kwargs), + **get_llm_request_attributes(kwargs, operation_name="images_edit"), **get_llm_url(instance), SpanAttributes.LLM_PATH: APIS["IMAGES_EDIT"]["ENDPOINT"], SpanAttributes.LLM_RESPONSE_FORMAT: kwargs.get("response_format"), @@ -193,7 +193,7 @@ def traced_method(wrapped, instance, args, kwargs): with tracer.start_as_current_span( APIS["IMAGES_EDIT"]["METHOD"], - kind=SpanKind.CLIENT.value, + kind=SpanKind.CLIENT, context=set_span_in_context(trace.get_current_span()), ) as span: set_span_attributes(span, attributes) @@ -283,7 +283,7 @@ def traced_method(wrapped, instance, args, kwargs): span = tracer.start_span( APIS["CHAT_COMPLETION"]["METHOD"], - kind=SpanKind.CLIENT.value, + kind=SpanKind.CLIENT, 
context=set_span_in_context(trace.get_current_span()), ) _set_input_attributes(span, kwargs, attributes) @@ -377,7 +377,7 @@ async def traced_method(wrapped, instance, args, kwargs): span = tracer.start_span( APIS["CHAT_COMPLETION"]["METHOD"], - kind=SpanKind.CLIENT.value, + kind=SpanKind.CLIENT, context=set_span_in_context(trace.get_current_span()), ) _set_input_attributes(span, kwargs, attributes) @@ -432,7 +432,7 @@ def traced_method(wrapped, instance, args, kwargs): span_attributes = { **get_langtrace_attributes(version, service_provider, vendor_type="llm"), - **get_llm_request_attributes(kwargs), + **get_llm_request_attributes(kwargs, operation_name="embed"), **get_llm_url(instance), SpanAttributes.LLM_PATH: APIS["EMBEDDINGS_CREATE"]["ENDPOINT"], SpanAttributes.LLM_REQUEST_DIMENSIONS: kwargs.get("dimensions"), @@ -456,7 +456,7 @@ def traced_method(wrapped, instance, args, kwargs): with tracer.start_as_current_span( APIS["EMBEDDINGS_CREATE"]["METHOD"], - kind=SpanKind.CLIENT.value, + kind=SpanKind.CLIENT, context=set_span_in_context(trace.get_current_span()), ) as span: @@ -490,7 +490,7 @@ async def traced_method(wrapped, instance, args, kwargs): span_attributes = { **get_langtrace_attributes(version, service_provider, vendor_type="llm"), - **get_llm_request_attributes(kwargs), + **get_llm_request_attributes(kwargs, operation_name="embed"), SpanAttributes.LLM_PATH: APIS["EMBEDDINGS_CREATE"]["ENDPOINT"], SpanAttributes.LLM_REQUEST_DIMENSIONS: kwargs.get("dimensions"), **get_extra_attributes(), @@ -513,7 +513,7 @@ async def traced_method(wrapped, instance, args, kwargs): with tracer.start_as_current_span( APIS["EMBEDDINGS_CREATE"]["METHOD"], - kind=SpanKind.CLIENT.value, + kind=SpanKind.CLIENT, context=set_span_in_context(trace.get_current_span()), ) as span: diff --git a/src/langtrace_python_sdk/utils/llm.py b/src/langtrace_python_sdk/utils/llm.py index 965f2324..bb00d18f 100644 --- a/src/langtrace_python_sdk/utils/llm.py +++ b/src/langtrace_python_sdk/utils/llm.py @@ -88,10 +88,11 @@ def get_langtrace_attributes(version, service_provider, vendor_type="llm"): SpanAttributes.LANGTRACE_SERVICE_VERSION: version, SpanAttributes.LANGTRACE_SERVICE_NAME: service_provider, SpanAttributes.LANGTRACE_SERVICE_TYPE: vendor_type, + SpanAttributes.LLM_SYSTEM: service_provider, } -def get_llm_request_attributes(kwargs, prompts=None, model=None): +def get_llm_request_attributes(kwargs, prompts=None, model=None, operation_name="chat"): user = kwargs.get("user", None) if prompts is None: @@ -110,6 +111,7 @@ def get_llm_request_attributes(kwargs, prompts=None, model=None): top_p = kwargs.get("p", None) or kwargs.get("top_p", None) tools = kwargs.get("tools", None) return { + SpanAttributes.LLM_OPERATION_NAME: operation_name, SpanAttributes.LLM_REQUEST_MODEL: model or kwargs.get("model"), SpanAttributes.LLM_IS_STREAMING: kwargs.get("stream"), SpanAttributes.LLM_REQUEST_TEMPERATURE: kwargs.get("temperature"), diff --git a/src/langtrace_python_sdk/utils/with_root_span.py b/src/langtrace_python_sdk/utils/with_root_span.py index 79d0bf81..6fcb0279 100644 --- a/src/langtrace_python_sdk/utils/with_root_span.py +++ b/src/langtrace_python_sdk/utils/with_root_span.py @@ -25,6 +25,9 @@ from opentelemetry.trace import SpanKind from opentelemetry.trace.propagation import set_span_in_context +from langtrace_python_sdk.constants.exporter.langtrace_exporter import ( + LANGTRACE_REMOTE_URL, +) from langtrace_python_sdk.constants.instrumentation.common import ( LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY, ) @@ -142,7 +145,10 
@@ class SendUserFeedback: _langtrace_api_key: str def __init__(self): - self._langtrace_host = os.environ["LANGTRACE_API_HOST"] + self._langtrace_host = os.environ.get("LANGTRACE_API_HOST", LANGTRACE_REMOTE_URL) + # When the host is set to /api/trace, remove the /api/trace + if self._langtrace_host.endswith("/api/trace"): + self._langtrace_host = self._langtrace_host.replace("/api/trace", "") self._langtrace_api_key = os.environ.get("LANGTRACE_API_KEY", None) def evaluate(self, data: EvaluationAPIData) -> None: @@ -155,6 +161,16 @@ def evaluate(self, data: EvaluationAPIData) -> None: print("Set the API key as an environment variable LANGTRACE_API_KEY") print(Fore.RESET) return + + # convert spanId and traceId to hexadecimals + span_hex_number = hex(int(data["spanId"], 10))[2:] # Convert to hex and remove the '0x' prefix + formatted_span_hex_number = span_hex_number.zfill(16) # Pad with zeros to 16 characters + data["spanId"] = f"0x{formatted_span_hex_number}" + + trace_hex_number = hex(int(data["traceId"], 10))[2:] # Convert to hex and remove the '0x' prefix + formatted_trace_hex_number = trace_hex_number.zfill(32) # Pad with zeros to 32 characters + data["traceId"] = f"0x{formatted_trace_hex_number}" + evaluation = self.get_evaluation(data["spanId"]) headers = {"x-api-key": self._langtrace_api_key} if evaluation is not None: diff --git a/src/langtrace_python_sdk/version.py b/src/langtrace_python_sdk/version.py index ba51cedf..90a1f38f 100644 --- a/src/langtrace_python_sdk/version.py +++ b/src/langtrace_python_sdk/version.py @@ -1 +1 @@ -__version__ = "2.2.2" +__version__ = "2.2.7" From 68d59678142883768593f295cc5501a30e9c8744 Mon Sep 17 00:00:00 2001 From: Karthik Kalyanaraman Date: Wed, 24 Jul 2024 20:30:44 -0700 Subject: [PATCH 2/3] Slack template --- .../slack-message-template.json | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 .github/resources/slack-payloads/slack-message-template.json diff --git a/.github/resources/slack-payloads/slack-message-template.json b/.github/resources/slack-payloads/slack-message-template.json new file mode 100644 index 00000000..ea175a3f --- /dev/null +++ b/.github/resources/slack-payloads/slack-message-template.json @@ -0,0 +1,22 @@ +{ + "attachments": [ + { + "pretext": "{{ env.PRE_TEXT_MESSAGE }}", + "fallback": "{{ env.FALLBACK_MESSAGE }}", + "color": "{{ env.COLOR }}", + "author_name": "{{ github.workflow }}", + "author_link": "{{ env.GITHUB_SERVER_URL }}/{{ env.GITHUB_REPOSITORY }}/actions/runs/{{ env.GITHUB_RUN_ID }}", + "title": "{{ env.GITHUB_REPOSITORY }}", + "title_link": "{{ env.GITHUB_SERVER_URL }}/{{ env.GITHUB_REPOSITORY }}", + "fields": [ + { + "title": "Release Tag", + "short": true, + "value": "{{ env.RELEASETAG }}" + } + ], + "footer": "deployed by: {{ github.actor }}", + "footer_icon": "https://github.githubassets.com/images/modules/logos_page/GitHub-Mark.png" + } + ] +} From 08e52947001f7b4d6f5f2470ed079e1f3a613d03 Mon Sep 17 00:00:00 2001 From: Karthik Kalyanaraman Date: Wed, 24 Jul 2024 20:35:40 -0700 Subject: [PATCH 3/3] Bump version --- src/langtrace_python_sdk/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/langtrace_python_sdk/version.py b/src/langtrace_python_sdk/version.py index 90a1f38f..23bc6ef5 100644 --- a/src/langtrace_python_sdk/version.py +++ b/src/langtrace_python_sdk/version.py @@ -1 +1 @@ -__version__ = "2.2.7" +__version__ = "2.2.8"
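
Notes on the patterns introduced above. All snippets below are illustrative sketches, not code from the patches.

Context propagation in math_problems_cot_parallel.py: the manual get_current()/attach()/detach() sequence is replaced with contextvars.copy_context(). OpenTelemetry stores the active span in a context variable, so snapshotting the submitting thread's context and running the callable inside that snapshot keeps each worker's spans parented under the root span. A minimal standalone sketch of the pattern (run_in_parallel is a hypothetical helper, not part of the SDK):

import contextvars
from concurrent.futures import ThreadPoolExecutor


def run_in_parallel(fn, inputs, max_workers=2):
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        # copy_context() must be called in the submitting thread so each
        # worker inherits the caller's context, including the active span.
        futures = [
            executor.submit(contextvars.copy_context().run, fn, item)
            for item in inputs
        ]
        return [future.result() for future in futures]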
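
The spanId/traceId handling in SendUserFeedback.evaluate assumes the caller passes the decimal IDs that with_langtrace_root_span hands to the wrapped function, and re-encodes them as zero-padded hex strings: 16 hex characters for the 64-bit span ID, 32 for the 128-bit trace ID. The same conversion, condensed with format specifiers (a sketch equivalent to the zfill logic in the patch):

def format_span_id(span_id) -> str:
    # 64-bit span ID -> "0x" plus 16 zero-padded hex characters
    return f"0x{int(span_id):016x}"


def format_trace_id(trace_id) -> str:
    # 128-bit trace ID -> "0x" plus 32 zero-padded hex characters
    return f"0x{int(trace_id):032x}"

For example, format_span_id(81985529216486895) returns "0x0123456789abcdef".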
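
The LANGTRACE_API_HOST normalization (default to LANGTRACE_REMOTE_URL, then strip a trailing "/api/trace") now appears in LangTraceFile, LangTraceFileSystem, and SendUserFeedback. A shared helper would keep the three copies from drifting; a possible extraction (the helper name is hypothetical, the import path is the one used in with_root_span.py):

import os

from langtrace_python_sdk.constants.exporter.langtrace_exporter import (
    LANGTRACE_REMOTE_URL,
)


def resolve_langtrace_host() -> str:
    host = os.environ.get("LANGTRACE_API_HOST", LANGTRACE_REMOTE_URL)
    # Users often set the host to the full trace-ingest URL; stripping the
    # path yields a base host that works for every API route.
    if host.endswith("/api/trace"):
        host = host[: -len("/api/trace")]
    return host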
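
Likewise, the span-renaming block in dspy/patch.py is repeated verbatim in all three traced methods; it reduces to one helper (name hypothetical):

def resolve_span_name(operation_name, extra_attributes):
    # Append the user-supplied name, set via inject_additional_attributes
    # with the "langtrace.span.name" key, so parallel invocations of the
    # same module are distinguishable in the trace view.
    if extra_attributes and "langtrace.span.name" in extra_attributes:
        return f"{operation_name}-{extra_attributes['langtrace.span.name']}"
    return operation_name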
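
The new try/except around the CrewAI wrappers fails open but swallows the exception entirely (and the bound variable e is unused). Logging at debug level keeps instrumentation failures from breaking the host application while leaving a trail when spans go missing; a sketch under that assumption (safe_instrument is illustrative, not SDK code):

import logging

logger = logging.getLogger(__name__)


def safe_instrument(register_wrappers) -> None:
    # register_wrappers performs the wrapt registrations; instrumentation
    # must never crash the host app, so failures are logged, not raised.
    try:
        register_wrappers()
    except Exception:
        logger.debug("CrewAI instrumentation failed", exc_info=True)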