-
Notifications
You must be signed in to change notification settings - Fork 20
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: support llama-index new instrumentation paradigm under feature …
…flag `use_experimental_instrumentation` (#462)
- Loading branch information
1 parent
f3ab3a4
commit e254928
Showing
12 changed files
with
1,375 additions
and
380 deletions.
There are no files selected for viewing
34 changes: 34 additions & 0 deletions
34
...trumentation/openinference-instrumentation-llama-index/examples/agent_calculator_tools.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,34 @@ | ||
from llama_index.agent.openai import OpenAIAgent | ||
from llama_index.core import Settings | ||
from llama_index.core.tools import FunctionTool | ||
from llama_index.llms.openai import OpenAI | ||
from openinference.instrumentation.llama_index import LlamaIndexInstrumentor | ||
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter | ||
from opentelemetry.sdk import trace as trace_sdk | ||
from opentelemetry.sdk.trace.export import SimpleSpanProcessor | ||
|
||
# Ship spans to a local Phoenix collector over OTLP/HTTP.
endpoint = "http://127.0.0.1:6006/v1/traces"
tracer_provider = trace_sdk.TracerProvider()
exporter = OTLPSpanExporter(endpoint)
tracer_provider.add_span_processor(SimpleSpanProcessor(exporter))

# Wire LlamaIndex tracing into the provider configured above.
LlamaIndexInstrumentor().instrument(tracer_provider=tracer_provider)
|
||
|
||
def multiply(a: int, b: int) -> int:
    """Multiply two integers and return the resulting integer."""
    # NOTE: this docstring doubles as the tool description that
    # FunctionTool.from_defaults exposes to the LLM, so keep it accurate.
    return a * b
|
||
|
||
def add(a: int, b: int) -> int:
    """Return the sum of the two integers *a* and *b*."""
    total = a + b
    return total
|
||
|
||
# Wrap the plain Python functions as LlamaIndex tools; the function
# docstrings become the tool descriptions the agent sees.
multiply_tool = FunctionTool.from_defaults(fn=multiply)
add_tool = FunctionTool.from_defaults(fn=add)
agent = OpenAIAgent.from_tools([multiply_tool, add_tool])
# NOTE(review): Settings.llm is assigned *after* the agent is constructed;
# presumably OpenAIAgent picks a default LLM at construction time — confirm
# whether this assignment actually affects the agent above.
Settings.llm = OpenAI(model="gpt-3.5-turbo")

if __name__ == "__main__":
    # A query that forces two tool calls: multiply first, then add.
    response = agent.query("What is (121 * 3) + 42?")
    print(response)
21 changes: 14 additions & 7 deletions
21
...instrumentation/openinference-instrumentation-llama-index/examples/chroma_query_engine.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
353 changes: 0 additions & 353 deletions
353
...openinference-instrumentation-llama-index/examples/data/paul_graham/paul_graham_essay.txt
This file was deleted.
Oops, something went wrong.
57 changes: 57 additions & 0 deletions
57
.../instrumentation/openinference-instrumentation-llama-index/examples/query_engine_tools.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,57 @@ | ||
import pandas as pd | ||
import wikipedia | ||
from llama_index.core import Document, Settings | ||
from llama_index.core.indices import VectorStoreIndex | ||
from llama_index.core.query_engine import NLSQLTableQueryEngine, RouterQueryEngine | ||
from llama_index.core.selectors import LLMSingleSelector | ||
from llama_index.core.tools import QueryEngineTool | ||
from llama_index.core.utilities.sql_wrapper import SQLDatabase | ||
from llama_index.llms.openai import OpenAI | ||
from openinference.instrumentation.llama_index import LlamaIndexInstrumentor | ||
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter | ||
from opentelemetry.sdk import trace as trace_sdk | ||
from opentelemetry.sdk.trace.export import SimpleSpanProcessor | ||
from sqlalchemy import create_engine | ||
|
||
# Configure an OTLP/HTTP exporter pointed at a local Phoenix instance,
# then attach it to a fresh tracer provider.
endpoint = "http://127.0.0.1:6006/v1/traces"
tracer_provider = trace_sdk.TracerProvider()
tracer_provider.add_span_processor(
    SimpleSpanProcessor(OTLPSpanExporter(endpoint))
)

# Instrument LlamaIndex so all engine/tool calls emit spans.
LlamaIndexInstrumentor().instrument(tracer_provider=tracer_provider)
|
||
# Load a public camera-spec dataset into an in-memory SQLite table
# that the NL-to-SQL engine can query.
engine = create_engine("sqlite:///:memory:")
cameras_df = pd.read_parquet(
    "https://storage.googleapis.com/arize-phoenix-assets/datasets/structured/camera-info/cameras.parquet"
)
cameras_df.to_sql("cameras", engine, index=False)

sql_database = SQLDatabase(engine, include_tables=["cameras"])
sql_query_engine = NLSQLTableQueryEngine(
    sql_database=sql_database,
    tables=["cameras"],
)
sql_tool = QueryEngineTool.from_defaults(
    query_engine=sql_query_engine,
    # The description tells the router which questions belong to this tool.
    description=(
        "Useful for translating a natural language query into a SQL query over"
        " a table containing technical details about specific digital camera models: Model,"
        " Release date, Max resolution, Low resolution, Effective pixels, Zoom wide (W),"
        " Zoom tele (T), Normal focus range, Macro focus range, Storage included,"
        " Weight (inc. batteries), Dimensions, Price"
    ),
)
|
||
# Index one Wikipedia article for free-form questions about digital cameras.
page = wikipedia.page(pageid=52797)
wiki_document = Document(id_=page.pageid, text=page.content)
vector_index = VectorStoreIndex.from_documents([wiki_document])
vector_tool = QueryEngineTool.from_defaults(
    query_engine=vector_index.as_query_engine(),
    description="Useful for answering generic questions about digital cameras.",
)

# The router picks between the SQL tool and the vector tool per query.
query_engine = RouterQueryEngine(
    selector=LLMSingleSelector.from_defaults(),
    query_engine_tools=[sql_tool, vector_tool],
)
Settings.llm = OpenAI(model="gpt-3.5-turbo")
|
||
if __name__ == "__main__":
    # One question per tool: the first routes to SQL, the second to the vector index.
    for question in (
        "What is the most expensive digital camera?",
        "Tell me about the history of digital camera sensors.",
    ):
        print(str(query_engine.query(question)))
8 changes: 6 additions & 2 deletions
8
python/instrumentation/openinference-instrumentation-llama-index/examples/requirements.txt
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,6 +1,10 @@ | ||
chromadb | ||
llama-index >= 0.10.0 | ||
llama-index-agent-openai | ||
llama-index-llms-openai | ||
llama-index-vector-stores-chroma | ||
openinference-instrumentation-llama-index | ||
opentelemetry-sdk | ||
opentelemetry-exporter-otlp | ||
chromadb | ||
opentelemetry-sdk | ||
sqlalchemy | ||
wikipedia |
31 changes: 31 additions & 0 deletions
31
python/instrumentation/openinference-instrumentation-llama-index/examples/streaming.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,31 @@ | ||
import tempfile | ||
from urllib.request import urlretrieve | ||
|
||
from llama_index.core import Settings, SimpleDirectoryReader, VectorStoreIndex | ||
from llama_index.llms.openai import OpenAI | ||
from openinference.instrumentation.llama_index import LlamaIndexInstrumentor | ||
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter | ||
from opentelemetry.sdk import trace as trace_sdk | ||
from opentelemetry.sdk.trace.export import SimpleSpanProcessor | ||
|
||
# Local Phoenix collector endpoint; spans are exported synchronously.
endpoint = "http://127.0.0.1:6006/v1/traces"
tracer_provider = trace_sdk.TracerProvider()
span_processor = SimpleSpanProcessor(OTLPSpanExporter(endpoint))
tracer_provider.add_span_processor(span_processor)

# Enable LlamaIndex instrumentation against this provider.
LlamaIndexInstrumentor().instrument(tracer_provider=tracer_provider)
|
||
|
||
# Download the sample essay into a temp file and load it as documents.
# (The file is read inside the `with` block, before the temp file is deleted.)
essay_url = (
    "https://raw.githubusercontent.com/run-llama/llama_index/main/"
    "docs/docs/examples/data/paul_graham/paul_graham_essay.txt"
)
with tempfile.NamedTemporaryFile() as tf:
    urlretrieve(essay_url, tf.name)
    documents = SimpleDirectoryReader(input_files=[tf.name]).load_data()

index = VectorStoreIndex.from_documents(documents)
Settings.llm = OpenAI(model="gpt-3.5-turbo")
|
||
if __name__ == "__main__":
    # Stream the answer token-by-token instead of waiting for the full response.
    engine = index.as_query_engine(streaming=True, similarity_top_k=1)
    stream = engine.query("What did the author do growing up?")
    stream.print_response_stream()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.