From 59b6cfd6bf9fa99708ce51a43882ca4b9193482d Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Fri, 25 Jul 2025 18:04:28 -0700 Subject: [PATCH 01/45] successfully implement exact/vector search --- .../chains/graph_qa/arangodb.py | 194 ++++++++++++++---- 1 file changed, 156 insertions(+), 38 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index e1f0d73..60635df 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -13,6 +13,7 @@ from langchain_core.messages import AIMessage from langchain_core.prompts import BasePromptTemplate from langchain_core.runnables import Runnable +from langchain_openai import OpenAIEmbeddings from pydantic import Field from langchain_arangodb.chains.graph_qa.prompts import ( @@ -90,9 +91,10 @@ class ArangoGraphQAChain(Chain): See https://python.langchain.com/docs/security for more information. 
""" - def __init__(self, **kwargs: Any) -> None: + def __init__(self, embedding: Optional[OpenAIEmbeddings] = None, **kwargs: Any) -> None: """Initialize the chain.""" super().__init__(**kwargs) + self.embedding = embedding if self.allow_dangerous_requests is not True: raise ValueError( "In order to use this chain, you must acknowledge that it can make " @@ -127,6 +129,8 @@ def _chain_type(self) -> str: def from_llm( cls, llm: BaseLanguageModel, + embedding: OpenAIEmbeddings, + enable_query_cache: bool = True, *, qa_prompt: Optional[BasePromptTemplate] = None, aql_generation_prompt: Optional[BasePromptTemplate] = None, @@ -161,6 +165,11 @@ def from_llm( if aql_fix_prompt is None: aql_fix_prompt = AQL_FIX_PROMPT + if enable_query_cache and embedding is None: + raise ValueError("Cannot enable query cache without passing **embedding**") + if embedding and not enable_query_cache: + raise ValueError("You passed an embedding, but you did not enable Query Cache usage.") + qa_chain = qa_prompt | llm aql_generation_chain = aql_generation_prompt | llm aql_fix_chain = aql_fix_prompt | llm @@ -169,6 +178,7 @@ def from_llm( qa_chain=qa_chain, aql_generation_chain=aql_generation_chain, aql_fix_chain=aql_fix_chain, + embedding=embedding, **kwargs, ) @@ -432,56 +442,164 @@ def _call( aql_generation_attempt = 1 aql_execution_func = ( - self.graph.query if self.execute_aql_query else self.graph.explain + self.graph.query if self.execute_aql_query else self.graph.explain ) - while ( - aql_result is None - and aql_generation_attempt < self.max_aql_generation_attempts + 1 - ): - if isinstance(aql_generation_output, str): - aql_generation_output_content = aql_generation_output - elif isinstance(aql_generation_output, AIMessage): - aql_generation_output_content = str(aql_generation_output.content) - else: - m = f"Invalid AQL Generation Output: {aql_generation_output} (type: {type(aql_generation_output)})" # noqa: E501 - raise ValueError(m) + if use_query_cache: + + ###################### 
+ # Check Query Cache # + ###################### - ##################### - # Extract AQL Query # - ##################### + # Exact Search + exact_search_check = list(self.graph.db.collection("Queries").find({"text": user_input}, limit=1)) + if len(exact_search_check) == 1: + cached_query = exact_search_check[0]["aql"] + print("!!!using exact search!!!") + else: + # Vector Search + query_embedding = self.embedding.embed_query(user_input) + vector_search_check = """ + FOR q IN Queries + LET score = COSINE_SIMILARITY(q.embedding, @query_embedding) + SORT score DESC + LIMIT 1 + FILTER score > @score_threshold + RETURN q.aql + """ + vector_result = list(self.graph.db.aql.execute(vector_search_check, bind_vars={"query_embedding": query_embedding, "score_threshold": 0.80})) + cached_query = vector_result[0] if vector_result else None + if cached_query: + print("!!!using vector search!!!") + + if not use_query_cache or not cached_query: + print("!!!using aql generation!!!") + + ###################### + # Generate AQL Query # + ###################### - pattern = r"```(?i:aql)?(.*?)```" - matches: List[str] = re.findall( - pattern, aql_generation_output_content, re.DOTALL + aql_generation_output = self.aql_generation_chain.invoke( + { + "adb_schema": self.graph.schema_yaml, + "aql_examples": self.aql_examples, + "user_input": user_input, + }, + callbacks=callbacks, ) - if not matches: - _run_manager.on_text( - "Invalid Response: ", end="\n", verbose=self.verbose + aql_query = "" + aql_error = "" + aql_result = None + aql_generation_attempt = 1 + + while ( + aql_result is None + and aql_generation_attempt < self.max_aql_generation_attempts + 1 + ): + if isinstance(aql_generation_output, str): + aql_generation_output_content = aql_generation_output + elif isinstance(aql_generation_output, AIMessage): + aql_generation_output_content = str(aql_generation_output.content) + else: + m = f"Invalid AQL Generation Output: {aql_generation_output} (type: {type(aql_generation_output)})" 
# noqa: E501 + raise ValueError(m) + + ##################### + # Extract AQL Query # + ##################### + + pattern = r"```(?i:aql)?(.*?)```" + matches: List[str] = re.findall( + pattern, aql_generation_output_content, re.DOTALL ) + if not matches: + _run_manager.on_text( + "Invalid Response: ", end="\n", verbose=self.verbose + ) + + _run_manager.on_text( + aql_generation_output_content, + color="red", + end="\n", + verbose=self.verbose, + ) + + m = f"Unable to extract AQL Query from response: {aql_generation_output_content}" # noqa: E501 + raise ValueError(m) + + aql_query = matches[0].strip() + + if self.force_read_only_query: + is_read_only, write_operation = self._is_read_only_query(aql_query) + + if not is_read_only: + error_msg = f""" + Security violation: Write operations are not allowed. + Detected write operation in query: {write_operation} + """ + raise ValueError(error_msg) + _run_manager.on_text( - aql_generation_output_content, - color="red", - end="\n", - verbose=self.verbose, + f"AQL Query ({aql_generation_attempt}):", verbose=self.verbose + ) + _run_manager.on_text( + aql_query, color="green", end="\n", verbose=self.verbose ) - m = f"Unable to extract AQL Query from response: {aql_generation_output_content}" # noqa: E501 + ############################# + # Execute/Explain AQL Query # + ############################# + + try: + aql_result = aql_execution_func(aql_query, params) + except (AQLQueryExecuteError, AQLQueryExplainError) as e: + aql_error = str(e.error_message) + + _run_manager.on_text( + "AQL Query Execution Error: ", end="\n", verbose=self.verbose + ) + _run_manager.on_text( + aql_error, color="yellow", end="\n\n", verbose=self.verbose + ) + + ######################## + # Retry AQL Generation # + ######################## + + aql_generation_output = self.aql_fix_chain.invoke( + { + "adb_schema": self.graph.schema_yaml, + "aql_query": aql_query, + "aql_error": aql_error, + }, + callbacks=callbacks, + ) + + aql_generation_attempt += 1 + 
+ if aql_result is None: + m = f""" + Maximum amount of AQL Query Generation attempts reached. + Unable to execute the AQL Query due to the following error: + {aql_error} + """ raise ValueError(m) - aql_query = matches[0].strip() + ###################### + # Store Query in Cache # + ###################### - if self.force_read_only_query: - is_read_only, write_operation = self._is_read_only_query(aql_query) + self.graph.db.collection("Queries").insert({ + "text": user_input, + "embedding": query_embedding, + "aql": aql_query, + }) - if not is_read_only: - error_msg = f""" - Security violation: Write operations are not allowed. - Detected write operation in query: {write_operation} - """ - raise ValueError(error_msg) + if use_query_cache and cached_query: + aql_query = cached_query + aql_result = aql_execution_func(aql_query, params) query_message = f"AQL Query ({aql_generation_attempt})\n" if cached_query: @@ -564,7 +682,7 @@ def _call( callbacks=callbacks, ) - results: Dict[str, Any] = {self.output_key: result} + results: Dict[str, Any] = {self.output_key: result.content} if self.return_aql_query: results["aql_query"] = aql_generation_output @@ -592,4 +710,4 @@ def _is_read_only_query(self, aql_query: str) -> Tuple[bool, Optional[str]]: if op in normalized_query: return False, op - return True, None + return True, None \ No newline at end of file From 851b7bc9ec814eacf806331465ff2f229b9b5ab1 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Fri, 25 Jul 2025 21:00:31 -0700 Subject: [PATCH 02/45] add unit tests --- .../tests/unit_tests/chains/test_graph_qa.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py b/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py index b6312af..b1ca1eb 100644 --- a/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py +++ b/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py @@ -27,6 +27,16 @@ def __init__(self) -> None: self.explains_run = [] # 
type: ignore self.refreshed = False self.graph_documents_added = [] # type: ignore + + # Mock the database interface + self.db = Mock() + self.db.collection = Mock() + mock_queries_collection = Mock() + mock_queries_collection.find = Mock(return_value=[]) + mock_queries_collection.insert = Mock() + self.db.collection.return_value = mock_queries_collection + self.db.aql = Mock() + self.db.aql.execute = Mock(return_value=[]) # Mock the database interface self.__db = Mock() @@ -117,7 +127,7 @@ def batch(self, *args, **kwargs) -> List[Any]: # type: ignore return [] qa_chain = CompliantRunnable() - qa_chain.invoke = MagicMock(return_value="This is a test answer") # type: ignore + qa_chain.invoke = MagicMock(return_value=AIMessage(content="This is a test answer")) # type: ignore aql_generation_chain = CompliantRunnable() aql_generation_chain.invoke = MagicMock( # type: ignore From 832f852e8057f04b49d882cd4cbd4301a9fdeb52 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Fri, 25 Jul 2025 21:04:09 -0700 Subject: [PATCH 03/45] improve error handling --- .../chains/graph_qa/arangodb.py | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 60635df..88525c0 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -129,8 +129,8 @@ def _chain_type(self) -> str: def from_llm( cls, llm: BaseLanguageModel, - embedding: OpenAIEmbeddings, - enable_query_cache: bool = True, + embedding: Optional[OpenAIEmbeddings] = None, + enable_query_cache: bool = False, *, qa_prompt: Optional[BasePromptTemplate] = None, aql_generation_prompt: Optional[BasePromptTemplate] = None, @@ -445,8 +445,8 @@ def _call( self.graph.query if self.execute_aql_query else self.graph.explain ) + cached_query = None if use_query_cache: - ###################### # Check Query 
Cache # ###################### @@ -470,7 +470,7 @@ def _call( vector_result = list(self.graph.db.aql.execute(vector_search_check, bind_vars={"query_embedding": query_embedding, "score_threshold": 0.80})) cached_query = vector_result[0] if vector_result else None if cached_query: - print("!!!using vector search!!!") + print("!!!using vector search!!!") if not use_query_cache or not cached_query: print("!!!using aql generation!!!") @@ -591,11 +591,13 @@ def _call( # Store Query in Cache # ###################### - self.graph.db.collection("Queries").insert({ - "text": user_input, - "embedding": query_embedding, - "aql": aql_query, - }) + if use_query_cache: + query_embedding = self.embedding.embed_query(user_input) + self.graph.db.collection("Queries").insert({ + "text": user_input, + "embedding": query_embedding, + "aql": aql_query, + }) if use_query_cache and cached_query: aql_query = cached_query From 0a6c5ba2731bbe80d8f36a53b8a91d5f6cae6c54 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Sat, 26 Jul 2025 13:35:07 -0700 Subject: [PATCH 04/45] add integration tests for query caching --- .../tests/integration_tests/chains/test_graph_database.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py index 6eb0f58..b48787e 100644 --- a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py +++ b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py @@ -13,6 +13,8 @@ from langchain_arangodb.graphs.arangodb_graph import ArangoGraph from tests.llms.fake_llm import FakeLLM +from ipdb import set_trace + @pytest.mark.usefixtures("clear_arangodb_database") def test_aql_generating_run(db: StandardDatabase) -> None: From 2942063c6a0deb65201c067a5f50cb9edf9ddf0a Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Sat, 26 Jul 2025 13:38:23 -0700 Subject: [PATCH 05/45] remove set_trace() --- 
.../tests/integration_tests/chains/test_graph_database.py | 2 -- libs/arangodb/tests/unit_tests/chains/test_graph_qa.py | 1 - 2 files changed, 3 deletions(-) diff --git a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py index b48787e..6eb0f58 100644 --- a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py +++ b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py @@ -13,8 +13,6 @@ from langchain_arangodb.graphs.arangodb_graph import ArangoGraph from tests.llms.fake_llm import FakeLLM -from ipdb import set_trace - @pytest.mark.usefixtures("clear_arangodb_database") def test_aql_generating_run(db: StandardDatabase) -> None: diff --git a/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py b/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py index b1ca1eb..eccfdd9 100644 --- a/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py +++ b/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py @@ -3,7 +3,6 @@ import math from typing import Any, Dict, List from unittest.mock import MagicMock, Mock - import pytest from arango import AQLQueryExecuteError from langchain_core.callbacks import CallbackManagerForChainRun From 5178c0bc3aa45d40e652bf2ec5d1cf7ce366f23d Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Sat, 26 Jul 2025 13:47:01 -0700 Subject: [PATCH 06/45] fix AI message error --- libs/arangodb/tests/unit_tests/chains/test_graph_qa.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py b/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py index eccfdd9..24d9b74 100644 --- a/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py +++ b/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py @@ -227,6 +227,7 @@ def test_call_successful_execution( self, fake_graph_store: FakeGraphStore, mock_chains: Dict[str, Runnable] ) -> None: """Test successful AQL query execution.""" + 
print("DEBUG Result:") chain = ArangoGraphQAChain( graph=fake_graph_store, aql_generation_chain=mock_chains["aql_generation_chain"], @@ -236,9 +237,10 @@ def test_call_successful_execution( ) result = chain._call({"query": "Find all movies"}) + print("DEBUG Result:", result) assert "result" in result - assert result["result"] == "This is a test answer" + assert result["result"].content == "This is a test answer" assert len(fake_graph_store.queries_executed) == 1 def test_call_with_ai_message_response( From 128ba8fe80e2ed5bb70afeddcb3ea1c6cdc173ae Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Sun, 27 Jul 2025 00:37:52 -0700 Subject: [PATCH 07/45] fix integration tests to pass lint tests --- .../tests/integration_tests/chains/test_graph_database.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py index 6eb0f58..7915853 100644 --- a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py +++ b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py @@ -1,10 +1,13 @@ """Test Graph Database Chain.""" import pprint +from typing import Any, Dict, List from unittest.mock import MagicMock, patch import pytest +from arango.cursor import Cursor from arango.database import StandardDatabase +from arango.job import AsyncJob, BatchJob from langchain_core.language_models import BaseLanguageModel from langchain_core.messages import AIMessage from langchain_core.runnables import RunnableLambda From d77e5c454ac6e45ae9850bc51fdf91c1bc44c7ad Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Sun, 27 Jul 2025 00:39:28 -0700 Subject: [PATCH 08/45] fix aql_gen_count error in unit tests and pass lint tests --- libs/arangodb/tests/unit_tests/chains/test_graph_qa.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py 
b/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py index 24d9b74..8e0a67c 100644 --- a/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py +++ b/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py @@ -3,6 +3,7 @@ import math from typing import Any, Dict, List from unittest.mock import MagicMock, Mock + import pytest from arango import AQLQueryExecuteError from langchain_core.callbacks import CallbackManagerForChainRun @@ -26,7 +27,7 @@ def __init__(self) -> None: self.explains_run = [] # type: ignore self.refreshed = False self.graph_documents_added = [] # type: ignore - + # Mock the database interface self.db = Mock() self.db.collection = Mock() @@ -126,7 +127,9 @@ def batch(self, *args, **kwargs) -> List[Any]: # type: ignore return [] qa_chain = CompliantRunnable() - qa_chain.invoke = MagicMock(return_value=AIMessage(content="This is a test answer")) # type: ignore + qa_chain.__class__.invoke = MagicMock( # type: ignore + return_value=AIMessage(content="This is a test answer") + ) # type: ignore aql_generation_chain = CompliantRunnable() aql_generation_chain.invoke = MagicMock( # type: ignore @@ -227,7 +230,6 @@ def test_call_successful_execution( self, fake_graph_store: FakeGraphStore, mock_chains: Dict[str, Runnable] ) -> None: """Test successful AQL query execution.""" - print("DEBUG Result:") chain = ArangoGraphQAChain( graph=fake_graph_store, aql_generation_chain=mock_chains["aql_generation_chain"], @@ -237,7 +239,6 @@ def test_call_successful_execution( ) result = chain._call({"query": "Find all movies"}) - print("DEBUG Result:", result) assert "result" in result assert result["result"].content == "This is a test answer" From e0bb02d2cbb3a8e2831767296a0d99ec7354a503 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Sun, 27 Jul 2025 00:39:44 -0700 Subject: [PATCH 09/45] fix lint errors --- .../chains/graph_qa/arangodb.py | 77 +++++++++++++------ 1 file changed, 55 insertions(+), 22 deletions(-) diff --git 
a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 88525c0..0d83248 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -3,7 +3,8 @@ from __future__ import annotations import re -from typing import Any, Dict, List, Optional, Tuple +from numbers import Number +from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, cast from arango import AQLQueryExecuteError, AQLQueryExplainError from langchain.chains.base import Chain @@ -23,6 +24,8 @@ ) from langchain_arangodb.graphs.arangodb_graph import ArangoGraph +from ...graphs.arangodb_graph import ArangoGraph + AQL_WRITE_OPERATIONS: List[str] = [ "INSERT", "UPDATE", @@ -91,7 +94,9 @@ class ArangoGraphQAChain(Chain): See https://python.langchain.com/docs/security for more information. """ - def __init__(self, embedding: Optional[OpenAIEmbeddings] = None, **kwargs: Any) -> None: + def __init__( + self, embedding: Optional[OpenAIEmbeddings] = None, **kwargs: Any + ) -> None: """Initialize the chain.""" super().__init__(**kwargs) self.embedding = embedding @@ -168,7 +173,9 @@ def from_llm( if enable_query_cache and embedding is None: raise ValueError("Cannot enable query cache without passing **embedding**") if embedding and not enable_query_cache: - raise ValueError("You passed an embedding, but you did not enable Query Cache usage.") + raise ValueError( + "You passed an embedding, but you did not enable Query Cache usage." 
+ ) qa_chain = qa_prompt | llm aql_generation_chain = aql_generation_prompt | llm @@ -442,7 +449,7 @@ def _call( aql_generation_attempt = 1 aql_execution_func = ( - self.graph.query if self.execute_aql_query else self.graph.explain + self.graph.query if self.execute_aql_query else self.graph.explain ) cached_query = None @@ -450,14 +457,24 @@ def _call( ###################### # Check Query Cache # ###################### - - # Exact Search - exact_search_check = list(self.graph.db.collection("Queries").find({"text": user_input}, limit=1)) + # Exact Search + + exact_search_check = list( + cast( + Iterable, + cast(ArangoGraph, self.graph) + .db.collection("Queries") + .find({"text": user_input}, limit=1), + ) + ) if len(exact_search_check) == 1: cached_query = exact_search_check[0]["aql"] - print("!!!using exact search!!!") else: - # Vector Search + # Vector Search + if self.embedding is None: + raise ValueError( + "Embedding must be provided when using query cache" + ) query_embedding = self.embedding.embed_query(user_input) vector_search_check = """ FOR q IN Queries @@ -467,14 +484,24 @@ def _call( FILTER score > @score_threshold RETURN q.aql """ - vector_result = list(self.graph.db.aql.execute(vector_search_check, bind_vars={"query_embedding": query_embedding, "score_threshold": 0.80})) + + vector_result = list( + cast( + Iterable, + cast(ArangoGraph, self.graph).db.aql.execute( + vector_search_check, + bind_vars={ + "query_embedding": cast( + Sequence[Number], query_embedding + ), + "score_threshold": cast(Number, 0.80), + }, + ), + ) + ) cached_query = vector_result[0] if vector_result else None - if cached_query: - print("!!!using vector search!!!") if not use_query_cache or not cached_query: - print("!!!using aql generation!!!") - ###################### # Generate AQL Query # ###################### @@ -592,12 +619,18 @@ def _call( ###################### if use_query_cache: + if self.embedding is None: + raise ValueError( + "Embedding must be provided when using 
query cache" + ) query_embedding = self.embedding.embed_query(user_input) - self.graph.db.collection("Queries").insert({ - "text": user_input, - "embedding": query_embedding, - "aql": aql_query, - }) + cast(ArangoGraph, self.graph).db.collection("Queries").insert( + { + "text": user_input, + "embedding": query_embedding, + "aql": aql_query, + } + ) if use_query_cache and cached_query: aql_query = cached_query @@ -683,8 +716,8 @@ def _call( }, callbacks=callbacks, ) - - results: Dict[str, Any] = {self.output_key: result.content} + results: Dict[str, Any] = {self.output_key: result} + # results: Dict[str, Any] = {self.output_key: result.content} if self.return_aql_query: results["aql_query"] = aql_generation_output @@ -712,4 +745,4 @@ def _is_read_only_query(self, aql_query: str) -> Tuple[bool, Optional[str]]: if op in normalized_query: return False, op - return True, None \ No newline at end of file + return True, None From 1d4d8e82597bde64528b9a92f2ba415df92d0237 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Sun, 27 Jul 2025 00:55:56 -0700 Subject: [PATCH 10/45] add langchain-openai --- libs/arangodb/pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/libs/arangodb/pyproject.toml b/libs/arangodb/pyproject.toml index b0c5616..dd7189b 100644 --- a/libs/arangodb/pyproject.toml +++ b/libs/arangodb/pyproject.toml @@ -14,6 +14,7 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.9,<4.0" langchain-core = "^0.3.8" +langchain-openai = ">=0.3.8" langchain = "^0.3.7" python-arango = "^8.0.0" cityhash = "^0.4.8" From f63f19a58312b0c2b85b3cc8565255b247e396e4 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Sun, 27 Jul 2025 00:58:06 -0700 Subject: [PATCH 11/45] update poetry.lock --- libs/arangodb/poetry.lock | 294 +++++++++++++++++++++++++++++++++++++- 1 file changed, 292 insertions(+), 2 deletions(-) diff --git a/libs/arangodb/poetry.lock b/libs/arangodb/poetry.lock index ab6ee97..8dd587f 100644 --- a/libs/arangodb/poetry.lock +++ 
b/libs/arangodb/poetry.lock @@ -350,12 +350,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["test"] -markers = "sys_platform == \"win32\"" +groups = ["main", "test"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "platform_system == \"Windows\"", test = "sys_platform == \"win32\""} [[package]] name = "coverage" @@ -440,6 +440,18 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + [[package]] name = "docutils" version = "0.20.1" @@ -681,6 +693,93 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jiter" +version = "0.10.0" +description = "Fast iterable JSON parser." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "jiter-0.10.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:cd2fb72b02478f06a900a5782de2ef47e0396b3e1f7d5aba30daeb1fce66f303"}, + {file = "jiter-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32bb468e3af278f095d3fa5b90314728a6916d89ba3d0ffb726dd9bf7367285e"}, + {file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8b3e0068c26ddedc7abc6fac37da2d0af16b921e288a5a613f4b86f050354f"}, + {file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:286299b74cc49e25cd42eea19b72aa82c515d2f2ee12d11392c56d8701f52224"}, + {file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ed5649ceeaeffc28d87fb012d25a4cd356dcd53eff5acff1f0466b831dda2a7"}, + {file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2ab0051160cb758a70716448908ef14ad476c3774bd03ddce075f3c1f90a3d6"}, + {file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03997d2f37f6b67d2f5c475da4412be584e1cec273c1cfc03d642c46db43f8cf"}, + {file = "jiter-0.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c404a99352d839fed80d6afd6c1d66071f3bacaaa5c4268983fc10f769112e90"}, + {file = "jiter-0.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66e989410b6666d3ddb27a74c7e50d0829704ede652fd4c858e91f8d64b403d0"}, + {file = "jiter-0.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b532d3af9ef4f6374609a3bcb5e05a1951d3bf6190dc6b176fdb277c9bbf15ee"}, + {file = "jiter-0.10.0-cp310-cp310-win32.whl", hash = "sha256:da9be20b333970e28b72edc4dff63d4fec3398e05770fb3205f7fb460eb48dd4"}, + {file = "jiter-0.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:f59e533afed0c5b0ac3eba20d2548c4a550336d8282ee69eb07b37ea526ee4e5"}, + {file = "jiter-0.10.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:3bebe0c558e19902c96e99217e0b8e8b17d570906e72ed8a87170bc290b1e978"}, + {file = "jiter-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:558cc7e44fd8e507a236bee6a02fa17199ba752874400a0ca6cd6e2196cdb7dc"}, + {file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d613e4b379a07d7c8453c5712ce7014e86c6ac93d990a0b8e7377e18505e98d"}, + {file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f62cf8ba0618eda841b9bf61797f21c5ebd15a7a1e19daab76e4e4b498d515b2"}, + {file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:919d139cdfa8ae8945112398511cb7fca58a77382617d279556b344867a37e61"}, + {file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ddbc6ae311175a3b03bd8994881bc4635c923754932918e18da841632349db"}, + {file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c440ea003ad10927a30521a9062ce10b5479592e8a70da27f21eeb457b4a9c5"}, + {file = "jiter-0.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc347c87944983481e138dea467c0551080c86b9d21de6ea9306efb12ca8f606"}, + {file = "jiter-0.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:13252b58c1f4d8c5b63ab103c03d909e8e1e7842d302473f482915d95fefd605"}, + {file = "jiter-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d1bbf3c465de4a24ab12fb7766a0003f6f9bce48b8b6a886158c4d569452dc5"}, + {file = "jiter-0.10.0-cp311-cp311-win32.whl", hash = "sha256:db16e4848b7e826edca4ccdd5b145939758dadf0dc06e7007ad0e9cfb5928ae7"}, + {file = "jiter-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c9c1d5f10e18909e993f9641f12fe1c77b3e9b533ee94ffa970acc14ded3812"}, + {file = "jiter-0.10.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1e274728e4a5345a6dde2d343c8da018b9d4bd4350f5a472fa91f66fda44911b"}, + {file = "jiter-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:7202ae396446c988cb2a5feb33a543ab2165b786ac97f53b59aafb803fef0744"}, + {file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ba7722d6748b6920ed02a8f1726fb4b33e0fd2f3f621816a8b486c66410ab2"}, + {file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:371eab43c0a288537d30e1f0b193bc4eca90439fc08a022dd83e5e07500ed026"}, + {file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c675736059020365cebc845a820214765162728b51ab1e03a1b7b3abb70f74c"}, + {file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c5867d40ab716e4684858e4887489685968a47e3ba222e44cde6e4a2154f959"}, + {file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:395bb9a26111b60141757d874d27fdea01b17e8fac958b91c20128ba8f4acc8a"}, + {file = "jiter-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6842184aed5cdb07e0c7e20e5bdcfafe33515ee1741a6835353bb45fe5d1bd95"}, + {file = "jiter-0.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:62755d1bcea9876770d4df713d82606c8c1a3dca88ff39046b85a048566d56ea"}, + {file = "jiter-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533efbce2cacec78d5ba73a41756beff8431dfa1694b6346ce7af3a12c42202b"}, + {file = "jiter-0.10.0-cp312-cp312-win32.whl", hash = "sha256:8be921f0cadd245e981b964dfbcd6fd4bc4e254cdc069490416dd7a2632ecc01"}, + {file = "jiter-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7c7d785ae9dda68c2678532a5a1581347e9c15362ae9f6e68f3fdbfb64f2e49"}, + {file = "jiter-0.10.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e0588107ec8e11b6f5ef0e0d656fb2803ac6cf94a96b2b9fc675c0e3ab5e8644"}, + {file = "jiter-0.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cafc4628b616dc32530c20ee53d71589816cf385dd9449633e910d596b1f5c8a"}, + {file = 
"jiter-0.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:520ef6d981172693786a49ff5b09eda72a42e539f14788124a07530f785c3ad6"}, + {file = "jiter-0.10.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:554dedfd05937f8fc45d17ebdf298fe7e0c77458232bcb73d9fbbf4c6455f5b3"}, + {file = "jiter-0.10.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc299da7789deacf95f64052d97f75c16d4fc8c4c214a22bf8d859a4288a1c2"}, + {file = "jiter-0.10.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5161e201172de298a8a1baad95eb85db4fb90e902353b1f6a41d64ea64644e25"}, + {file = "jiter-0.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2227db6ba93cb3e2bf67c87e594adde0609f146344e8207e8730364db27041"}, + {file = "jiter-0.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15acb267ea5e2c64515574b06a8bf393fbfee6a50eb1673614aa45f4613c0cca"}, + {file = "jiter-0.10.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:901b92f2e2947dc6dfcb52fd624453862e16665ea909a08398dde19c0731b7f4"}, + {file = "jiter-0.10.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d0cb9a125d5a3ec971a094a845eadde2db0de85b33c9f13eb94a0c63d463879e"}, + {file = "jiter-0.10.0-cp313-cp313-win32.whl", hash = "sha256:48a403277ad1ee208fb930bdf91745e4d2d6e47253eedc96e2559d1e6527006d"}, + {file = "jiter-0.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:75f9eb72ecb640619c29bf714e78c9c46c9c4eaafd644bf78577ede459f330d4"}, + {file = "jiter-0.10.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:28ed2a4c05a1f32ef0e1d24c2611330219fed727dae01789f4a335617634b1ca"}, + {file = "jiter-0.10.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a4c418b1ec86a195f1ca69da8b23e8926c752b685af665ce30777233dfe070"}, + {file = "jiter-0.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d7bfed2fe1fe0e4dda6ef682cee888ba444b21e7a6553e03252e4feb6cf0adca"}, 
+ {file = "jiter-0.10.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:5e9251a5e83fab8d87799d3e1a46cb4b7f2919b895c6f4483629ed2446f66522"}, + {file = "jiter-0.10.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:023aa0204126fe5b87ccbcd75c8a0d0261b9abdbbf46d55e7ae9f8e22424eeb8"}, + {file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c189c4f1779c05f75fc17c0c1267594ed918996a231593a21a5ca5438445216"}, + {file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15720084d90d1098ca0229352607cd68256c76991f6b374af96f36920eae13c4"}, + {file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4f2fb68e5f1cfee30e2b2a09549a00683e0fde4c6a2ab88c94072fc33cb7426"}, + {file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce541693355fc6da424c08b7edf39a2895f58d6ea17d92cc2b168d20907dee12"}, + {file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31c50c40272e189d50006ad5c73883caabb73d4e9748a688b216e85a9a9ca3b9"}, + {file = "jiter-0.10.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa3402a2ff9815960e0372a47b75c76979d74402448509ccd49a275fa983ef8a"}, + {file = "jiter-0.10.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:1956f934dca32d7bb647ea21d06d93ca40868b505c228556d3373cbd255ce853"}, + {file = "jiter-0.10.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:fcedb049bdfc555e261d6f65a6abe1d5ad68825b7202ccb9692636c70fcced86"}, + {file = "jiter-0.10.0-cp314-cp314-win32.whl", hash = "sha256:ac509f7eccca54b2a29daeb516fb95b6f0bd0d0d8084efaf8ed5dfc7b9f0b357"}, + {file = "jiter-0.10.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5ed975b83a2b8639356151cef5c0d597c68376fc4922b45d0eb384ac058cfa00"}, + {file = "jiter-0.10.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3aa96f2abba33dc77f79b4cf791840230375f9534e5fac927ccceb58c5e604a5"}, + {file = "jiter-0.10.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bd6292a43c0fc09ce7c154ec0fa646a536b877d1e8f2f96c19707f65355b5a4d"}, + {file = "jiter-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:39de429dcaeb6808d75ffe9effefe96a4903c6a4b376b2f6d08d77c1aaee2f18"}, + {file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ce124f13a7a616fad3bb723f2bfb537d78239d1f7f219566dc52b6f2a9e48d"}, + {file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:166f3606f11920f9a1746b2eea84fa2c0a5d50fd313c38bdea4edc072000b0af"}, + {file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:28dcecbb4ba402916034fc14eba7709f250c4d24b0c43fc94d187ee0580af181"}, + {file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86c5aa6910f9bebcc7bc4f8bc461aff68504388b43bfe5e5c0bd21efa33b52f4"}, + {file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ceeb52d242b315d7f1f74b441b6a167f78cea801ad7c11c36da77ff2d42e8a28"}, + {file = "jiter-0.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ff76d8887c8c8ee1e772274fcf8cc1071c2c58590d13e33bd12d02dc9a560397"}, + {file = "jiter-0.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a9be4d0fa2b79f7222a88aa488bd89e2ae0a0a5b189462a12def6ece2faa45f1"}, + {file = "jiter-0.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9ab7fd8738094139b6c1ab1822d6f2000ebe41515c537235fd45dabe13ec9324"}, + {file = "jiter-0.10.0-cp39-cp39-win32.whl", hash = "sha256:5f51e048540dd27f204ff4a87f5d79294ea0aa3aa552aca34934588cf27023cf"}, + {file = "jiter-0.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:1b28302349dc65703a9e4ead16f163b1c339efffbe1049c30a44b001a2a4fff9"}, + {file = "jiter-0.10.0.tar.gz", hash = 
"sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500"}, +] + [[package]] name = "jsonpatch" version = "1.33" @@ -780,6 +879,23 @@ reference = "HEAD" resolved_reference = "a7d0e42f3fa5b147fea9109f60e799229f30a68b" subdirectory = "libs/core" +[[package]] +name = "langchain-openai" +version = "0.3.12" +description = "An integration package connecting OpenAI and LangChain" +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "langchain_openai-0.3.12-py3-none-any.whl", hash = "sha256:0fab64d58ec95e65ffbaf659470cd362e815685e15edbcb171641e90eca4eb86"}, + {file = "langchain_openai-0.3.12.tar.gz", hash = "sha256:c9dbff63551f6bd91913bca9f99a2d057fd95dc58d4778657d67e5baa1737f61"}, +] + +[package.dependencies] +langchain-core = ">=0.3.49,<1.0.0" +openai = ">=1.68.2,<2.0.0" +tiktoken = ">=0.7,<1" + [[package]] name = "langchain-text-splitters" version = "0.3.8" @@ -1499,6 +1615,110 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] +[[package]] +name = "regex" +version = "2024.11.6" +description = "Alternative regular expression module, to replace re." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, + {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, + {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, + {file = 
"regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, + {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, + {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, + {file = 
"regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, + {file = "regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, + {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, + 
{file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, + {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, + {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"}, + {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"}, + {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"}, + {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"}, + {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"}, + {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, + {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, + {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, + {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, + {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, + {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, + {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, +] + [[package]] name = "requests" version = "2.32.4" @@ -1893,6 +2113,54 @@ files = [ doc = ["reno", "sphinx"] test = ["pytest", "tornado (>=4.5)", "typeguard"] +[[package]] +name = "tiktoken" +version = "0.9.0" +description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "tiktoken-0.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:586c16358138b96ea804c034b8acf3f5d3f0258bd2bc3b0227af4af5d622e382"}, + {file = "tiktoken-0.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9c59ccc528c6c5dd51820b3474402f69d9a9e1d656226848ad68a8d5b2e5108"}, + {file = "tiktoken-0.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0968d5beeafbca2a72c595e8385a1a1f8af58feaebb02b227229b69ca5357fd"}, + {file = "tiktoken-0.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:92a5fb085a6a3b7350b8fc838baf493317ca0e17bd95e8642f95fc69ecfed1de"}, + {file = "tiktoken-0.9.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15a2752dea63d93b0332fb0ddb05dd909371ededa145fe6a3242f46724fa7990"}, + {file = "tiktoken-0.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:26113fec3bd7a352e4b33dbaf1bd8948de2507e30bd95a44e2b1156647bc01b4"}, + {file = "tiktoken-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f32cc56168eac4851109e9b5d327637f15fd662aa30dd79f964b7c39fbadd26e"}, + {file = "tiktoken-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45556bc41241e5294063508caf901bf92ba52d8ef9222023f83d2483a3055348"}, + {file = "tiktoken-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03935988a91d6d3216e2ec7c645afbb3d870b37bcb67ada1943ec48678e7ee33"}, + {file = "tiktoken-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3d80aad8d2c6b9238fc1a5524542087c52b860b10cbf952429ffb714bc1136"}, + {file = "tiktoken-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b2a21133be05dc116b1d0372af051cd2c6aa1d2188250c9b553f9fa49301b336"}, + {file = "tiktoken-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:11a20e67fdf58b0e2dea7b8654a288e481bb4fc0289d3ad21291f8d0849915fb"}, + {file = "tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03"}, + {file = "tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210"}, + {file = "tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794"}, + {file = "tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22"}, + {file = "tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2"}, + {file = "tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16"}, + {file = "tiktoken-0.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b0e8e05a26eda1249e824156d537015480af7ae222ccb798e5234ae0285dbdb"}, + {file = "tiktoken-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:27d457f096f87685195eea0165a1807fae87b97b2161fe8c9b1df5bd74ca6f63"}, + {file = "tiktoken-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf8ded49cddf825390e36dd1ad35cd49589e8161fdcb52aa25f0583e90a3e01"}, + {file = "tiktoken-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc156cb314119a8bb9748257a2eaebd5cc0753b6cb491d26694ed42fc7cb3139"}, + {file = "tiktoken-0.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cd69372e8c9dd761f0ab873112aba55a0e3e506332dd9f7522ca466e817b1b7a"}, + {file = "tiktoken-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:5ea0edb6f83dc56d794723286215918c1cde03712cbbafa0348b33448faf5b95"}, + {file = "tiktoken-0.9.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c6386ca815e7d96ef5b4ac61e0048cd32ca5a92d5781255e13b31381d28667dc"}, + {file = "tiktoken-0.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75f6d5db5bc2c6274b674ceab1615c1778e6416b14705827d19b40e6355f03e0"}, + {file = "tiktoken-0.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e15b16f61e6f4625a57a36496d28dd182a8a60ec20a534c5343ba3cafa156ac7"}, + {file = "tiktoken-0.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebcec91babf21297022882344c3f7d9eed855931466c3311b1ad6b64befb3df"}, + {file = "tiktoken-0.9.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e5fd49e7799579240f03913447c0cdfa1129625ebd5ac440787afc4345990427"}, + {file = "tiktoken-0.9.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:26242ca9dc8b58e875ff4ca078b9a94d2f0813e6a535dcd2205df5d49d927cc7"}, + {file = "tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d"}, +] + +[package.dependencies] +regex = ">=2022.1.18" +requests = ">=2.26.0" + +[package.extras] +blobfile = ["blobfile (>=2)"] + [[package]] name = "tomli" version = "2.2.1" @@ -1936,6 +2204,28 @@ files = [ ] markers = {test = "python_full_version <= \"3.11.0a6\"", typing = "python_version < \"3.11\""} +[[package]] +name = "tqdm" +version = "4.67.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, + {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] +discord = ["requests"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "types-pyfarmhash" version = "0.4.0.20240902" From 175f2336075ac6a9c55885bcc1658ece506fffca Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Mon, 28 Jul 2025 08:34:19 -0700 Subject: [PATCH 12/45] add documentation --- .../langchain_arangodb/chains/graph_qa/arangodb.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 0d83248..e67b41a 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -171,7 +171,7 @@ def from_llm( aql_fix_prompt = AQL_FIX_PROMPT if enable_query_cache and embedding is None: - raise 
ValueError("Cannot enable query cache without passing **embedding**") + raise ValueError("Cannot enable query cache without passing embedding.") if embedding and not enable_query_cache: raise ValueError( "You passed an embedding, but you did not enable Query Cache usage." @@ -457,8 +457,8 @@ def _call( ###################### # Check Query Cache # ###################### - # Exact Search + # Exact Search exact_search_check = list( cast( Iterable, @@ -469,6 +469,7 @@ def _call( ) if len(exact_search_check) == 1: cached_query = exact_search_check[0]["aql"] + # print("Exact Search") else: # Vector Search if self.embedding is None: @@ -500,12 +501,14 @@ def _call( ) ) cached_query = vector_result[0] if vector_result else None + # if cached_query: + # print("Vector Search") if not use_query_cache or not cached_query: ###################### # Generate AQL Query # ###################### - + # print("Generate AQL Query") aql_generation_output = self.aql_generation_chain.invoke( { "adb_schema": self.graph.schema_yaml, From e8d30eace615aa511d510808c88bb59b9ae898cc Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Mon, 28 Jul 2025 10:29:19 -0700 Subject: [PATCH 13/45] remove cast and reformat --- .../chains/graph_qa/arangodb.py | 53 +++++++------------ 1 file changed, 19 insertions(+), 34 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index e67b41a..6025b32 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -3,8 +3,7 @@ from __future__ import annotations import re -from numbers import Number -from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, cast +from typing import Any, Dict, List, Optional, Tuple from arango import AQLQueryExecuteError, AQLQueryExplainError from langchain.chains.base import Chain @@ -24,8 +23,6 @@ ) from langchain_arangodb.graphs.arangodb_graph 
import ArangoGraph -from ...graphs.arangodb_graph import ArangoGraph - AQL_WRITE_OPERATIONS: List[str] = [ "INSERT", "UPDATE", @@ -452,24 +449,20 @@ def _call( self.graph.query if self.execute_aql_query else self.graph.explain ) + ###################### + # Check Query Cache # + ###################### + cached_query = None if use_query_cache: - ###################### - # Check Query Cache # - ###################### - # Exact Search exact_search_check = list( - cast( - Iterable, - cast(ArangoGraph, self.graph) - .db.collection("Queries") - .find({"text": user_input}, limit=1), + self.graph.db.collection("Queries").find( # type: ignore + {"text": user_input}, limit=1 ) ) if len(exact_search_check) == 1: cached_query = exact_search_check[0]["aql"] - # print("Exact Search") else: # Vector Search if self.embedding is None: @@ -487,28 +480,21 @@ def _call( """ vector_result = list( - cast( - Iterable, - cast(ArangoGraph, self.graph).db.aql.execute( - vector_search_check, - bind_vars={ - "query_embedding": cast( - Sequence[Number], query_embedding - ), - "score_threshold": cast(Number, 0.80), - }, - ), - ) + self.graph.db.aql.execute( # type: ignore + vector_search_check, + bind_vars={ + "query_embedding": query_embedding, # type: ignore + "score_threshold": 0.80, # type: ignore + }, + ), ) cached_query = vector_result[0] if vector_result else None - # if cached_query: - # print("Vector Search") + + ###################### + # Generate AQL Query # + ###################### if not use_query_cache or not cached_query: - ###################### - # Generate AQL Query # - ###################### - # print("Generate AQL Query") aql_generation_output = self.aql_generation_chain.invoke( { "adb_schema": self.graph.schema_yaml, @@ -627,7 +613,7 @@ def _call( "Embedding must be provided when using query cache" ) query_embedding = self.embedding.embed_query(user_input) - cast(ArangoGraph, self.graph).db.collection("Queries").insert( + self.graph.db.collection("Queries").insert( # type: 
ignore { "text": user_input, "embedding": query_embedding, @@ -720,7 +706,6 @@ def _call( callbacks=callbacks, ) results: Dict[str, Any] = {self.output_key: result} - # results: Dict[str, Any] = {self.output_key: result.content} if self.return_aql_query: results["aql_query"] = aql_generation_output From 575de90cb837cb2631c802bb350746cdbdcd6047 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Mon, 28 Jul 2025 20:32:07 -0700 Subject: [PATCH 14/45] change to Embeddings and remove unnecessary args --- .../chains/graph_qa/arangodb.py | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 6025b32..d346528 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -13,7 +13,8 @@ from langchain_core.messages import AIMessage from langchain_core.prompts import BasePromptTemplate from langchain_core.runnables import Runnable -from langchain_openai import OpenAIEmbeddings +from langchain_core.embeddings import Embeddings + from pydantic import Field from langchain_arangodb.chains.graph_qa.prompts import ( @@ -92,11 +93,10 @@ class ArangoGraphQAChain(Chain): """ def __init__( - self, embedding: Optional[OpenAIEmbeddings] = None, **kwargs: Any + self, **kwargs: Any ) -> None: """Initialize the chain.""" super().__init__(**kwargs) - self.embedding = embedding if self.allow_dangerous_requests is not True: raise ValueError( "In order to use this chain, you must acknowledge that it can make " @@ -131,8 +131,6 @@ def _chain_type(self) -> str: def from_llm( cls, llm: BaseLanguageModel, - embedding: Optional[OpenAIEmbeddings] = None, - enable_query_cache: bool = False, *, qa_prompt: Optional[BasePromptTemplate] = None, aql_generation_prompt: Optional[BasePromptTemplate] = None, @@ -167,13 +165,6 @@ def from_llm( if aql_fix_prompt is None: 
aql_fix_prompt = AQL_FIX_PROMPT - if enable_query_cache and embedding is None: - raise ValueError("Cannot enable query cache without passing embedding.") - if embedding and not enable_query_cache: - raise ValueError( - "You passed an embedding, but you did not enable Query Cache usage." - ) - qa_chain = qa_prompt | llm aql_generation_chain = aql_generation_prompt | llm aql_fix_chain = aql_fix_prompt | llm @@ -182,7 +173,6 @@ def from_llm( qa_chain=qa_chain, aql_generation_chain=aql_generation_chain, aql_fix_chain=aql_fix_chain, - embedding=embedding, **kwargs, ) From e8e0380dcb11660e1ec00d31254b76ce03442683 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Mon, 28 Jul 2025 20:38:15 -0700 Subject: [PATCH 15/45] remove langchain-openai and update ruff version --- libs/arangodb/poetry.lock | 294 +---------------------------------- libs/arangodb/pyproject.toml | 1 - 2 files changed, 2 insertions(+), 293 deletions(-) diff --git a/libs/arangodb/poetry.lock b/libs/arangodb/poetry.lock index 8dd587f..ab6ee97 100644 --- a/libs/arangodb/poetry.lock +++ b/libs/arangodb/poetry.lock @@ -350,12 +350,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main", "test"] +groups = ["test"] +markers = "sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "platform_system == \"Windows\"", test = "sys_platform == \"win32\""} [[package]] name = "coverage" @@ -440,18 +440,6 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli ; python_full_version <= \"3.11.0a6\""] -[[package]] -name = "distro" -version = "1.9.0" -description = "Distro - an OS platform information API" -optional = false -python-versions = ">=3.6" -groups = ["main"] -files = [ - {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, - {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, -] - [[package]] name = "docutils" version = "0.20.1" @@ -693,93 +681,6 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] -[[package]] -name = "jiter" -version = "0.10.0" -description = "Fast iterable JSON parser." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "jiter-0.10.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:cd2fb72b02478f06a900a5782de2ef47e0396b3e1f7d5aba30daeb1fce66f303"}, - {file = "jiter-0.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32bb468e3af278f095d3fa5b90314728a6916d89ba3d0ffb726dd9bf7367285e"}, - {file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8b3e0068c26ddedc7abc6fac37da2d0af16b921e288a5a613f4b86f050354f"}, - {file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:286299b74cc49e25cd42eea19b72aa82c515d2f2ee12d11392c56d8701f52224"}, - {file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6ed5649ceeaeffc28d87fb012d25a4cd356dcd53eff5acff1f0466b831dda2a7"}, - {file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2ab0051160cb758a70716448908ef14ad476c3774bd03ddce075f3c1f90a3d6"}, - {file = "jiter-0.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03997d2f37f6b67d2f5c475da4412be584e1cec273c1cfc03d642c46db43f8cf"}, - {file = "jiter-0.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c404a99352d839fed80d6afd6c1d66071f3bacaaa5c4268983fc10f769112e90"}, - {file = "jiter-0.10.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:66e989410b6666d3ddb27a74c7e50d0829704ede652fd4c858e91f8d64b403d0"}, - {file = "jiter-0.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b532d3af9ef4f6374609a3bcb5e05a1951d3bf6190dc6b176fdb277c9bbf15ee"}, - {file = "jiter-0.10.0-cp310-cp310-win32.whl", hash = "sha256:da9be20b333970e28b72edc4dff63d4fec3398e05770fb3205f7fb460eb48dd4"}, - {file = "jiter-0.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:f59e533afed0c5b0ac3eba20d2548c4a550336d8282ee69eb07b37ea526ee4e5"}, - {file = "jiter-0.10.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:3bebe0c558e19902c96e99217e0b8e8b17d570906e72ed8a87170bc290b1e978"}, - {file = "jiter-0.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:558cc7e44fd8e507a236bee6a02fa17199ba752874400a0ca6cd6e2196cdb7dc"}, - {file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d613e4b379a07d7c8453c5712ce7014e86c6ac93d990a0b8e7377e18505e98d"}, - {file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f62cf8ba0618eda841b9bf61797f21c5ebd15a7a1e19daab76e4e4b498d515b2"}, - {file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:919d139cdfa8ae8945112398511cb7fca58a77382617d279556b344867a37e61"}, - {file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ddbc6ae311175a3b03bd8994881bc4635c923754932918e18da841632349db"}, - {file = "jiter-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c440ea003ad10927a30521a9062ce10b5479592e8a70da27f21eeb457b4a9c5"}, - {file = "jiter-0.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dc347c87944983481e138dea467c0551080c86b9d21de6ea9306efb12ca8f606"}, - {file = "jiter-0.10.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:13252b58c1f4d8c5b63ab103c03d909e8e1e7842d302473f482915d95fefd605"}, - {file = "jiter-0.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d1bbf3c465de4a24ab12fb7766a0003f6f9bce48b8b6a886158c4d569452dc5"}, - {file = "jiter-0.10.0-cp311-cp311-win32.whl", hash = "sha256:db16e4848b7e826edca4ccdd5b145939758dadf0dc06e7007ad0e9cfb5928ae7"}, - {file = "jiter-0.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c9c1d5f10e18909e993f9641f12fe1c77b3e9b533ee94ffa970acc14ded3812"}, - {file = "jiter-0.10.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1e274728e4a5345a6dde2d343c8da018b9d4bd4350f5a472fa91f66fda44911b"}, - {file = "jiter-0.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:7202ae396446c988cb2a5feb33a543ab2165b786ac97f53b59aafb803fef0744"}, - {file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ba7722d6748b6920ed02a8f1726fb4b33e0fd2f3f621816a8b486c66410ab2"}, - {file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:371eab43c0a288537d30e1f0b193bc4eca90439fc08a022dd83e5e07500ed026"}, - {file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c675736059020365cebc845a820214765162728b51ab1e03a1b7b3abb70f74c"}, - {file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c5867d40ab716e4684858e4887489685968a47e3ba222e44cde6e4a2154f959"}, - {file = "jiter-0.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:395bb9a26111b60141757d874d27fdea01b17e8fac958b91c20128ba8f4acc8a"}, - {file = "jiter-0.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6842184aed5cdb07e0c7e20e5bdcfafe33515ee1741a6835353bb45fe5d1bd95"}, - {file = "jiter-0.10.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:62755d1bcea9876770d4df713d82606c8c1a3dca88ff39046b85a048566d56ea"}, - {file = "jiter-0.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533efbce2cacec78d5ba73a41756beff8431dfa1694b6346ce7af3a12c42202b"}, - {file = "jiter-0.10.0-cp312-cp312-win32.whl", hash = "sha256:8be921f0cadd245e981b964dfbcd6fd4bc4e254cdc069490416dd7a2632ecc01"}, - {file = "jiter-0.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7c7d785ae9dda68c2678532a5a1581347e9c15362ae9f6e68f3fdbfb64f2e49"}, - {file = "jiter-0.10.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e0588107ec8e11b6f5ef0e0d656fb2803ac6cf94a96b2b9fc675c0e3ab5e8644"}, - {file = "jiter-0.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cafc4628b616dc32530c20ee53d71589816cf385dd9449633e910d596b1f5c8a"}, - {file = 
"jiter-0.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:520ef6d981172693786a49ff5b09eda72a42e539f14788124a07530f785c3ad6"}, - {file = "jiter-0.10.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:554dedfd05937f8fc45d17ebdf298fe7e0c77458232bcb73d9fbbf4c6455f5b3"}, - {file = "jiter-0.10.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc299da7789deacf95f64052d97f75c16d4fc8c4c214a22bf8d859a4288a1c2"}, - {file = "jiter-0.10.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5161e201172de298a8a1baad95eb85db4fb90e902353b1f6a41d64ea64644e25"}, - {file = "jiter-0.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2227db6ba93cb3e2bf67c87e594adde0609f146344e8207e8730364db27041"}, - {file = "jiter-0.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15acb267ea5e2c64515574b06a8bf393fbfee6a50eb1673614aa45f4613c0cca"}, - {file = "jiter-0.10.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:901b92f2e2947dc6dfcb52fd624453862e16665ea909a08398dde19c0731b7f4"}, - {file = "jiter-0.10.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d0cb9a125d5a3ec971a094a845eadde2db0de85b33c9f13eb94a0c63d463879e"}, - {file = "jiter-0.10.0-cp313-cp313-win32.whl", hash = "sha256:48a403277ad1ee208fb930bdf91745e4d2d6e47253eedc96e2559d1e6527006d"}, - {file = "jiter-0.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:75f9eb72ecb640619c29bf714e78c9c46c9c4eaafd644bf78577ede459f330d4"}, - {file = "jiter-0.10.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:28ed2a4c05a1f32ef0e1d24c2611330219fed727dae01789f4a335617634b1ca"}, - {file = "jiter-0.10.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a4c418b1ec86a195f1ca69da8b23e8926c752b685af665ce30777233dfe070"}, - {file = "jiter-0.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d7bfed2fe1fe0e4dda6ef682cee888ba444b21e7a6553e03252e4feb6cf0adca"}, 
- {file = "jiter-0.10.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:5e9251a5e83fab8d87799d3e1a46cb4b7f2919b895c6f4483629ed2446f66522"}, - {file = "jiter-0.10.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:023aa0204126fe5b87ccbcd75c8a0d0261b9abdbbf46d55e7ae9f8e22424eeb8"}, - {file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c189c4f1779c05f75fc17c0c1267594ed918996a231593a21a5ca5438445216"}, - {file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15720084d90d1098ca0229352607cd68256c76991f6b374af96f36920eae13c4"}, - {file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4f2fb68e5f1cfee30e2b2a09549a00683e0fde4c6a2ab88c94072fc33cb7426"}, - {file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce541693355fc6da424c08b7edf39a2895f58d6ea17d92cc2b168d20907dee12"}, - {file = "jiter-0.10.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31c50c40272e189d50006ad5c73883caabb73d4e9748a688b216e85a9a9ca3b9"}, - {file = "jiter-0.10.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa3402a2ff9815960e0372a47b75c76979d74402448509ccd49a275fa983ef8a"}, - {file = "jiter-0.10.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:1956f934dca32d7bb647ea21d06d93ca40868b505c228556d3373cbd255ce853"}, - {file = "jiter-0.10.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:fcedb049bdfc555e261d6f65a6abe1d5ad68825b7202ccb9692636c70fcced86"}, - {file = "jiter-0.10.0-cp314-cp314-win32.whl", hash = "sha256:ac509f7eccca54b2a29daeb516fb95b6f0bd0d0d8084efaf8ed5dfc7b9f0b357"}, - {file = "jiter-0.10.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5ed975b83a2b8639356151cef5c0d597c68376fc4922b45d0eb384ac058cfa00"}, - {file = "jiter-0.10.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3aa96f2abba33dc77f79b4cf791840230375f9534e5fac927ccceb58c5e604a5"}, - {file = "jiter-0.10.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bd6292a43c0fc09ce7c154ec0fa646a536b877d1e8f2f96c19707f65355b5a4d"}, - {file = "jiter-0.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:39de429dcaeb6808d75ffe9effefe96a4903c6a4b376b2f6d08d77c1aaee2f18"}, - {file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ce124f13a7a616fad3bb723f2bfb537d78239d1f7f219566dc52b6f2a9e48d"}, - {file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:166f3606f11920f9a1746b2eea84fa2c0a5d50fd313c38bdea4edc072000b0af"}, - {file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:28dcecbb4ba402916034fc14eba7709f250c4d24b0c43fc94d187ee0580af181"}, - {file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86c5aa6910f9bebcc7bc4f8bc461aff68504388b43bfe5e5c0bd21efa33b52f4"}, - {file = "jiter-0.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ceeb52d242b315d7f1f74b441b6a167f78cea801ad7c11c36da77ff2d42e8a28"}, - {file = "jiter-0.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ff76d8887c8c8ee1e772274fcf8cc1071c2c58590d13e33bd12d02dc9a560397"}, - {file = "jiter-0.10.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a9be4d0fa2b79f7222a88aa488bd89e2ae0a0a5b189462a12def6ece2faa45f1"}, - {file = "jiter-0.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9ab7fd8738094139b6c1ab1822d6f2000ebe41515c537235fd45dabe13ec9324"}, - {file = "jiter-0.10.0-cp39-cp39-win32.whl", hash = "sha256:5f51e048540dd27f204ff4a87f5d79294ea0aa3aa552aca34934588cf27023cf"}, - {file = "jiter-0.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:1b28302349dc65703a9e4ead16f163b1c339efffbe1049c30a44b001a2a4fff9"}, - {file = "jiter-0.10.0.tar.gz", hash = 
"sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500"}, -] - [[package]] name = "jsonpatch" version = "1.33" @@ -879,23 +780,6 @@ reference = "HEAD" resolved_reference = "a7d0e42f3fa5b147fea9109f60e799229f30a68b" subdirectory = "libs/core" -[[package]] -name = "langchain-openai" -version = "0.3.12" -description = "An integration package connecting OpenAI and LangChain" -optional = false -python-versions = "<4.0,>=3.9" -groups = ["main"] -files = [ - {file = "langchain_openai-0.3.12-py3-none-any.whl", hash = "sha256:0fab64d58ec95e65ffbaf659470cd362e815685e15edbcb171641e90eca4eb86"}, - {file = "langchain_openai-0.3.12.tar.gz", hash = "sha256:c9dbff63551f6bd91913bca9f99a2d057fd95dc58d4778657d67e5baa1737f61"}, -] - -[package.dependencies] -langchain-core = ">=0.3.49,<1.0.0" -openai = ">=1.68.2,<2.0.0" -tiktoken = ">=0.7,<1" - [[package]] name = "langchain-text-splitters" version = "0.3.8" @@ -1615,110 +1499,6 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] -[[package]] -name = "regex" -version = "2024.11.6" -description = "Alternative regular expression module, to replace re." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, - {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, - {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, - {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, - {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, - {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, - {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, - {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, - {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, - {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, - {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, - {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, - {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, - {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, - {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, - {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, - {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, - {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, - {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, - {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, - {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, - {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, - {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, - {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, - {file = 
"regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, - {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, - {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, - {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, - {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, - {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, - {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, - {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, - {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, - {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, - {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, - {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, - {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, - {file = 
"regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, - {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, - {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, - {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, - {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, - {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, - {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, - {file = "regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, - {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, - {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, - {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, - {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, - {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, - 
{file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, - {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, - {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, - {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, - {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, - {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, - {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, - {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, - {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, - {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, - {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, - {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"}, - {file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"}, - {file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"}, - {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"}, - {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"}, - {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"}, - {file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"}, - {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"}, - {file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"}, - {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"}, - {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"}, - {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"}, - {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"}, - {file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"}, - {file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"}, - {file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"}, - {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"}, - {file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"}, - {file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"}, - {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"}, - {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"}, - {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"}, - {file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"}, - {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"}, - {file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"}, - {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"}, - {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"}, - {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"}, - {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"}, - {file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"}, - {file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"}, - {file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"}, - {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, -] - [[package]] name = "requests" version = "2.32.4" @@ -2113,54 +1893,6 @@ files = [ doc = ["reno", "sphinx"] test = ["pytest", "tornado (>=4.5)", "typeguard"] -[[package]] -name = "tiktoken" -version = "0.9.0" -description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "tiktoken-0.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:586c16358138b96ea804c034b8acf3f5d3f0258bd2bc3b0227af4af5d622e382"}, - {file = "tiktoken-0.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9c59ccc528c6c5dd51820b3474402f69d9a9e1d656226848ad68a8d5b2e5108"}, - {file = "tiktoken-0.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0968d5beeafbca2a72c595e8385a1a1f8af58feaebb02b227229b69ca5357fd"}, - {file = "tiktoken-0.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:92a5fb085a6a3b7350b8fc838baf493317ca0e17bd95e8642f95fc69ecfed1de"}, - {file = "tiktoken-0.9.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15a2752dea63d93b0332fb0ddb05dd909371ededa145fe6a3242f46724fa7990"}, - {file = "tiktoken-0.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:26113fec3bd7a352e4b33dbaf1bd8948de2507e30bd95a44e2b1156647bc01b4"}, - {file = "tiktoken-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f32cc56168eac4851109e9b5d327637f15fd662aa30dd79f964b7c39fbadd26e"}, - {file = "tiktoken-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45556bc41241e5294063508caf901bf92ba52d8ef9222023f83d2483a3055348"}, - {file = "tiktoken-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03935988a91d6d3216e2ec7c645afbb3d870b37bcb67ada1943ec48678e7ee33"}, - {file = "tiktoken-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3d80aad8d2c6b9238fc1a5524542087c52b860b10cbf952429ffb714bc1136"}, - {file = "tiktoken-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b2a21133be05dc116b1d0372af051cd2c6aa1d2188250c9b553f9fa49301b336"}, - {file = "tiktoken-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:11a20e67fdf58b0e2dea7b8654a288e481bb4fc0289d3ad21291f8d0849915fb"}, - {file = "tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03"}, - {file = "tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210"}, - {file = "tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794"}, - {file = "tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22"}, - {file = "tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2"}, - {file = "tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16"}, - {file = "tiktoken-0.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b0e8e05a26eda1249e824156d537015480af7ae222ccb798e5234ae0285dbdb"}, - {file = "tiktoken-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:27d457f096f87685195eea0165a1807fae87b97b2161fe8c9b1df5bd74ca6f63"}, - {file = "tiktoken-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf8ded49cddf825390e36dd1ad35cd49589e8161fdcb52aa25f0583e90a3e01"}, - {file = "tiktoken-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc156cb314119a8bb9748257a2eaebd5cc0753b6cb491d26694ed42fc7cb3139"}, - {file = "tiktoken-0.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cd69372e8c9dd761f0ab873112aba55a0e3e506332dd9f7522ca466e817b1b7a"}, - {file = "tiktoken-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:5ea0edb6f83dc56d794723286215918c1cde03712cbbafa0348b33448faf5b95"}, - {file = "tiktoken-0.9.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c6386ca815e7d96ef5b4ac61e0048cd32ca5a92d5781255e13b31381d28667dc"}, - {file = "tiktoken-0.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:75f6d5db5bc2c6274b674ceab1615c1778e6416b14705827d19b40e6355f03e0"}, - {file = "tiktoken-0.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e15b16f61e6f4625a57a36496d28dd182a8a60ec20a534c5343ba3cafa156ac7"}, - {file = "tiktoken-0.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebcec91babf21297022882344c3f7d9eed855931466c3311b1ad6b64befb3df"}, - {file = "tiktoken-0.9.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e5fd49e7799579240f03913447c0cdfa1129625ebd5ac440787afc4345990427"}, - {file = "tiktoken-0.9.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:26242ca9dc8b58e875ff4ca078b9a94d2f0813e6a535dcd2205df5d49d927cc7"}, - {file = "tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d"}, -] - -[package.dependencies] -regex = ">=2022.1.18" -requests = ">=2.26.0" - -[package.extras] -blobfile = ["blobfile (>=2)"] - [[package]] name = "tomli" version = "2.2.1" @@ -2204,28 +1936,6 @@ files = [ ] markers = {test = "python_full_version <= \"3.11.0a6\"", typing = "python_version < \"3.11\""} -[[package]] -name = "tqdm" -version = "4.67.1" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, - {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] -discord = ["requests"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - [[package]] name = "types-pyfarmhash" version = "0.4.0.20240902" diff --git a/libs/arangodb/pyproject.toml b/libs/arangodb/pyproject.toml index dd7189b..b0c5616 100644 --- a/libs/arangodb/pyproject.toml +++ b/libs/arangodb/pyproject.toml @@ -14,7 +14,6 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.9,<4.0" langchain-core = "^0.3.8" -langchain-openai = ">=0.3.8" langchain = "^0.3.7" python-arango = "^8.0.0" cityhash = "^0.4.8" From 80109ab530032c308c51e19045c603e6880b0ef2 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Mon, 28 Jul 2025 22:14:14 -0700 Subject: [PATCH 16/45] simplify integration tests --- .../tests/integration_tests/chains/test_graph_database.py | 1 + 1 file changed, 1 insertion(+) diff --git 
a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py index 7915853..6b65163 100644 --- a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py +++ b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py @@ -11,6 +11,7 @@ from langchain_core.language_models import BaseLanguageModel from langchain_core.messages import AIMessage from langchain_core.runnables import RunnableLambda +from langchain_core.embeddings import Embeddings from langchain_arangodb.chains.graph_qa.arangodb import ArangoGraphQAChain from langchain_arangodb.graphs.arangodb_graph import ArangoGraph From 38be0c00b5667d879879a2bc7c7c2e2f6a44f1ad Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 29 Jul 2025 09:53:44 -0700 Subject: [PATCH 17/45] add score to the output --- .../chains/graph_qa/arangodb.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index d346528..6be7446 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -13,8 +13,6 @@ from langchain_core.messages import AIMessage from langchain_core.prompts import BasePromptTemplate from langchain_core.runnables import Runnable -from langchain_core.embeddings import Embeddings - from pydantic import Field from langchain_arangodb.chains.graph_qa.prompts import ( @@ -92,9 +90,7 @@ class ArangoGraphQAChain(Chain): See https://python.langchain.com/docs/security for more information. 
""" - def __init__( - self, **kwargs: Any - ) -> None: + def __init__(self, **kwargs: Any) -> None: """Initialize the chain.""" super().__init__(**kwargs) if self.allow_dangerous_requests is not True: @@ -453,6 +449,8 @@ def _call( ) if len(exact_search_check) == 1: cached_query = exact_search_check[0]["aql"] + # score = "1.0" + score: Optional[int] = None else: # Vector Search if self.embedding is None: @@ -466,7 +464,7 @@ def _call( SORT score DESC LIMIT 1 FILTER score > @score_threshold - RETURN q.aql + RETURN {aql: q.aql, score: score} """ vector_result = list( @@ -478,7 +476,10 @@ def _call( }, ), ) - cached_query = vector_result[0] if vector_result else None + if vector_result: + result = vector_result[0] if vector_result else None + cached_query = result["aql"] # type: ignore + score = f"{result['score']:.2f}" # type: ignore ###################### # Generate AQL Query # From 7906a1c2977f8e3b1f15061ccf975c4a768f986a Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 29 Jul 2025 09:55:55 -0700 Subject: [PATCH 18/45] change to insert_many() and invoke() --- .../tests/integration_tests/chains/test_graph_database.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py index 6b65163..6eb0f58 100644 --- a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py +++ b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py @@ -1,17 +1,13 @@ """Test Graph Database Chain.""" import pprint -from typing import Any, Dict, List from unittest.mock import MagicMock, patch import pytest -from arango.cursor import Cursor from arango.database import StandardDatabase -from arango.job import AsyncJob, BatchJob from langchain_core.language_models import BaseLanguageModel from langchain_core.messages import AIMessage from langchain_core.runnables import RunnableLambda -from langchain_core.embeddings import Embeddings from 
langchain_arangodb.chains.graph_qa.arangodb import ArangoGraphQAChain from langchain_arangodb.graphs.arangodb_graph import ArangoGraph From 78cca90e032268278a72791329dbb433e028fe0b Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 29 Jul 2025 09:57:09 -0700 Subject: [PATCH 19/45] change to invoke() and revert result changes --- libs/arangodb/tests/unit_tests/chains/test_graph_qa.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py b/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py index 8e0a67c..a2b7b78 100644 --- a/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py +++ b/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py @@ -127,9 +127,7 @@ def batch(self, *args, **kwargs) -> List[Any]: # type: ignore return [] qa_chain = CompliantRunnable() - qa_chain.__class__.invoke = MagicMock( # type: ignore - return_value=AIMessage(content="This is a test answer") - ) # type: ignore + qa_chain.invoke = MagicMock(return_value="This is a test answer") # type: ignore aql_generation_chain = CompliantRunnable() aql_generation_chain.invoke = MagicMock( # type: ignore @@ -241,7 +239,7 @@ def test_call_successful_execution( result = chain._call({"query": "Find all movies"}) assert "result" in result - assert result["result"].content == "This is a test answer" + assert result["result"] == "This is a test answer" assert len(fake_graph_store.queries_executed) == 1 def test_call_with_ai_message_response( From 386094576c103a00fe87a1bf2a118c81f7f1809d Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 29 Jul 2025 21:53:32 -0700 Subject: [PATCH 20/45] refactor _call --- .../chains/graph_qa/arangodb.py | 242 +++++++----------- 1 file changed, 96 insertions(+), 146 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 6be7446..85ba1cd 100644 --- 
a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -439,179 +439,121 @@ def _call( # Check Query Cache # ###################### - cached_query = None + cached_query, score = None, None if use_query_cache: - # Exact Search - exact_search_check = list( - self.graph.db.collection("Queries").find( # type: ignore - {"text": user_input}, limit=1 - ) - ) - if len(exact_search_check) == 1: - cached_query = exact_search_check[0]["aql"] - # score = "1.0" - score: Optional[int] = None - else: - # Vector Search - if self.embedding is None: - raise ValueError( - "Embedding must be provided when using query cache" - ) - query_embedding = self.embedding.embed_query(user_input) - vector_search_check = """ - FOR q IN Queries - LET score = COSINE_SIMILARITY(q.embedding, @query_embedding) - SORT score DESC - LIMIT 1 - FILTER score > @score_threshold - RETURN {aql: q.aql, score: score} - """ - - vector_result = list( - self.graph.db.aql.execute( # type: ignore - vector_search_check, - bind_vars={ - "query_embedding": query_embedding, # type: ignore - "score_threshold": 0.80, # type: ignore - }, - ), - ) - if vector_result: - result = vector_result[0] if vector_result else None - cached_query = result["aql"] # type: ignore - score = f"{result['score']:.2f}" # type: ignore + if self.embedding is None: + m = "Embedding must be provided when using query cache" + raise ValueError(m) - ###################### - # Generate AQL Query # - ###################### + cache_result = self.__get_cached_query() + + if cache_result is not None: + cached_query, score = cache_result - if not use_query_cache or not cached_query: + if cached_query: + aql_generation_output = f"```aql{cached_query}```" + else: aql_generation_output = self.aql_generation_chain.invoke( { "adb_schema": self.graph.schema_yaml, "aql_examples": self.aql_examples, - "user_input": user_input, + "user_input": self._last_user_input, }, 
callbacks=callbacks, ) - aql_query = "" - aql_error = "" - aql_result = None - aql_generation_attempt = 1 - - while ( - aql_result is None - and aql_generation_attempt < self.max_aql_generation_attempts + 1 - ): - if isinstance(aql_generation_output, str): - aql_generation_output_content = aql_generation_output - elif isinstance(aql_generation_output, AIMessage): - aql_generation_output_content = str(aql_generation_output.content) - else: - m = f"Invalid AQL Generation Output: {aql_generation_output} (type: {type(aql_generation_output)})" # noqa: E501 - raise ValueError(m) - - ##################### - # Extract AQL Query # - ##################### - - pattern = r"```(?i:aql)?(.*?)```" - matches: List[str] = re.findall( - pattern, aql_generation_output_content, re.DOTALL + aql_query = "" + aql_error = "" + aql_result = None + aql_generation_attempt = 0 + + while ( + aql_result is None + and aql_generation_attempt < self.max_aql_generation_attempts + ): + if isinstance(aql_generation_output, str): + aql_generation_output_content = aql_generation_output + elif isinstance(aql_generation_output, AIMessage): + aql_generation_output_content = str(aql_generation_output.content) + else: + m = f"Invalid AQL Generation Output: {aql_generation_output} (type: {type(aql_generation_output)})" # noqa: E501 + raise ValueError(m) + + ##################### + # Extract AQL Query # + ##################### + + pattern = r"```(?i:aql)?(.*?)```" + matches: List[str] = re.findall( + pattern, aql_generation_output_content, re.DOTALL + ) + + if not matches: + _run_manager.on_text( + "Invalid Response: ", end="\n", verbose=self.verbose ) - if not matches: - _run_manager.on_text( - "Invalid Response: ", end="\n", verbose=self.verbose - ) + _run_manager.on_text( + aql_generation_output_content, + color="red", + end="\n", + verbose=self.verbose, + ) - _run_manager.on_text( - aql_generation_output_content, - color="red", - end="\n", - verbose=self.verbose, - ) + m = f"Unable to extract AQL Query 
from response: {aql_generation_output_content}" # noqa: E501 + raise ValueError(m) - m = f"Unable to extract AQL Query from response: {aql_generation_output_content}" # noqa: E501 - raise ValueError(m) + aql_query = matches[0].strip() - aql_query = matches[0].strip() + if self.force_read_only_query: + is_read_only, write_operation = self._is_read_only_query(aql_query) - if self.force_read_only_query: - is_read_only, write_operation = self._is_read_only_query(aql_query) + if not is_read_only: + error_msg = f""" + Security violation: Write operations are not allowed. + Detected write operation in query: {write_operation} + """ + raise ValueError(error_msg) - if not is_read_only: - error_msg = f""" - Security violation: Write operations are not allowed. - Detected write operation in query: {write_operation} - """ - raise ValueError(error_msg) + ############################# + # Execute/Explain AQL Query # + ############################# + + try: + aql_result = aql_execution_func(aql_query, params) + except (AQLQueryExecuteError, AQLQueryExplainError) as e: + aql_error = str(e.error_message) _run_manager.on_text( - f"AQL Query ({aql_generation_attempt}):", verbose=self.verbose + "AQL Query Execution Error: ", end="\n", verbose=self.verbose ) _run_manager.on_text( - aql_query, color="green", end="\n", verbose=self.verbose + aql_error, color="yellow", end="\n\n", verbose=self.verbose ) - ############################# - # Execute/Explain AQL Query # - ############################# - - try: - aql_result = aql_execution_func(aql_query, params) - except (AQLQueryExecuteError, AQLQueryExplainError) as e: - aql_error = str(e.error_message) - - _run_manager.on_text( - "AQL Query Execution Error: ", end="\n", verbose=self.verbose - ) - _run_manager.on_text( - aql_error, color="yellow", end="\n\n", verbose=self.verbose - ) - - ######################## - # Retry AQL Generation # - ######################## - - aql_generation_output = self.aql_fix_chain.invoke( - { - "adb_schema": 
self.graph.schema_yaml, - "aql_query": aql_query, - "aql_error": aql_error, - }, - callbacks=callbacks, - ) - - aql_generation_attempt += 1 - - if aql_result is None: - m = f""" - Maximum amount of AQL Query Generation attempts reached. - Unable to execute the AQL Query due to the following error: - {aql_error} - """ - raise ValueError(m) + ######################## + # Retry AQL Generation # + ######################## - ###################### - # Store Query in Cache # - ###################### - - if use_query_cache: - if self.embedding is None: - raise ValueError( - "Embedding must be provided when using query cache" - ) - query_embedding = self.embedding.embed_query(user_input) - self.graph.db.collection("Queries").insert( # type: ignore + aql_generation_output = self.aql_fix_chain.invoke( { - "text": user_input, - "embedding": query_embedding, - "aql": aql_query, - } + "adb_schema": self.graph.schema_yaml, + "aql_query": aql_query, + "aql_error": aql_error, + }, + callbacks=callbacks, ) + aql_generation_attempt += 1 + + if aql_result is None: + m = f""" + Maximum amount of AQL Query Generation attempts reached. 
+ Unable to execute the AQL Query due to the following error: + {aql_error} + """ + raise ValueError(m) + if use_query_cache and cached_query: aql_query = cached_query aql_result = aql_execution_func(aql_query, params) @@ -627,6 +569,13 @@ def _call( _run_manager.on_text( aql_query, color="green", end="\n", verbose=self.verbose ) + else: + _run_manager.on_text( + f"AQL Query ({aql_generation_attempt}):", verbose=self.verbose + ) + _run_manager.on_text( + aql_query, color="green", end="\n", verbose=self.verbose + ) ############################# # Execute/Explain AQL Query # @@ -690,13 +639,14 @@ def _call( result = self.qa_chain.invoke( # type: ignore { "adb_schema": self.graph.schema_yaml, - "user_input": user_input, + "user_input": self._last_user_input, "aql_query": aql_query, "aql_result": aql_result, }, callbacks=callbacks, ) results: Dict[str, Any] = {self.output_key: result} + self._last_aql_query = aql_query if self.return_aql_query: results["aql_query"] = aql_generation_output From a163ebd2837fea8bfdbfb9af0772cbc0522042a8 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Wed, 30 Jul 2025 16:17:10 -0700 Subject: [PATCH 21/45] customize clear_query_cache --- libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 85ba1cd..e33136c 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -445,7 +445,7 @@ def _call( m = "Embedding must be provided when using query cache" raise ValueError(m) - cache_result = self.__get_cached_query() + cache_result = self.__get_cached_query(query_cache_similarity_threshold) if cache_result is not None: cached_query, score = cache_result From 44904828b59e05fed4a42c3b194df4c68e8d8bd4 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Wed, 30 Jul 2025 
17:02:53 -0700 Subject: [PATCH 22/45] handle edge cases for cache_query --- libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index e33136c..0df6405 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -3,7 +3,7 @@ from __future__ import annotations import re -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Dict, List, Optional, Tuple, Union from arango import AQLQueryExecuteError, AQLQueryExplainError from langchain.chains.base import Chain From 630251f59fd3efd3331dc7e73746d3990cc7ded8 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Wed, 30 Jul 2025 19:02:49 -0700 Subject: [PATCH 23/45] normalize user input text, query --- libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 0df6405..686941d 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -571,7 +571,7 @@ def _call( ) else: _run_manager.on_text( - f"AQL Query ({aql_generation_attempt}):", verbose=self.verbose + f"AQL Query ({aql_generation_attempt}):\n", verbose=self.verbose ) _run_manager.on_text( aql_query, color="green", end="\n", verbose=self.verbose From 677ff384f854b7ddba4fdfacd3f94216a70f9c24 Mon Sep 17 00:00:00 2001 From: Anthony Mahanna Date: Mon, 4 Aug 2025 14:03:00 -0400 Subject: [PATCH 24/45] move: AQL query print --- .../langchain_arangodb/chains/graph_qa/arangodb.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py 
b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 686941d..5e94b86 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -515,6 +515,15 @@ def _call( """ raise ValueError(error_msg) + query_message = f"AQL Query ({aql_generation_attempt})" + if cached_query: + query_message += f" (used cached query, score: {score})" + + _run_manager.on_text(query_message, verbose=self.verbose) + _run_manager.on_text( + aql_query, color="green", end="\n", verbose=self.verbose + ) + ############################# # Execute/Explain AQL Query # ############################# From 087a1fdb94a52700f0f510c52e1760bb14f84652 Mon Sep 17 00:00:00 2001 From: Anthony Mahanna Date: Mon, 4 Aug 2025 14:12:14 -0400 Subject: [PATCH 25/45] misc: introduce `assert`, `self.graph._hash`, minor cleanup --- .../langchain_arangodb/chains/graph_qa/arangodb.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 5e94b86..656e6f2 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -383,6 +383,9 @@ def _call( Defaults to 256. 
:type output_string_limit: int """ + if not isinstance(self.graph, ArangoGraph): + raise ValueError("Graph must be an ArangoGraph instance") + _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager() callbacks = _run_manager.get_child() user_input = inputs[self.input_key].strip().lower() @@ -445,6 +448,9 @@ def _call( m = "Embedding must be provided when using query cache" raise ValueError(m) + if not self.graph.db.has_collection(self.query_cache_collection_name): + self.graph.db.create_collection(self.query_cache_collection_name) + cache_result = self.__get_cached_query(query_cache_similarity_threshold) if cache_result is not None: @@ -648,14 +654,14 @@ def _call( result = self.qa_chain.invoke( # type: ignore { "adb_schema": self.graph.schema_yaml, - "user_input": self._last_user_input, + "user_input": user_input, "aql_query": aql_query, "aql_result": aql_result, }, callbacks=callbacks, ) + results: Dict[str, Any] = {self.output_key: result} - self._last_aql_query = aql_query if self.return_aql_query: results["aql_query"] = aql_generation_output From 2eeb9190cf85d4b343b9aadd9f14c7b7905bc309 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Mon, 4 Aug 2025 19:11:12 -0700 Subject: [PATCH 26/45] replace with hashed key and remove _format_aql --- .../chains/graph_qa/arangodb.py | 31 +++++++++++-------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 656e6f2..dc9bfe3 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -3,7 +3,7 @@ from __future__ import annotations import re -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Dict, List, Optional, Tuple from arango import AQLQueryExecuteError, AQLQueryExplainError from langchain.chains.base import Chain @@ -215,7 +215,7 @@ def 
cache_query(self, text: Optional[str] = None, aql: Optional[str] = None) -> if self.embedding is None: raise ValueError("Cannot cache queries without an embedding model.") - if not self.graph.db.has_collection(self.query_cache_collection_name): + if not self.graph.db.has_collection(self.query_cache_collection_name): # type: ignore m = f"Collection {self.query_cache_collection_name} does not exist" # noqa: E501 raise ValueError(m) @@ -254,7 +254,7 @@ def clear_query_cache(self, text: Optional[str] = None) -> str: m = f"Collection {self.query_cache_collection_name} does not exist" raise ValueError(m) - collection = self.graph.db.collection(self.query_cache_collection_name) + collection = self.graph.db.collection(self.query_cache_collection_name) # type: ignore if text is None: collection.truncate() @@ -383,8 +383,8 @@ def _call( Defaults to 256. :type output_string_limit: int """ - if not isinstance(self.graph, ArangoGraph): - raise ValueError("Graph must be an ArangoGraph instance") + if not isinstance(self.graph, GraphStore): + raise ValueError("Graph must be an GraphStore instance") _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager() callbacks = _run_manager.get_child() @@ -448,10 +448,12 @@ def _call( m = "Embedding must be provided when using query cache" raise ValueError(m) - if not self.graph.db.has_collection(self.query_cache_collection_name): - self.graph.db.create_collection(self.query_cache_collection_name) + if not self.graph.db.has_collection(self.query_cache_collection_name): # type: ignore + self.graph.db.create_collection(self.query_cache_collection_name) # type: ignore - cache_result = self.__get_cached_query(query_cache_similarity_threshold) + cache_result = self.__get_cached_query( + user_input, query_cache_similarity_threshold + ) if cache_result is not None: cached_query, score = cache_result @@ -463,7 +465,7 @@ def _call( { "adb_schema": self.graph.schema_yaml, "aql_examples": self.aql_examples, - "user_input": 
self._last_user_input, + "user_input": user_input, }, callbacks=callbacks, ) @@ -471,11 +473,11 @@ def _call( aql_query = "" aql_error = "" aql_result = None - aql_generation_attempt = 0 + aql_generation_attempt = 1 while ( aql_result is None - and aql_generation_attempt < self.max_aql_generation_attempts + and aql_generation_attempt < self.max_aql_generation_attempts + 1 ): if isinstance(aql_generation_output, str): aql_generation_output_content = aql_generation_output @@ -521,9 +523,12 @@ def _call( """ raise ValueError(error_msg) - query_message = f"AQL Query ({aql_generation_attempt})" + query_message = f"AQL Query ({aql_generation_attempt})\n" if cached_query: - query_message += f" (used cached query, score: {score})" + score_string = score if score is not None else "1.0" + query_message = ( + f"AQL Query (used cached query, score: {score_string})\n" # noqa: E501 + ) _run_manager.on_text(query_message, verbose=self.verbose) _run_manager.on_text( From b6e7e262d7706c464da37e21c345feb37d41e25c Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Mon, 4 Aug 2025 22:18:32 -0700 Subject: [PATCH 27/45] add chat history --- .../chains/graph_qa/arangodb.py | 33 +++++++++++++++++++ .../chains/graph_qa/prompts.py | 16 +++++++-- 2 files changed, 46 insertions(+), 3 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index dc9bfe3..860567a 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -54,6 +54,10 @@ class ArangoGraphQAChain(Chain): qa_chain: Runnable[Dict[str, Any], Any] input_key: str = "query" #: :meta private: output_key: str = "result" #: :meta private: + include_history: bool = Field(default=True) + max_history_messages: int = Field(default=5) + chat_history_store: Optional[ArangoChatMessageHistory] = Field(default=None) + top_k: int = 10 """Number of results to return from the 
query""" @@ -383,6 +387,7 @@ def _call( Defaults to 256. :type output_string_limit: int """ + if not isinstance(self.graph, GraphStore): raise ValueError("Graph must be an GraphStore instance") @@ -438,6 +443,19 @@ def _call( self.graph.query if self.execute_aql_query else self.graph.explain ) + # ###################### + # # Get Chat History # + # ###################### + + chat_history = [] + if self.include_history and self.chat_history_store is not None: + chat_history = self.chat_history_store.messages[:self.max_history_messages] + chat_history = [ + f"{'Human' if msg.type == 'human' else 'AI'}: {msg.content}" + for msg in chat_history + ] + formatted_history = " ".join(chat_history) if chat_history else "" + ###################### # Check Query Cache # ###################### @@ -466,6 +484,7 @@ def _call( "adb_schema": self.graph.schema_yaml, "aql_examples": self.aql_examples, "user_input": user_input, + "chat_history": formatted_history, }, callbacks=callbacks, ) @@ -646,6 +665,10 @@ def _call( _run_manager.on_text( str(aql_result), color="green", end="\n", verbose=self.verbose ) + _run_manager.on_text( + str(formatted_history), color="red", end="\n", verbose=self.verbose + ) + if not self.execute_aql_query: result = {self.output_key: aql_query, "aql_result": aql_result} @@ -662,6 +685,7 @@ def _call( "user_input": user_input, "aql_query": aql_query, "aql_result": aql_result, + "chat_history": formatted_history, }, callbacks=callbacks, ) @@ -677,6 +701,15 @@ def _call( self._last_user_input = user_input self._last_aql_query = aql_query + ######################## + # Store Chat History # + ######################## + + if self.chat_history_store: + self.chat_history_store.add_user_message(user_input) + self.chat_history_store.add_ai_message(result) + + return results def _is_read_only_query(self, aql_query: str) -> Tuple[bool, Optional[str]]: diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py 
b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py index 658bbc4..7543db7 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py @@ -3,7 +3,7 @@ AQL_GENERATION_TEMPLATE = """Task: Generate an ArangoDB Query Language (AQL) query from a User Input. -You are an ArangoDB Query Language (AQL) expert responsible for translating a `User Input` into an ArangoDB Query Language (AQL) query. +You are an ArangoDB Query Language (AQL) expert responsible for translating a `User Input` into an ArangoDB Query Language (AQL) query. You may also be given a `Chat History` to help you create the `AQL Query`. You are given an `ArangoDB Schema`. It is a YAML Spec containing: 1. `Graph Schema`: Lists all Graphs within the ArangoDB Database Instance, along with their Edge Relationships. @@ -16,6 +16,7 @@ Things you should do: - Think step by step. - Rely on `ArangoDB Schema` and `AQL Query Examples` (if provided) to generate the query. +- Rely on `Chat History` to help you create the `AQL Query`. - Begin the `AQL Query` by the `WITH` AQL keyword to specify all of the ArangoDB Collections required. - If a `View Schema` is defined and contains analyzers for specific fields, prefer using the View with the `SEARCH` and `ANALYZER` clauses instead of a direct collection scan. - Use `PHRASE(...)`, `TOKENS(...)`, or `IN TOKENS(...)` as appropriate when analyzers are available on a field. @@ -41,11 +42,14 @@ User Input: {user_input} +Chat History (Optional): +{chat_history} + AQL Query: """ AQL_GENERATION_PROMPT = PromptTemplate( - input_variables=["adb_schema", "aql_examples", "user_input"], + input_variables=["adb_schema", "aql_examples", "user_input", "chat_history"], template=AQL_GENERATION_TEMPLATE, ) @@ -92,6 +96,8 @@ You are an ArangoDB Query Language (AQL) expert responsible for creating a well-written `Summary` from the `User Input` and associated `AQL Result`. 
+You may also be given a `Chat History` to help you create the `Summary`. + A user has executed an ArangoDB Query Language query, which has returned the AQL Result in JSON format. You are responsible for creating an `Summary` based on the AQL Result. @@ -100,6 +106,7 @@ - `User Input`: the original question/request of the user, which has been translated into an AQL Query. - `AQL Query`: the AQL equivalent of the `User Input`, translated by another AI Model. Should you deem it to be incorrect, suggest a different AQL Query. - `AQL Result`: the JSON output returned by executing the `AQL Query` within the ArangoDB Database. +- `Chat History`: the chat history between the user and the AI model, which may contain information about the user's request and the AI model's response. Remember to think step by step. @@ -112,6 +119,9 @@ User Input: {user_input} +Chat History (Optional): +{chat_history} + AQL Query: {aql_query} @@ -121,6 +131,6 @@ Summary: """ AQL_QA_PROMPT = PromptTemplate( - input_variables=["adb_schema", "user_input", "aql_query", "aql_result"], + input_variables=["adb_schema", "user_input", "aql_query", "aql_result", "chat_history"], template=AQL_QA_TEMPLATE, ) From 1f2d16a22392e8a786aaabd0cda3a3a71d46423e Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Mon, 4 Aug 2025 23:27:05 -0700 Subject: [PATCH 28/45] emphasize chat history in prompt --- libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py index 7543db7..1b7fc58 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py @@ -13,6 +13,8 @@ You may also be given a set of `AQL Query Examples` to help you create the `AQL Query`. 
If provided, the `AQL Query Examples` should be used as a reference, similar to how `ArangoDB Schema` should be used. +You may also be given a `Chat History` to help you create the `AQL Query`. If provided, the `Chat History` should be used as a reference, similar to how `ArangoDB Schema` should be used. + Things you should do: - Think step by step. - Rely on `ArangoDB Schema` and `AQL Query Examples` (if provided) to generate the query. @@ -96,7 +98,7 @@ You are an ArangoDB Query Language (AQL) expert responsible for creating a well-written `Summary` from the `User Input` and associated `AQL Result`. -You may also be given a `Chat History` to help you create the `Summary`. +You may also be given a `Chat History` to help you create the `Summary`. A user has executed an ArangoDB Query Language query, which has returned the AQL Result in JSON format. You are responsible for creating an `Summary` based on the AQL Result. From f8601ab481ac31467656fb847e6a7a853b28adcf Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Mon, 4 Aug 2025 23:27:55 -0700 Subject: [PATCH 29/45] change chat history format & add summary to the output --- .../chains/graph_qa/arangodb.py | 26 +++++++++++-------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 860567a..2cda70a 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -14,6 +14,7 @@ from langchain_core.prompts import BasePromptTemplate from langchain_core.runnables import Runnable from pydantic import Field +from langchain_core.messages import HumanMessage, AIMessage from langchain_arangodb.chains.graph_qa.prompts import ( AQL_FIX_PROMPT, @@ -449,12 +450,11 @@ def _call( chat_history = [] if self.include_history and self.chat_history_store is not None: - chat_history = 
self.chat_history_store.messages[:self.max_history_messages] - chat_history = [ - f"{'Human' if msg.type == 'human' else 'AI'}: {msg.content}" - for msg in chat_history - ] - formatted_history = " ".join(chat_history) if chat_history else "" + for msg in self.chat_history_store.messages[:self.max_history_messages]: + if msg.type == "human": + chat_history.append(HumanMessage(content=msg.content)) + else: + chat_history.append(AIMessage(content=msg.content)) ###################### # Check Query Cache # @@ -484,7 +484,7 @@ def _call( "adb_schema": self.graph.schema_yaml, "aql_examples": self.aql_examples, "user_input": user_input, - "chat_history": formatted_history, + "chat_history": chat_history, }, callbacks=callbacks, ) @@ -665,9 +665,6 @@ def _call( _run_manager.on_text( str(aql_result), color="green", end="\n", verbose=self.verbose ) - _run_manager.on_text( - str(formatted_history), color="red", end="\n", verbose=self.verbose - ) if not self.execute_aql_query: @@ -685,11 +682,18 @@ def _call( "user_input": user_input, "aql_query": aql_query, "aql_result": aql_result, - "chat_history": formatted_history, + "chat_history": chat_history, }, callbacks=callbacks, ) + # Add summary + text = "Summary:" if self.execute_aql_query else "AQL Explain:" + _run_manager.on_text(text, end="\n", verbose=self.verbose) + _run_manager.on_text( + str(result.content), color="green", end="\n", verbose=self.verbose + ) + results: Dict[str, Any] = {self.output_key: result} if self.return_aql_query: From 9d46e7652c13e185bba38c34c1e03e2b03a0c747 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 5 Aug 2025 14:19:26 -0700 Subject: [PATCH 30/45] handle edge cases & add documentation --- .../chains/graph_qa/arangodb.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 2cda70a..f28fd7a 100644 --- 
a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -56,7 +56,7 @@ class ArangoGraphQAChain(Chain): input_key: str = "query" #: :meta private: output_key: str = "result" #: :meta private: include_history: bool = Field(default=True) - max_history_messages: int = Field(default=5) + max_history_messages: int = Field(default=10) chat_history_store: Optional[ArangoChatMessageHistory] = Field(default=None) @@ -147,6 +147,12 @@ def from_llm( :param query_cache_collection_name: The name of the collection to use for the query cache. :type query_cache_collection_name: str + :param include_history: Whether to include the chat history in the prompt. + :type include_history: bool + :param max_history_messages: The maximum number of messages to include in the chat history. + :type max_history_messages: int + :param chat_history_store: The chat history store to use. + :type chat_history_store: ArangoChatMessageHistory :param qa_prompt: The prompt to use for the QA chain. :type qa_prompt: BasePromptTemplate :param aql_generation_prompt: The prompt to use for the AQL generation chain. 
@@ -448,14 +454,21 @@ def _call( # # Get Chat History # # ###################### + if self.include_history and self.chat_history_store is None: + raise ValueError("Chat message history is required if include_history is True") + + if self.max_history_messages <= 0: + raise ValueError("max_history_messages must be greater than 0") + chat_history = [] if self.include_history and self.chat_history_store is not None: - for msg in self.chat_history_store.messages[:self.max_history_messages]: + for msg in self.chat_history_store.messages[-self.max_history_messages:]: if msg.type == "human": chat_history.append(HumanMessage(content=msg.content)) else: chat_history.append(AIMessage(content=msg.content)) + ###################### # Check Query Cache # ###################### From 6b13a7dc04d6b79e75c1aa21a0358d9b115d0bf5 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 5 Aug 2025 14:21:23 -0700 Subject: [PATCH 31/45] fix inbound/outbound, sort, pronounce issues --- .../chains/graph_qa/prompts.py | 23 ++++++++++++------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py index 1b7fc58..732afaa 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py @@ -13,13 +13,19 @@ You may also be given a set of `AQL Query Examples` to help you create the `AQL Query`. If provided, the `AQL Query Examples` should be used as a reference, similar to how `ArangoDB Schema` should be used. -You may also be given a `Chat History` to help you create the `AQL Query`. If provided, the `Chat History` should be used as a reference, similar to how `ArangoDB Schema` should be used. +You may also be given a Chat History. If provided, use it only as a reference to help clarify the current User Input — for example, to resolve pronouns or implicit references. 
Things you should do: - Think step by step. +- When both INBOUND and OUTBOUND traversals are possible for a given edge, be extra careful to select the direction that accurately reflects the intended relationship based on the user input and the edge semantics. + Use OUTBOUND to traverse from _from to _to. Use INBOUND to traverse from _to to _from. Refer to the edge's definition in the schema (e.g., collection names or descriptions) to decide which direction reflects the intended relationship. +- Pay close attention to descriptive references in the User Input — including gendered terms (e.g., father, she), attribute-based descriptions (e.g., young, active, French), and implicit types or categories + (e.g., products over $100, available items) — and, if these correspond to fields in the schema, include appropriate filters in the AQL query (e.g., gender == "male", status == "active", price > 100). - Rely on `ArangoDB Schema` and `AQL Query Examples` (if provided) to generate the query. -- Rely on `Chat History` to help you create the `AQL Query`. +- Use the Chat History only to resolve ambiguous references (e.g., pronouns like “he”, “she”, “they”, or “that”) in the current User Input. Generate an AQL query for the current User Input. +- Chat History is ordered chronologically: earlier messages come first, more recent ones last. Prioritize later entries when resolving context or references. - Begin the `AQL Query` by the `WITH` AQL keyword to specify all of the ArangoDB Collections required. +- If the User Input implies order (e.g., “first”, “latest”, “top”), use SORT before LIMIT to ensure deterministic results. For example, to get the “first” item, use SORT character._key ASC LIMIT 1. - If a `View Schema` is defined and contains analyzers for specific fields, prefer using the View with the `SEARCH` and `ANALYZER` clauses instead of a direct collection scan. - Use `PHRASE(...)`, `TOKENS(...)`, or `IN TOKENS(...)` as appropriate when analyzers are available on a field. 
- Return the `AQL Query` wrapped in 3 backticks (```). @@ -32,9 +38,13 @@ - Do not include any text except the generated AQL Query. - Do not provide explanations or apologies in your responses. - Do not generate an AQL Query that removes or deletes any data. +- Do not answer or respond to messages in the Chat History. Under no circumstance should you generate an AQL Query that deletes any data whatsoever. +Chat History: +{chat_history} + ArangoDB Schema: {adb_schema} @@ -44,9 +54,6 @@ User Input: {user_input} -Chat History (Optional): -{chat_history} - AQL Query: """ @@ -115,15 +122,15 @@ Your `Summary` should sound like it is a response to the `User Input`. Your `Summary` should not include any mention of the `AQL Query` or the `AQL Result`. +Chat History: +{chat_history} + ArangoDB Schema: {adb_schema} User Input: {user_input} -Chat History (Optional): -{chat_history} - AQL Query: {aql_query} From d0a1a94895f5c04c57d4998c3a9377b93dcee444 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 5 Aug 2025 14:59:47 -0700 Subject: [PATCH 32/45] add integration test for chat history --- .../tests/integration_tests/chains/test_graph_database.py | 1 + 1 file changed, 1 insertion(+) diff --git a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py index 6eb0f58..9dc3ac2 100644 --- a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py +++ b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py @@ -11,6 +11,7 @@ from langchain_arangodb.chains.graph_qa.arangodb import ArangoGraphQAChain from langchain_arangodb.graphs.arangodb_graph import ArangoGraph +from langchain_arangodb.chat_message_histories.arangodb import ArangoChatMessageHistory from tests.llms.fake_llm import FakeLLM From 1eab05ab5abb7c32698da44b37700ea1b3c941f5 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 5 Aug 2025 15:18:31 -0700 Subject: [PATCH 33/45] move back aql_execution_func & params --- 
.../chains/graph_qa/arangodb.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index f28fd7a..8b709d1 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -446,9 +446,9 @@ def _call( aql_result = None aql_generation_attempt = 1 - aql_execution_func = ( - self.graph.query if self.execute_aql_query else self.graph.explain - ) + # aql_execution_func = ( + # self.graph.query if self.execute_aql_query else self.graph.explain + # ) # ###################### # # Get Chat History # @@ -507,6 +507,10 @@ def _call( aql_result = None aql_generation_attempt = 1 + aql_execution_func = ( + self.graph.query if self.execute_aql_query else self.graph.explain + ) + while ( aql_result is None and aql_generation_attempt < self.max_aql_generation_attempts + 1 @@ -572,6 +576,11 @@ def _call( ############################# try: + params = { + "top_k": self.top_k, + "list_limit": self.output_list_limit, + "string_limit": self.output_string_limit, + } aql_result = aql_execution_func(aql_query, params) except (AQLQueryExecuteError, AQLQueryExplainError) as e: aql_error = str(e.error_message) From 83eb040dbddf25c342b1e625ae5c71f2026f937a Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 5 Aug 2025 15:40:07 -0700 Subject: [PATCH 34/45] rename __get_cached_query and fix lint err --- libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 8b709d1..663db1d 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -482,7 +482,7 @@ def _call( if not 
self.graph.db.has_collection(self.query_cache_collection_name): # type: ignore self.graph.db.create_collection(self.query_cache_collection_name) # type: ignore - cache_result = self.__get_cached_query( + cache_result = self._get_cached_query( user_input, query_cache_similarity_threshold ) @@ -580,7 +580,7 @@ def _call( "top_k": self.top_k, "list_limit": self.output_list_limit, "string_limit": self.output_string_limit, - } + } aql_result = aql_execution_func(aql_query, params) except (AQLQueryExecuteError, AQLQueryExplainError) as e: aql_error = str(e.error_message) From be709f7b52f2231c9ced0f8b649ce3f9a51eed31 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 5 Aug 2025 19:48:34 -0700 Subject: [PATCH 35/45] add unit test for chat history --- .../tests/unit_tests/chains/test_graph_qa.py | 64 +++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py b/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py index a2b7b78..8f5d0df 100644 --- a/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py +++ b/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py @@ -654,3 +654,67 @@ def mock_execute(query, bind_vars): # type: ignore # 4. 
Test with query cache disabled result4 = chain.invoke({"query": "What is the name of the first movie?"}) assert result4["result"] == "```FOR m IN Movies LIMIT 1 RETURN m```" + + def test_call_chat_history_mocked( + self, fake_graph_store: FakeGraphStore, mock_chains: Dict[str, Runnable] + ) -> None: + """test _call with chat history""" + + chat_history_store = Mock(spec=ArangoChatMessageHistory) + + # Add fake message history (as objects, not dicts) + chat_history_store.messages = [ + Mock(type="human", content="What is 1+1?"), + Mock(type="ai", content="2"), + Mock(type="human", content="What is 2+2?"), + Mock(type="ai", content="4"), + ] + + # Mock LLM chains + mock_chains[ # type: ignore + "aql_generation_chain" + ].invoke.return_value = "```aql\nFOR m IN Movies RETURN m\n```" # noqa: E501 + mock_chains["qa_chain"].invoke.return_value = AIMessage( # type: ignore + content="Here are the movies." + ) # noqa: E501 + + # Build the chain + chain = ArangoGraphQAChain( + graph=fake_graph_store, + aql_generation_chain=mock_chains["aql_generation_chain"], + aql_fix_chain=mock_chains["aql_fix_chain"], + qa_chain=mock_chains["qa_chain"], + allow_dangerous_requests=True, + include_history=True, + chat_history_store=chat_history_store, + max_history_messages=10, + return_aql_result=True, + ) + + # Run the call + result = chain.invoke({"query": "List all movies"}) + + # LLM received the latest 2 pairs (4 messages) + llm_input = mock_chains["aql_generation_chain"].invoke.call_args[0][0] # type: ignore + chat_history = llm_input["chat_history"] + assert len(chat_history) == 4 + + # result has expected fields + assert result["result"].content == "Here are the movies." 
+ assert result["aql_result"][0]["title"] == "Inception" + + # Error: chat history enabled but store is missing + chain.chat_history_store = None + with pytest.raises( + ValueError, + match="Chat message history is required if include_history is True", + ): + chain.invoke({"query": "List again"}) + + # Error: invalid max_history_messages + chain.chat_history_store = chat_history_store + chain.max_history_messages = 0 + with pytest.raises( + ValueError, match="max_history_messages must be greater than 0" + ): + chain.invoke({"query": "List again"}) From 7fa7f4afe909a1052f5dcccbcb8faa876c1d5a61 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 5 Aug 2025 19:49:34 -0700 Subject: [PATCH 36/45] format & lint --- .../chains/graph_qa/arangodb.py | 19 +++++++++---------- .../chains/graph_qa/prompts.py | 8 +++++++- .../chains/test_graph_database.py | 2 +- 3 files changed, 17 insertions(+), 12 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 663db1d..a92876e 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -10,11 +10,10 @@ from langchain_core.callbacks import CallbackManagerForChainRun from langchain_core.embeddings import Embeddings from langchain_core.language_models import BaseLanguageModel -from langchain_core.messages import AIMessage +from langchain_core.messages import AIMessage, HumanMessage from langchain_core.prompts import BasePromptTemplate from langchain_core.runnables import Runnable from pydantic import Field -from langchain_core.messages import HumanMessage, AIMessage from langchain_arangodb.chains.graph_qa.prompts import ( AQL_FIX_PROMPT, @@ -58,7 +57,6 @@ class ArangoGraphQAChain(Chain): include_history: bool = Field(default=True) max_history_messages: int = Field(default=10) chat_history_store: Optional[ArangoChatMessageHistory] = Field(default=None) - 
top_k: int = 10 """Number of results to return from the query""" @@ -149,7 +147,8 @@ def from_llm( :type query_cache_collection_name: str :param include_history: Whether to include the chat history in the prompt. :type include_history: bool - :param max_history_messages: The maximum number of messages to include in the chat history. + :param max_history_messages: The maximum number of messages to + include in the chat history. :type max_history_messages: int :param chat_history_store: The chat history store to use. :type chat_history_store: ArangoChatMessageHistory @@ -455,18 +454,20 @@ def _call( # ###################### if self.include_history and self.chat_history_store is None: - raise ValueError("Chat message history is required if include_history is True") - + raise ValueError( + "Chat message history is required if include_history is True" + ) + if self.max_history_messages <= 0: raise ValueError("max_history_messages must be greater than 0") chat_history = [] if self.include_history and self.chat_history_store is not None: - for msg in self.chat_history_store.messages[-self.max_history_messages:]: + for msg in self.chat_history_store.messages[-self.max_history_messages :]: if msg.type == "human": chat_history.append(HumanMessage(content=msg.content)) else: - chat_history.append(AIMessage(content=msg.content)) + chat_history.append(AIMessage(content=msg.content)) # type: ignore ###################### @@ -688,7 +689,6 @@ def _call( str(aql_result), color="green", end="\n", verbose=self.verbose ) - if not self.execute_aql_query: result = {self.output_key: aql_query, "aql_result": aql_result} @@ -735,7 +735,6 @@ def _call( self.chat_history_store.add_user_message(user_input) self.chat_history_store.add_ai_message(result) - return results def _is_read_only_query(self, aql_query: str) -> Tuple[bool, Optional[str]]: diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py index 
732afaa..e2a0652 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py @@ -140,6 +140,12 @@ Summary: """ AQL_QA_PROMPT = PromptTemplate( - input_variables=["adb_schema", "user_input", "aql_query", "aql_result", "chat_history"], + input_variables=[ + "adb_schema", + "user_input", + "aql_query", + "aql_result", + "chat_history", + ], template=AQL_QA_TEMPLATE, ) diff --git a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py index 9dc3ac2..d5c5c67 100644 --- a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py +++ b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py @@ -10,8 +10,8 @@ from langchain_core.runnables import RunnableLambda from langchain_arangodb.chains.graph_qa.arangodb import ArangoGraphQAChain -from langchain_arangodb.graphs.arangodb_graph import ArangoGraph from langchain_arangodb.chat_message_histories.arangodb import ArangoChatMessageHistory +from langchain_arangodb.graphs.arangodb_graph import ArangoGraph from tests.llms.fake_llm import FakeLLM From f17db2efa3a5be0b415d8a61ff84711b39581a92 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Mon, 11 Aug 2025 13:47:50 -0700 Subject: [PATCH 37/45] sync with pre-rebase state --- .../chains/graph_qa/arangodb.py | 121 +----------------- .../chains/test_graph_database.py | 65 ++++++++++ .../tests/unit_tests/chains/test_graph_qa.py | 13 +- 3 files changed, 73 insertions(+), 126 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index a92876e..8169f96 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -20,6 +20,7 @@ AQL_GENERATION_PROMPT, AQL_QA_PROMPT, ) +from langchain_arangodb.chat_message_histories.arangodb import 
ArangoChatMessageHistory from langchain_arangodb.graphs.arangodb_graph import ArangoGraph AQL_WRITE_OPERATIONS: List[str] = [ @@ -54,7 +55,7 @@ class ArangoGraphQAChain(Chain): qa_chain: Runnable[Dict[str, Any], Any] input_key: str = "query" #: :meta private: output_key: str = "result" #: :meta private: - include_history: bool = Field(default=True) + include_history: bool = Field(default=False) max_history_messages: int = Field(default=10) chat_history_store: Optional[ArangoChatMessageHistory] = Field(default=None) @@ -393,10 +394,6 @@ def _call( Defaults to 256. :type output_string_limit: int """ - - if not isinstance(self.graph, GraphStore): - raise ValueError("Graph must be an GraphStore instance") - _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager() callbacks = _run_manager.get_child() user_input = inputs[self.input_key].strip().lower() @@ -408,47 +405,6 @@ def _call( if use_query_cache and self.embedding is None: raise ValueError("Cannot enable query cache without passing embedding") - ###################### - # Check Query Cache # - ###################### - - cached_query, score = None, None - if use_query_cache: - if self.embedding is None: - m = "Embedding must be provided when using query cache" - raise ValueError(m) - - if not self.graph.db.has_collection(self.query_cache_collection_name): - self.graph.db.create_collection(self.query_cache_collection_name) - - cache_result = self._get_cached_query( - user_input, query_cache_similarity_threshold - ) - - if cache_result is not None: - cached_query, score = cache_result - - if cached_query: - aql_generation_output = f"```aql{cached_query}```" - else: - aql_generation_output = self.aql_generation_chain.invoke( - { - "adb_schema": self.graph.schema_yaml, - "aql_examples": self.aql_examples, - "user_input": user_input, - }, - callbacks=callbacks, - ) - - aql_query = "" - aql_error = "" - aql_result = None - aql_generation_attempt = 1 - - # aql_execution_func = ( - # self.graph.query if 
self.execute_aql_query else self.graph.explain - # ) - # ###################### # # Get Chat History # # ###################### @@ -469,7 +425,6 @@ def _call( else: chat_history.append(AIMessage(content=msg.content)) # type: ignore - ###################### # Check Query Cache # ###################### @@ -616,73 +571,6 @@ def _call( """ raise ValueError(m) - if use_query_cache and cached_query: - aql_query = cached_query - aql_result = aql_execution_func(aql_query, params) - - query_message = f"AQL Query ({aql_generation_attempt})\n" - if cached_query: - score_string = score if score is not None else "1.0" - query_message = ( - f"AQL Query (used cached query, score: {score_string})\n" # noqa: E501 - ) - - _run_manager.on_text(query_message, verbose=self.verbose) - _run_manager.on_text( - aql_query, color="green", end="\n", verbose=self.verbose - ) - else: - _run_manager.on_text( - f"AQL Query ({aql_generation_attempt}):\n", verbose=self.verbose - ) - _run_manager.on_text( - aql_query, color="green", end="\n", verbose=self.verbose - ) - - ############################# - # Execute/Explain AQL Query # - ############################# - - try: - params = { - "top_k": self.top_k, - "list_limit": self.output_list_limit, - "string_limit": self.output_string_limit, - } - aql_result = aql_execution_func(aql_query, params) - except (AQLQueryExecuteError, AQLQueryExplainError) as e: - aql_error = str(e.error_message) - - _run_manager.on_text( - "AQL Query Execution Error: ", end="\n", verbose=self.verbose - ) - _run_manager.on_text( - aql_error, color="yellow", end="\n\n", verbose=self.verbose - ) - - ######################## - # Retry AQL Generation # - ######################## - - aql_generation_output = self.aql_fix_chain.invoke( - { - "adb_schema": self.graph.schema_yaml, - "aql_query": aql_query, - "aql_error": aql_error, - }, - callbacks=callbacks, - ) - - aql_generation_attempt += 1 - - if aql_result is None: - m = f""" - Maximum amount of AQL Query Generation attempts 
reached. - Unable to execute the AQL Query due to the following error: - {aql_error} - """ - raise ValueError(m) - text = "AQL Result:" if self.execute_aql_query else "AQL Explain:" _run_manager.on_text(text, end="\n", verbose=self.verbose) _run_manager.on_text( @@ -713,7 +601,10 @@ def _call( text = "Summary:" if self.execute_aql_query else "AQL Explain:" _run_manager.on_text(text, end="\n", verbose=self.verbose) _run_manager.on_text( - str(result.content), color="green", end="\n", verbose=self.verbose + str(result.content) if isinstance(result, AIMessage) else result, + color="green", + end="\n", + verbose=self.verbose, ) results: Dict[str, Any] = {self.output_key: result} diff --git a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py index d5c5c67..0edbe78 100644 --- a/libs/arangodb/tests/integration_tests/chains/test_graph_database.py +++ b/libs/arangodb/tests/integration_tests/chains/test_graph_database.py @@ -1063,3 +1063,68 @@ def test_query_cache(db: StandardDatabase) -> None: )() query = chain._get_cached_query("gibberish", 0.99) assert query is None + + +@pytest.mark.usefixtures("clear_arangodb_database") +def test_chat_history(db: StandardDatabase) -> None: + """ + Test chat history that enables context-aware query generation. + """ + # 1. Create required collections + graph = ArangoGraph(db) + db.create_collection("Movies") + db.collection("Movies").insert_many( + [ + {"_key": "matrix", "title": "The Matrix", "year": 1999}, + {"_key": "inception", "title": "Inception", "year": 2010}, + ] + ) + graph.refresh_schema() + + # 2. Create chat history store + history = ArangoChatMessageHistory( + session_id="test", + collection_name="test_chat_sessions", + db=db, + ) + history.clear() + + # 3. 
Dummy LLM: simulate coreference to "The Matrix" + def dummy_llm(prompt): # type: ignore + if "when was it released" in str(prompt).lower(): # type: ignore + return AIMessage( + content="""```aql + WITH Movies + FOR m IN Movies + FILTER m.title == "The Matrix" + RETURN m.year + ```""" + ) + return AIMessage( + content="""```aql + WITH Movies + FOR m IN Movies + SORT m._key ASC + LIMIT 1 + RETURN m.title + ```""" + ) + + dummy_chain = ArangoGraphQAChain.from_llm( + llm=RunnableLambda(dummy_llm), # type: ignore + graph=graph, + allow_dangerous_requests=True, + include_history=True, + max_history_messages=5, + chat_history_store=history, + return_aql_result=True, + return_aql_query=True, + ) + + # 4. Ask initial question + result1 = dummy_chain.invoke({"query": "What is the first movie?"}) + assert "Inception" in result1["aql_result"] + + # 5. Ask follow-up question using pronoun "it" + result2 = dummy_chain.invoke({"query": "When was it released?"}) + assert 1999 in result2["aql_result"] diff --git a/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py b/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py index 8f5d0df..89944df 100644 --- a/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py +++ b/libs/arangodb/tests/unit_tests/chains/test_graph_qa.py @@ -11,6 +11,7 @@ from langchain_core.runnables import Runnable, RunnableLambda from langchain_arangodb.chains.graph_qa.arangodb import ArangoGraphQAChain +from langchain_arangodb.chat_message_histories.arangodb import ArangoChatMessageHistory from langchain_arangodb.graphs.arangodb_graph import ArangoGraph from tests.llms.fake_llm import FakeLLM @@ -28,16 +29,6 @@ def __init__(self) -> None: self.refreshed = False self.graph_documents_added = [] # type: ignore - # Mock the database interface - self.db = Mock() - self.db.collection = Mock() - mock_queries_collection = Mock() - mock_queries_collection.find = Mock(return_value=[]) - mock_queries_collection.insert = Mock() - self.db.collection.return_value = 
mock_queries_collection - self.db.aql = Mock() - self.db.aql.execute = Mock(return_value=[]) - # Mock the database interface self.__db = Mock() self.__db.collection = Mock() @@ -655,7 +646,7 @@ def mock_execute(query, bind_vars): # type: ignore result4 = chain.invoke({"query": "What is the name of the first movie?"}) assert result4["result"] == "```FOR m IN Movies LIMIT 1 RETURN m```" - def test_call_chat_history_mocked( + def test_chat_history( self, fake_graph_store: FakeGraphStore, mock_chains: Dict[str, Runnable] ) -> None: """test _call with chat history""" From a52ba429c2cebbb08a7d523e14369973d1b27a9c Mon Sep 17 00:00:00 2001 From: Anthony Mahanna Date: Tue, 12 Aug 2025 13:07:24 -0400 Subject: [PATCH 38/45] new: parameter override at runtime --- .../chains/graph_qa/arangodb.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 8169f96..77a1363 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -55,6 +55,8 @@ class ArangoGraphQAChain(Chain): qa_chain: Runnable[Dict[str, Any], Any] input_key: str = "query" #: :meta private: output_key: str = "result" #: :meta private: + use_query_cache: bool = Field(default=False) + query_cache_similarity_threshold: float = Field(default=0.80) include_history: bool = Field(default=False) max_history_messages: int = Field(default=10) chat_history_store: Optional[ArangoChatMessageHistory] = Field(default=None) @@ -397,11 +399,17 @@ def _call( _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager() callbacks = _run_manager.get_child() user_input = inputs[self.input_key].strip().lower() - use_query_cache = inputs.get("use_query_cache", False) + + # Query Cache Parameters (can be overridden by inputs at runtime) + use_query_cache = 
inputs.get("use_query_cache", self.use_query_cache) query_cache_similarity_threshold = inputs.get( - "query_cache_similarity_threshold", 0.80 + "query_cache_similarity_threshold", self.query_cache_similarity_threshold ) + # Chat History Parameters (can be overridden by inputs at runtime) + include_history = inputs.get("include_history", self.include_history) + max_history_messages = inputs.get("max_history_messages", self.max_history_messages) + if use_query_cache and self.embedding is None: raise ValueError("Cannot enable query cache without passing embedding") @@ -409,16 +417,16 @@ def _call( # # Get Chat History # # ###################### - if self.include_history and self.chat_history_store is None: + if include_history and self.chat_history_store is None: raise ValueError( "Chat message history is required if include_history is True" ) - if self.max_history_messages <= 0: + if max_history_messages <= 0: raise ValueError("max_history_messages must be greater than 0") chat_history = [] - if self.include_history and self.chat_history_store is not None: + if include_history and self.chat_history_store is not None: for msg in self.chat_history_store.messages[-self.max_history_messages :]: if msg.type == "human": chat_history.append(HumanMessage(content=msg.content)) From c287077e680a9dc11e362fa8334d21ecb22ddf4a Mon Sep 17 00:00:00 2001 From: MonikaLiu <144009805+anyxling@users.noreply.github.com> Date: Tue, 12 Aug 2025 11:22:13 -0700 Subject: [PATCH 39/45] instantiate w/ ternary operator Co-authored-by: Anthony Mahanna <43019056+aMahanna@users.noreply.github.com> --- .../arangodb/langchain_arangodb/chains/graph_qa/arangodb.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 77a1363..8caeb76 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ 
b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -428,10 +428,8 @@ def _call( chat_history = [] if include_history and self.chat_history_store is not None: for msg in self.chat_history_store.messages[-self.max_history_messages :]: - if msg.type == "human": - chat_history.append(HumanMessage(content=msg.content)) - else: - chat_history.append(AIMessage(content=msg.content)) # type: ignore + cls = HumanMessage if msg.type == "human" else AIMessage + chat_history.append(cls(content=msg.content)) ###################### # Check Query Cache # From f21f5178486b85d4999920d320602f794573dc16 Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 12 Aug 2025 14:11:44 -0700 Subject: [PATCH 40/45] remove chat history from aql2text --- .../arangodb/langchain_arangodb/chains/graph_qa/arangodb.py | 3 +-- libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py | 6 ------ 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 8caeb76..6868b10 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -427,7 +427,7 @@ def _call( chat_history = [] if include_history and self.chat_history_store is not None: - for msg in self.chat_history_store.messages[-self.max_history_messages :]: + for msg in self.chat_history_store.messages[-self.max_history_messages:]: cls = HumanMessage if msg.type == "human" else AIMessage chat_history.append(cls(content=msg.content)) @@ -598,7 +598,6 @@ def _call( "user_input": user_input, "aql_query": aql_query, "aql_result": aql_result, - "chat_history": chat_history, }, callbacks=callbacks, ) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py index e2a0652..b368c89 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py +++ 
b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py @@ -105,8 +105,6 @@ You are an ArangoDB Query Language (AQL) expert responsible for creating a well-written `Summary` from the `User Input` and associated `AQL Result`. -You may also be given a `Chat History` to help you create the `Summary`. - A user has executed an ArangoDB Query Language query, which has returned the AQL Result in JSON format. You are responsible for creating an `Summary` based on the AQL Result. @@ -115,16 +113,12 @@ - `User Input`: the original question/request of the user, which has been translated into an AQL Query. - `AQL Query`: the AQL equivalent of the `User Input`, translated by another AI Model. Should you deem it to be incorrect, suggest a different AQL Query. - `AQL Result`: the JSON output returned by executing the `AQL Query` within the ArangoDB Database. -- `Chat History`: the chat history between the user and the AI model, which may contain information about the user's request and the AI model's response. Remember to think step by step. Your `Summary` should sound like it is a response to the `User Input`. Your `Summary` should not include any mention of the `AQL Query` or the `AQL Result`. 
-Chat History: -{chat_history} - ArangoDB Schema: {adb_schema} From b0030f36fcc3432494d2c984818eca5bb260a6f4 Mon Sep 17 00:00:00 2001 From: MonikaLiu <144009805+anyxling@users.noreply.github.com> Date: Tue, 12 Aug 2025 14:12:32 -0700 Subject: [PATCH 41/45] simplify printing of summary Co-authored-by: Anthony Mahanna <43019056+aMahanna@users.noreply.github.com> --- libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 8caeb76..3a6f032 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -604,7 +604,7 @@ def _call( ) # Add summary - text = "Summary:" if self.execute_aql_query else "AQL Explain:" + text = "Summary:" _run_manager.on_text(text, end="\n", verbose=self.verbose) _run_manager.on_text( str(result.content) if isinstance(result, AIMessage) else result, From d7841ebca55960c32b3db98634d3a28b41a4e512 Mon Sep 17 00:00:00 2001 From: MonikaLiu <144009805+anyxling@users.noreply.github.com> Date: Tue, 12 Aug 2025 14:13:57 -0700 Subject: [PATCH 42/45] remove sort in prompt Co-authored-by: Anthony Mahanna <43019056+aMahanna@users.noreply.github.com> --- libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py | 1 - 1 file changed, 1 deletion(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py index e2a0652..ae6e994 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py @@ -25,7 +25,6 @@ - Use the Chat History only to resolve ambiguous references (e.g., pronouns like “he”, “she”, “they”, or “that”) in the current User Input. Generate an AQL query for the current User Input. 
- Chat History is ordered chronologically: earlier messages come first, more recent ones last. Prioritize later entries when resolving context or references. - Begin the `AQL Query` by the `WITH` AQL keyword to specify all of the ArangoDB Collections required. -- If the User Input implies order (e.g., “first”, “latest”, “top”), use SORT before LIMIT to ensure deterministic results. For example, to get the “first” item, use SORT character._key ASC LIMIT 1. - If a `View Schema` is defined and contains analyzers for specific fields, prefer using the View with the `SEARCH` and `ANALYZER` clauses instead of a direct collection scan. - Use `PHRASE(...)`, `TOKENS(...)`, or `IN TOKENS(...)` as appropriate when analyzers are available on a field. - Return the `AQL Query` wrapped in 3 backticks (```). From a5778eadedf2dbf9d0834431c27717dbdc65d04c Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 12 Aug 2025 17:20:08 -0700 Subject: [PATCH 43/45] disable history in prompt --- .../langchain_arangodb/chains/graph_qa/arangodb.py | 2 +- .../langchain_arangodb/chains/graph_qa/prompts.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 2de9d82..c69730c 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -427,7 +427,7 @@ def _call( chat_history = [] if include_history and self.chat_history_store is not None: - for msg in self.chat_history_store.messages[-self.max_history_messages:]: + for msg in self.chat_history_store.messages[-max_history_messages:]: cls = HumanMessage if msg.type == "human" else AIMessage chat_history.append(cls(content=msg.content)) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py index 7fea794..f06eed1 100644 --- 
a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py @@ -13,7 +13,7 @@ You may also be given a set of `AQL Query Examples` to help you create the `AQL Query`. If provided, the `AQL Query Examples` should be used as a reference, similar to how `ArangoDB Schema` should be used. -You may also be given a Chat History. If provided, use it only as a reference to help clarify the current User Input — for example, to resolve pronouns or implicit references. +You may also be given a Chat History. If it is not empty, use it only as a reference to help clarify the current User Input — for example, to resolve pronouns or implicit references. If the Chat History is empty, do not use it or refer to it in any way. Treat the User Input as a fully self-contained and standalone question. Things you should do: - Think step by step. @@ -22,7 +22,6 @@ - Pay close attention to descriptive references in the User Input — including gendered terms (e.g., father, she), attribute-based descriptions (e.g., young, active, French), and implicit types or categories (e.g., products over $100, available items) — and, if these correspond to fields in the schema, include appropriate filters in the AQL query (e.g., gender == "male", status == "active", price > 100). - Rely on `ArangoDB Schema` and `AQL Query Examples` (if provided) to generate the query. -- Use the Chat History only to resolve ambiguous references (e.g., pronouns like “he”, “she”, “they”, or “that”) in the current User Input. Generate an AQL query for the current User Input. - Chat History is ordered chronologically: earlier messages come first, more recent ones last. Prioritize later entries when resolving context or references. - Begin the `AQL Query` by the `WITH` AQL keyword to specify all of the ArangoDB Collections required. 
- If a `View Schema` is defined and contains analyzers for specific fields, prefer using the View with the `SEARCH` and `ANALYZER` clauses instead of a direct collection scan. @@ -33,6 +32,8 @@ - If a request is unrelated to generating AQL Query, say that you cannot help the user. Things you should not do: +- Do not use or refer to Chat History if it is empty. +- Do not assume any previously discussed context, or try to resolve pronouns or references to prior questions if the Chat History is empty. - Do not use any properties/relationships that can't be inferred from the `ArangoDB Schema` or the `AQL Query Examples`. - Do not include any text except the generated AQL Query. - Do not provide explanations or apologies in your responses. @@ -41,7 +42,7 @@ Under no circumstance should you generate an AQL Query that deletes any data whatsoever. -Chat History: +Chat History (Optional): {chat_history} ArangoDB Schema: @@ -138,7 +139,6 @@ "user_input", "aql_query", "aql_result", - "chat_history", ], template=AQL_QA_TEMPLATE, ) From f7ac176f1a20be4a8d8da1ea71362e05a1f12d3f Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 12 Aug 2025 17:38:45 -0700 Subject: [PATCH 44/45] add aql query to llm input --- .../langchain_arangodb/chains/graph_qa/arangodb.py | 1 + .../arangodb/langchain_arangodb/chains/graph_qa/prompts.py | 7 +++++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index c69730c..429faaf 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -629,6 +629,7 @@ def _call( if self.chat_history_store: self.chat_history_store.add_user_message(user_input) + self.chat_history_store.add_ai_message(aql_query) self.chat_history_store.add_ai_message(result) return results diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py 
b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py
index f06eed1..21a724e 100644
--- a/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py
+++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/prompts.py
@@ -13,7 +13,11 @@
 You may also be given a set of `AQL Query Examples` to help you create the `AQL Query`. If provided, the `AQL Query Examples` should be used as a reference, similar to how `ArangoDB Schema` should be used.
 
-You may also be given a Chat History. If it is not empty, use it only as a reference to help clarify the current User Input — for example, to resolve pronouns or implicit references. If the Chat History is empty, do not use it or refer to it in any way. Treat the User Input as a fully self-contained and standalone question.
+Rules for Using Chat History:
+- If the Chat History is not empty, use it only as a reference to help clarify the current User Input — for example, to resolve pronouns or implicit references.
+- Chat History is ordered chronologically. Prioritize latest entries when resolving context or references.
+- If the Chat History is empty, do not use it or refer to it in any way. Treat the User Input as a fully self-contained and standalone question.
+- The Chat History includes the User Input, the AQL Query generated by the AI Model, and the interpretation of the AQL Result. Use all of them to generate the AQL Query.
 
 Things you should do:
 - Think step by step.
@@ -22,7 +26,6 @@
 - Pay close attention to descriptive references in the User Input — including gendered terms (e.g., father, she), attribute-based descriptions (e.g., young, active, French), and implicit types or categories (e.g., products over $100, available items) — and, if these correspond to fields in the schema, include appropriate filters in the AQL query (e.g., gender == "male", status == "active", price > 100).
 - Rely on `ArangoDB Schema` and `AQL Query Examples` (if provided) to generate the query.
-- Chat History is ordered chronologically: earlier messages come first, more recent ones last. Prioritize later entries when resolving context or references. - Begin the `AQL Query` by the `WITH` AQL keyword to specify all of the ArangoDB Collections required. - If a `View Schema` is defined and contains analyzers for specific fields, prefer using the View with the `SEARCH` and `ANALYZER` clauses instead of a direct collection scan. - Use `PHRASE(...)`, `TOKENS(...)`, or `IN TOKENS(...)` as appropriate when analyzers are available on a field. From cb934e00c5788a2b2caebf92030514115a83a49f Mon Sep 17 00:00:00 2001 From: MonikaLiu Date: Tue, 12 Aug 2025 17:48:56 -0700 Subject: [PATCH 45/45] remove type ignore --- .../langchain_arangodb/chains/graph_qa/arangodb.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py index 429faaf..e29df44 100644 --- a/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py +++ b/libs/arangodb/langchain_arangodb/chains/graph_qa/arangodb.py @@ -228,7 +228,7 @@ def cache_query(self, text: Optional[str] = None, aql: Optional[str] = None) -> if self.embedding is None: raise ValueError("Cannot cache queries without an embedding model.") - if not self.graph.db.has_collection(self.query_cache_collection_name): # type: ignore + if not self.graph.db.has_collection(self.query_cache_collection_name): m = f"Collection {self.query_cache_collection_name} does not exist" # noqa: E501 raise ValueError(m) @@ -267,7 +267,7 @@ def clear_query_cache(self, text: Optional[str] = None) -> str: m = f"Collection {self.query_cache_collection_name} does not exist" raise ValueError(m) - collection = self.graph.db.collection(self.query_cache_collection_name) # type: ignore + collection = self.graph.db.collection(self.query_cache_collection_name) if text is None: collection.truncate() @@ -408,7 +408,9 @@ def 
_call( # Chat History Parameters (can be overridden by inputs at runtime) include_history = inputs.get("include_history", self.include_history) - max_history_messages = inputs.get("max_history_messages", self.max_history_messages) + max_history_messages = inputs.get( + "max_history_messages", self.max_history_messages + ) if use_query_cache and self.embedding is None: raise ValueError("Cannot enable query cache without passing embedding") @@ -441,8 +443,8 @@ def _call( m = "Embedding must be provided when using query cache" raise ValueError(m) - if not self.graph.db.has_collection(self.query_cache_collection_name): # type: ignore - self.graph.db.create_collection(self.query_cache_collection_name) # type: ignore + if not self.graph.db.has_collection(self.query_cache_collection_name): + self.graph.db.create_collection(self.query_cache_collection_name) cache_result = self._get_cached_query( user_input, query_cache_similarity_threshold