From 779f4c0c17d165561f37103c4ef7b61631ae9a90 Mon Sep 17 00:00:00 2001 From: gayathrivijayakumar Date: Fri, 2 Aug 2024 13:42:54 +0530 Subject: [PATCH 1/4] Llama-index-upgrade --- pyproject.toml | 37 +++++++++---------- .../vectordb/weaviate/src/weaviate.py | 21 ++++++----- src/unstract/sdk/utils/token_counter.py | 24 +++++------- 3 files changed, 39 insertions(+), 43 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 65127df3..07472913 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,46 +12,43 @@ dependencies = [ "python-magic~=0.4.27", "python-dotenv==1.0.0", # Adapter changes - "llama-index==0.10.38", - # Temporary hack to get out of llama-index issues - # To be removed once llama-index version is upgraded - "llama-index-core==0.10.56", + "llama-index==0.10.58", "tiktoken~=0.4.0", "transformers==4.37.0", "llama-index-embeddings-google==0.1.5", - "llama-index-embeddings-azure-openai==0.1.6", + "llama-index-embeddings-azure-openai==0.1.11", # Disabling Hugging Face & FastEmbed to # keep the image size under check # "llama-index-embeddings-huggingface==0.2.0", # Disabling fast embed due to high processing power # "llama-index-embeddings-fastembed==0.1.4", "llama-index-embeddings-openai==0.1.11", - "llama-index-embeddings-azure-openai==0.1.6", + "llama-index-embeddings-azure-openai==0.1.11", "llama-index-embeddings-ollama==0.1.2", - "llama-index-vector-stores-postgres==0.1.3", + "llama-index-vector-stores-postgres==0.1.13", # Including Supabase conflicts with postgres on pg-vector. 
# Hence, commenting it out at the moment # "llama-index-vector-stores-supabase==0.1.3", - "llama-index-vector-stores-milvus==0.1.18", - "llama-index-vector-stores-weaviate==0.1.4", - "llama-index-vector-stores-pinecone==0.1.4", - "llama-index-vector-stores-qdrant==0.2.8", - "llama-index-llms-openai==0.1.26", + "llama-index-vector-stores-milvus==0.1.21", + "llama-index-vector-stores-weaviate==1.0.2", + "llama-index-vector-stores-pinecone==0.1.8", + "llama-index-vector-stores-qdrant==0.2.14", + "llama-index-llms-openai==0.1.27", "llama-index-llms-palm==0.1.5", - "llama-index-llms-mistralai==0.1.10", - "llama-index-llms-anyscale==0.1.3", - "llama-index-llms-anthropic==0.1.11", - "llama-index-llms-azure-openai==0.1.5", - "llama-index-llms-vertex==0.1.8", + "llama-index-llms-mistralai==0.1.19", + "llama-index-llms-anyscale==0.1.4", + "llama-index-llms-anthropic==0.1.16", + "llama-index-llms-azure-openai==0.1.10", + "llama-index-llms-vertex==0.2.2", "llama-index-llms-replicate==0.1.3", - "llama-index-llms-ollama==0.1.3", + "llama-index-llms-ollama==0.2.2", + # For Llama Parse X2Text + "llama-parse==0.4.9", # OCR "filetype~=1.2.0", # Others # For singleton classes "singleton-decorator~=1.0.0", - # For Llama Parse X2Text - "llama-parse==0.4.1", "httpx>=0.25.2", ] readme = "README.md" diff --git a/src/unstract/sdk/adapters/vectordb/weaviate/src/weaviate.py b/src/unstract/sdk/adapters/vectordb/weaviate/src/weaviate.py index 0a16501d..5b5c86f1 100644 --- a/src/unstract/sdk/adapters/vectordb/weaviate/src/weaviate.py +++ b/src/unstract/sdk/adapters/vectordb/weaviate/src/weaviate.py @@ -5,7 +5,8 @@ import weaviate from llama_index.core.vector_stores.types import BasePydanticVectorStore from llama_index.vector_stores.weaviate import WeaviateVectorStore -from weaviate import UnexpectedStatusCodeException +from weaviate.classes.init import Auth +from weaviate.exceptions import UnexpectedStatusCodeException from unstract.sdk.adapters.exceptions import AdapterError from 
unstract.sdk.adapters.vectordb.constants import VectorDbConstants @@ -64,11 +65,9 @@ def _get_vector_db_instance(self) -> BasePydanticVectorStore: # LLama-index throws the error if not capitalised while using # Weaviate self._collection_name = collection_name.capitalize() - self._client = weaviate.Client( - url=str(self._config.get(Constants.URL)), - auth_client_secret=weaviate.AuthApiKey( - api_key=str(self._config.get(Constants.API_KEY)) - ), + self._client = weaviate.connect_to_weaviate_cloud( + cluster_url=str(self._config.get(Constants.URL)), + auth_credentials=Auth.api_key(str(self._config.get(Constants.API_KEY))), ) try: @@ -78,8 +77,8 @@ def _get_vector_db_instance(self) -> BasePydanticVectorStore: "class": self._collection_name, "vectorizer": "none", } - # Add the class to the schema - self._client.schema.create_class(class_obj) + # Create the collection + self._client.collections.create_from_dict(class_obj) except Exception as e: if isinstance(e, UnexpectedStatusCodeException): if "already exists" in e.message: @@ -101,5 +100,9 @@ def test_connection(self) -> bool: ) # Delete the collection that was created for testing if self._client is not None: - self._client.schema.delete_class(self._collection_name) + self._client.collections.delete(self._collection_name) return test_result + + def close(self, **kwargs: Any) -> None: + if self._client: + self._client.close(**kwargs) diff --git a/src/unstract/sdk/utils/token_counter.py b/src/unstract/sdk/utils/token_counter.py index d54812c8..3337f6a0 100644 --- a/src/unstract/sdk/utils/token_counter.py +++ b/src/unstract/sdk/utils/token_counter.py @@ -39,7 +39,7 @@ def get_llm_token_counts(payload: dict[str, Any]) -> TokenCounter: if EventPayload.PROMPT in payload: completion_raw = payload.get(EventPayload.COMPLETION).raw if completion_raw: - if completion_raw.get(Constants.KEY_USAGE): + if hasattr(completion_raw, Constants.KEY_USAGE): token_counts: dict[ str, int ] = 
TokenCounter._get_prompt_completion_tokens(completion_raw) @@ -47,14 +47,12 @@ def get_llm_token_counts(payload: dict[str, Any]) -> TokenCounter: input_tokens=token_counts[Constants.PROMPT_TOKENS], output_tokens=token_counts[Constants.COMPLETION_TOKENS], ) - elif completion_raw.get(Constants.KEY_RAW_RESPONSE): + elif hasattr(completion_raw, Constants.KEY_RAW_RESPONSE): if hasattr( - completion_raw.get(Constants.KEY_RAW_RESPONSE), + completion_raw._raw_response, Constants.KEY_USAGE_METADATA, ): - usage = completion_raw.get( - Constants.KEY_RAW_RESPONSE - ).usage_metadata + usage = completion_raw._raw_response.usage_metadata token_counter = TokenCounter( input_tokens=usage.prompt_token_count, output_tokens=usage.candidates_token_count, @@ -62,12 +60,10 @@ def get_llm_token_counts(payload: dict[str, Any]) -> TokenCounter: else: prompt_tokens = Constants.DEFAULT_TOKEN_COUNT completion_tokens = Constants.DEFAULT_TOKEN_COUNT - if completion_raw.get(Constants.KEY_PROMPT_EVAL_COUNT): - prompt_tokens = completion_raw.get( - Constants.KEY_PROMPT_EVAL_COUNT - ) - if completion_raw.get(Constants.KEY_EVAL_COUNT): - completion_tokens = completion_raw.get(Constants.KEY_EVAL_COUNT) + if hasattr(completion_raw, Constants.KEY_PROMPT_EVAL_COUNT): + prompt_tokens = completion_raw.prompt_eval_count + if hasattr(completion_raw, Constants.KEY_EVAL_COUNT): + completion_tokens = completion_raw.eval_count token_counter = TokenCounter( input_tokens=prompt_tokens, output_tokens=completion_tokens, @@ -90,8 +86,8 @@ def _get_prompt_completion_tokens(response) -> dict[str, int]: prompt_tokens = Constants.DEFAULT_TOKEN_COUNT completion_tokens = Constants.DEFAULT_TOKEN_COUNT - usage = response.get(Constants.KEY_USAGE) - if usage: + if hasattr(response, Constants.KEY_USAGE): + usage = response.usage if hasattr(usage, Constants.INPUT_TOKENS): prompt_tokens = usage.input_tokens elif hasattr(usage, Constants.PROMPT_TOKENS): From 684f23a3976076d8968afc25eff91c384f9bebca Mon Sep 17 00:00:00 2001 From: 
gayathrivijayakumar Date: Fri, 2 Aug 2024 13:44:18 +0530 Subject: [PATCH 2/4] Upgrade SDK version --- src/unstract/sdk/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/unstract/sdk/__init__.py b/src/unstract/sdk/__init__.py index e044f050..b3246670 100644 --- a/src/unstract/sdk/__init__.py +++ b/src/unstract/sdk/__init__.py @@ -1,4 +1,4 @@ -__version__ = "0.40.0" +__version__ = "0.41.0" def get_sdk_version(): From 8508890326bacd8a05b2f407553c6ddbd5419998 Mon Sep 17 00:00:00 2001 From: gayathrivijayakumar Date: Mon, 5 Aug 2024 11:38:23 +0530 Subject: [PATCH 3/4] pdm lock file update --- pdm.lock | 512 ++++++++++++++++++++++++++++++++----------------------- 1 file changed, 303 insertions(+), 209 deletions(-) diff --git a/pdm.lock b/pdm.lock index 461b1e11..06ad64d4 100644 --- a/pdm.lock +++ b/pdm.lock @@ -5,15 +5,27 @@ groups = ["default", "docs", "lint", "test"] strategy = ["cross_platform", "inherit_metadata"] lock_version = "4.4.1" -content_hash = "sha256:cf364e6a4d2acace552281bc7e34640d6f5cb0070350efd29175ce4b12a6a182" +content_hash = "sha256:147e1b2304da936e35ec6f07ad09ac257699abcf8bdc56281dcabf72c0e891eb" + +[[package]] +name = "aiohappyeyeballs" +version = "2.3.4" +requires_python = "<4.0,>=3.8" +summary = "Happy Eyeballs for asyncio" +groups = ["default"] +files = [ + {file = "aiohappyeyeballs-2.3.4-py3-none-any.whl", hash = "sha256:40a16ceffcf1fc9e142fd488123b2e218abc4188cf12ac20c67200e1579baa42"}, + {file = "aiohappyeyeballs-2.3.4.tar.gz", hash = "sha256:7e1ae8399c320a8adec76f6c919ed5ceae6edd4c3672f4d9eae2b27e37c80ff6"}, +] [[package]] name = "aiohttp" -version = "3.9.5" +version = "3.10.1" requires_python = ">=3.8" summary = "Async http client/server framework (asyncio)" groups = ["default"] dependencies = [ + "aiohappyeyeballs>=2.3.0", "aiosignal>=1.1.2", "async-timeout<5.0,>=4.0; python_version < \"3.11\"", "attrs>=17.3.0", @@ -22,52 +34,52 @@ dependencies = [ "yarl<2.0,>=1.0", ] files = [ - {file = 
"aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, - {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, - {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, - {file = 
"aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, - {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, 
- {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, - {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, - {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, + {file = "aiohttp-3.10.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:47b4c2412960e64d97258f40616efddaebcb34ff664c8a972119ed38fac2a62c"}, + {file = "aiohttp-3.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7dbf637f87dd315fa1f36aaed8afa929ee2c607454fb7791e74c88a0d94da59"}, + {file = "aiohttp-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c8fb76214b5b739ce59e2236a6489d9dc3483649cfd6f563dbf5d8e40dbdd57d"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c577cdcf8f92862363b3d598d971c6a84ed8f0bf824d4cc1ce70c2fb02acb4a"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:777e23609899cb230ad2642b4bdf1008890f84968be78de29099a8a86f10b261"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b07286a1090483799599a2f72f76ac396993da31f6e08efedb59f40876c144fa"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9db600a86414a9a653e3c1c7f6a2f6a1894ab8f83d11505247bd1b90ad57157"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c3f1eb280008e51965a8d160a108c333136f4a39d46f516c64d2aa2e6a53f2"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f5dd109a925fee4c9ac3f6a094900461a2712df41745f5d04782ebcbe6479ccb"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8c81ff4afffef9b1186639506d70ea90888218f5ddfff03870e74ec80bb59970"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:2a384dfbe8bfebd203b778a30a712886d147c61943675f4719b56725a8bbe803"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b9fb6508893dc31cfcbb8191ef35abd79751db1d6871b3e2caee83959b4d91eb"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:88596384c3bec644a96ae46287bb646d6a23fa6014afe3799156aef42669c6bd"}, + {file = "aiohttp-3.10.1-cp310-cp310-win32.whl", hash = "sha256:68164d43c580c2e8bf8e0eb4960142919d304052ccab92be10250a3a33b53268"}, + {file = "aiohttp-3.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:d6bbe2c90c10382ca96df33b56e2060404a4f0f88673e1e84b44c8952517e5f3"}, + {file = "aiohttp-3.10.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6979b4f20d3e557a867da9d9227de4c156fcdcb348a5848e3e6190fd7feb972"}, + {file = "aiohttp-3.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03c0c380c83f8a8d4416224aafb88d378376d6f4cadebb56b060688251055cd4"}, + {file = "aiohttp-3.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c2b104e81b3c3deba7e6f5bc1a9a0e9161c380530479970766a6655b8b77c7c"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b023b68c61ab0cd48bd38416b421464a62c381e32b9dc7b4bdfa2905807452a4"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a07c76a82390506ca0eabf57c0540cf5a60c993c442928fe4928472c4c6e5e6"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:41d8dab8c64ded1edf117d2a64f353efa096c52b853ef461aebd49abae979f16"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:615348fab1a9ef7d0960a905e83ad39051ae9cb0d2837da739b5d3a7671e497a"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:256ee6044214ee9d66d531bb374f065ee94e60667d6bbeaa25ca111fc3997158"}, + {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b7d5bb926805022508b7ddeaad957f1fce7a8d77532068d7bdb431056dc630cd"}, + {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:028faf71b338f069077af6315ad54281612705d68889f5d914318cbc2aab0d50"}, + {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5c12310d153b27aa630750be44e79313acc4e864c421eb7d2bc6fa3429c41bf8"}, + {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:de1a91d5faded9054957ed0a9e01b9d632109341942fc123947ced358c5d9009"}, + {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9c186b270979fb1dee3ababe2d12fb243ed7da08b30abc83ebac3a928a4ddb15"}, + {file = "aiohttp-3.10.1-cp311-cp311-win32.whl", hash = "sha256:4a9ce70f5e00380377aac0e568abd075266ff992be2e271765f7b35d228a990c"}, + {file = "aiohttp-3.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:a77c79bac8d908d839d32c212aef2354d2246eb9deb3e2cb01ffa83fb7a6ea5d"}, + {file = "aiohttp-3.10.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:440954ddc6b77257e67170d57b1026aa9545275c33312357472504eef7b4cc0b"}, + {file = 
"aiohttp-3.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f9f8beed277488a52ee2b459b23c4135e54d6a819eaba2e120e57311015b58e9"}, + {file = "aiohttp-3.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8a8221a63602008550022aa3a4152ca357e1dde7ab3dd1da7e1925050b56863"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a702bd3663b5cbf3916e84bf332400d24cdb18399f0877ca6b313ce6c08bfb43"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1988b370536eb14f0ce7f3a4a5b422ab64c4e255b3f5d7752c5f583dc8c967fc"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ccf1f0a304352c891d124ac1a9dea59b14b2abed1704aaa7689fc90ef9c5be1"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc3ea6ef2a83edad84bbdb5d96e22f587b67c68922cd7b6f9d8f24865e655bcf"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b47c125ab07f0831803b88aeb12b04c564d5f07a1c1a225d4eb4d2f26e8b5e"}, + {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:21778552ef3d44aac3278cc6f6d13a6423504fa5f09f2df34bfe489ed9ded7f5"}, + {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bde0693073fd5e542e46ea100aa6c1a5d36282dbdbad85b1c3365d5421490a92"}, + {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bf66149bb348d8e713f3a8e0b4f5b952094c2948c408e1cfef03b49e86745d60"}, + {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:587237571a85716d6f71f60d103416c9df7d5acb55d96d3d3ced65f39bff9c0c"}, + {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:bfe33cba6e127d0b5b417623c9aa621f0a69f304742acdca929a9fdab4593693"}, + {file = "aiohttp-3.10.1-cp39-cp39-win32.whl", hash = 
"sha256:9fbff00646cf8211b330690eb2fd64b23e1ce5b63a342436c1d1d6951d53d8dd"}, + {file = "aiohttp-3.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:5951c328f9ac42d7bce7a6ded535879bc9ae13032818d036749631fa27777905"}, + {file = "aiohttp-3.10.1.tar.gz", hash = "sha256:8b0d058e4e425d3b45e8ec70d49b402f4d6b21041e674798b1f91ba027c73f28"}, ] [[package]] @@ -97,7 +109,7 @@ files = [ [[package]] name = "anthropic" -version = "0.23.1" +version = "0.28.1" requires_python = ">=3.7" summary = "The official Python library for the anthropic API" groups = ["default"] @@ -105,14 +117,15 @@ dependencies = [ "anyio<5,>=3.5.0", "distro<2,>=1.7.0", "httpx<1,>=0.23.0", + "jiter<1,>=0.4.0", "pydantic<3,>=1.9.0", "sniffio", "tokenizers>=0.13.0", "typing-extensions<5,>=4.7", ] files = [ - {file = "anthropic-0.23.1-py3-none-any.whl", hash = "sha256:6dc5779dae83a5834864f4a4af0166c972b70f4cb8fd2765e1558282cc6d6242"}, - {file = "anthropic-0.23.1.tar.gz", hash = "sha256:9325103702cbc96bb09d1b58c36bde75c726f6a01029fb4d85f41ebba07e9066"}, + {file = "anthropic-0.28.1-py3-none-any.whl", hash = "sha256:c4773ae2b42951a6b747bed328b0d03fa412938c95c3a8b9dce70d69badb710b"}, + {file = "anthropic-0.28.1.tar.gz", hash = "sha256:e3a6d595bde241141bdc685edc393903ec95c7fa378013a71186cfb8f32b1793"}, ] [[package]] @@ -183,13 +196,13 @@ files = [ [[package]] name = "attrs" -version = "23.2.0" +version = "24.1.0" requires_python = ">=3.7" summary = "Classes Without Boilerplate" groups = ["default"] files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.1.0-py3-none-any.whl", hash = "sha256:377b47448cb61fea38533f671fba0d0f8a96fd58facd4dc518e3dac9dbea0905"}, + {file = "attrs-24.1.0.tar.gz", hash = "sha256:adbdec84af72d38be7628e353a09b6a6790d15cd71819f6e9d7b0faa8a125745"}, ] [[package]] @@ -862,7 +875,7 @@ 
files = [ [[package]] name = "google-cloud-resource-manager" -version = "1.12.4" +version = "1.12.5" requires_python = ">=3.7" summary = "Google Cloud Resource Manager API client library" groups = ["default"] @@ -874,8 +887,8 @@ dependencies = [ "protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev,>=3.20.2", ] files = [ - {file = "google-cloud-resource-manager-1.12.4.tar.gz", hash = "sha256:3eda914a925e92465ef80faaab7e0f7a9312d486dd4e123d2c76e04bac688ff0"}, - {file = "google_cloud_resource_manager-1.12.4-py2.py3-none-any.whl", hash = "sha256:0b6663585f7f862166c0fb4c55fdda721fce4dc2dc1d5b52d03ee4bf2653a85f"}, + {file = "google_cloud_resource_manager-1.12.5-py2.py3-none-any.whl", hash = "sha256:2708a718b45c79464b7b21559c701b5c92e6b0b1ab2146d0a256277a623dc175"}, + {file = "google_cloud_resource_manager-1.12.5.tar.gz", hash = "sha256:b7af4254401ed4efa3aba3a929cb3ddb803fa6baf91a78485e45583597de5891"}, ] [[package]] @@ -1072,39 +1085,54 @@ files = [ [[package]] name = "grpcio" -version = "1.65.1" +version = "1.65.4" requires_python = ">=3.8" summary = "HTTP/2-based RPC framework" groups = ["default"] files = [ - {file = "grpcio-1.65.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:3dc5f928815b8972fb83b78d8db5039559f39e004ec93ebac316403fe031a062"}, - {file = "grpcio-1.65.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:8333ca46053c35484c9f2f7e8d8ec98c1383a8675a449163cea31a2076d93de8"}, - {file = "grpcio-1.65.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:7af64838b6e615fff0ec711960ed9b6ee83086edfa8c32670eafb736f169d719"}, - {file = "grpcio-1.65.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbb64b4166362d9326f7efbf75b1c72106c1aa87f13a8c8b56a1224fac152f5c"}, - {file = "grpcio-1.65.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8422dc13ad93ec8caa2612b5032a2b9cd6421c13ed87f54db4a3a2c93afaf77"}, - {file = "grpcio-1.65.1-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:4effc0562b6c65d4add6a873ca132e46ba5e5a46f07c93502c37a9ae7f043857"}, - {file = "grpcio-1.65.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a6c71575a2fedf259724981fd73a18906513d2f306169c46262a5bae956e6364"}, - {file = "grpcio-1.65.1-cp310-cp310-win32.whl", hash = "sha256:34966cf526ef0ea616e008d40d989463e3db157abb213b2f20c6ce0ae7928875"}, - {file = "grpcio-1.65.1-cp310-cp310-win_amd64.whl", hash = "sha256:ca931de5dd6d9eb94ff19a2c9434b23923bce6f767179fef04dfa991f282eaad"}, - {file = "grpcio-1.65.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:bbb46330cc643ecf10bd9bd4ca8e7419a14b6b9dedd05f671c90fb2c813c6037"}, - {file = "grpcio-1.65.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d827a6fb9215b961eb73459ad7977edb9e748b23e3407d21c845d1d8ef6597e5"}, - {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:6e71aed8835f8d9fbcb84babc93a9da95955d1685021cceb7089f4f1e717d719"}, - {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a1c84560b3b2d34695c9ba53ab0264e2802721c530678a8f0a227951f453462"}, - {file = "grpcio-1.65.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27adee2338d697e71143ed147fe286c05810965d5d30ec14dd09c22479bfe48a"}, - {file = "grpcio-1.65.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f62652ddcadc75d0e7aa629e96bb61658f85a993e748333715b4ab667192e4e8"}, - {file = "grpcio-1.65.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:71a05fd814700dd9cb7d9a507f2f6a1ef85866733ccaf557eedacec32d65e4c2"}, - {file = "grpcio-1.65.1-cp311-cp311-win32.whl", hash = "sha256:b590f1ad056294dfaeac0b7e1b71d3d5ace638d8dd1f1147ce4bd13458783ba8"}, - {file = "grpcio-1.65.1-cp311-cp311-win_amd64.whl", hash = "sha256:12e9bdf3b5fd48e5fbe5b3da382ad8f97c08b47969f3cca81dd9b36b86ed39e2"}, - {file = "grpcio-1.65.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:cb5175f45c980ff418998723ea1b3869cce3766d2ab4e4916fbd3cedbc9d0ed3"}, - {file = 
"grpcio-1.65.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b12c1aa7b95abe73b3e04e052c8b362655b41c7798da69f1eaf8d186c7d204df"}, - {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:3019fb50128b21a5e018d89569ffaaaa361680e1346c2f261bb84a91082eb3d3"}, - {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ae15275ed98ea267f64ee9ddedf8ecd5306a5b5bb87972a48bfe24af24153e8"}, - {file = "grpcio-1.65.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f096ffb881f37e8d4f958b63c74bfc400c7cebd7a944b027357cd2fb8d91a57"}, - {file = "grpcio-1.65.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2f56b5a68fdcf17a0a1d524bf177218c3c69b3947cb239ea222c6f1867c3ab68"}, - {file = "grpcio-1.65.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:941596d419b9736ab548aa0feb5bbba922f98872668847bf0720b42d1d227b9e"}, - {file = "grpcio-1.65.1-cp39-cp39-win32.whl", hash = "sha256:5fd7337a823b890215f07d429f4f193d24b80d62a5485cf88ee06648591a0c57"}, - {file = "grpcio-1.65.1-cp39-cp39-win_amd64.whl", hash = "sha256:1bceeec568372cbebf554eae1b436b06c2ff24cfaf04afade729fb9035408c6c"}, - {file = "grpcio-1.65.1.tar.gz", hash = "sha256:3c492301988cd720cd145d84e17318d45af342e29ef93141228f9cd73222368b"}, + {file = "grpcio-1.65.4-cp310-cp310-linux_armv7l.whl", hash = "sha256:0e85c8766cf7f004ab01aff6a0393935a30d84388fa3c58d77849fcf27f3e98c"}, + {file = "grpcio-1.65.4-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:e4a795c02405c7dfa8affd98c14d980f4acea16ea3b539e7404c645329460e5a"}, + {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:d7b984a8dd975d949c2042b9b5ebcf297d6d5af57dcd47f946849ee15d3c2fb8"}, + {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644a783ce604a7d7c91412bd51cf9418b942cf71896344b6dc8d55713c71ce82"}, + {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5764237d751d3031a36fafd57eb7d36fd2c10c658d2b4057c516ccf114849a3e"}, + {file = "grpcio-1.65.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ee40d058cf20e1dd4cacec9c39e9bce13fedd38ce32f9ba00f639464fcb757de"}, + {file = "grpcio-1.65.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4482a44ce7cf577a1f8082e807a5b909236bce35b3e3897f839f2fbd9ae6982d"}, + {file = "grpcio-1.65.4-cp310-cp310-win32.whl", hash = "sha256:66bb051881c84aa82e4f22d8ebc9d1704b2e35d7867757f0740c6ef7b902f9b1"}, + {file = "grpcio-1.65.4-cp310-cp310-win_amd64.whl", hash = "sha256:870370524eff3144304da4d1bbe901d39bdd24f858ce849b7197e530c8c8f2ec"}, + {file = "grpcio-1.65.4-cp311-cp311-linux_armv7l.whl", hash = "sha256:85e9c69378af02e483bc626fc19a218451b24a402bdf44c7531e4c9253fb49ef"}, + {file = "grpcio-1.65.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2bd672e005afab8bf0d6aad5ad659e72a06dd713020554182a66d7c0c8f47e18"}, + {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:abccc5d73f5988e8f512eb29341ed9ced923b586bb72e785f265131c160231d8"}, + {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:886b45b29f3793b0c2576201947258782d7e54a218fe15d4a0468d9a6e00ce17"}, + {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be952436571dacc93ccc7796db06b7daf37b3b56bb97e3420e6503dccfe2f1b4"}, + {file = "grpcio-1.65.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8dc9ddc4603ec43f6238a5c95400c9a901b6d079feb824e890623da7194ff11e"}, + {file = "grpcio-1.65.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ade1256c98cba5a333ef54636095f2c09e6882c35f76acb04412f3b1aa3c29a5"}, + {file = "grpcio-1.65.4-cp311-cp311-win32.whl", hash = "sha256:280e93356fba6058cbbfc6f91a18e958062ef1bdaf5b1caf46c615ba1ae71b5b"}, + {file = "grpcio-1.65.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2b819f9ee27ed4e3e737a4f3920e337e00bc53f9e254377dd26fc7027c4d558"}, + {file = 
"grpcio-1.65.4-cp39-cp39-linux_armv7l.whl", hash = "sha256:874acd010e60a2ec1e30d5e505b0651ab12eb968157cd244f852b27c6dbed733"}, + {file = "grpcio-1.65.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b07f36faf01fca5427d4aa23645e2d492157d56c91fab7e06fe5697d7e171ad4"}, + {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:b81711bf4ec08a3710b534e8054c7dcf90f2edc22bebe11c1775a23f145595fe"}, + {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88fcabc332a4aef8bcefadc34a02e9ab9407ab975d2c7d981a8e12c1aed92aa1"}, + {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9ba3e63108a8749994f02c7c0e156afb39ba5bdf755337de8e75eb685be244b"}, + {file = "grpcio-1.65.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8eb485801957a486bf5de15f2c792d9f9c897a86f2f18db8f3f6795a094b4bb2"}, + {file = "grpcio-1.65.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:075f3903bc1749ace93f2b0664f72964ee5f2da5c15d4b47e0ab68e4f442c257"}, + {file = "grpcio-1.65.4-cp39-cp39-win32.whl", hash = "sha256:0a0720299bdb2cc7306737295d56e41ce8827d5669d4a3cd870af832e3b17c4d"}, + {file = "grpcio-1.65.4-cp39-cp39-win_amd64.whl", hash = "sha256:a146bc40fa78769f22e1e9ff4f110ef36ad271b79707577bf2a31e3e931141b9"}, + {file = "grpcio-1.65.4.tar.gz", hash = "sha256:2a4f476209acffec056360d3e647ae0e14ae13dcf3dfb130c227ae1c594cbe39"}, +] + +[[package]] +name = "grpcio-health-checking" +version = "1.62.2" +requires_python = ">=3.6" +summary = "Standard Health Checking Service for gRPC" +groups = ["default"] +dependencies = [ + "grpcio>=1.62.2", + "protobuf>=4.21.6", +] +files = [ + {file = "grpcio-health-checking-1.62.2.tar.gz", hash = "sha256:a44d1ea1e1510b5c62265dada04d86621bb1491d75de987713c9c0ea005c10a8"}, + {file = "grpcio_health_checking-1.62.2-py3-none-any.whl", hash = "sha256:f0d77e02457aa00e98ce12c741dca6df7e34dbcc3859681c4a473dc589288e56"}, ] [[package]] @@ -1253,7 +1281,7 @@ files = [ 
[[package]] name = "huggingface-hub" -version = "0.24.2" +version = "0.24.5" requires_python = ">=3.8.0" summary = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" groups = ["default"] @@ -1267,8 +1295,8 @@ dependencies = [ "typing-extensions>=3.7.4.3", ] files = [ - {file = "huggingface_hub-0.24.2-py3-none-any.whl", hash = "sha256:abdf3244d3a274c4b1fbc5c4a1ef700032b3f60ba93cc63e4f036fd082aa2805"}, - {file = "huggingface_hub-0.24.2.tar.gz", hash = "sha256:92be892405d2f6a7a8479016f9a5662354f202b2c6c1ff499609621aed1fae10"}, + {file = "huggingface_hub-0.24.5-py3-none-any.whl", hash = "sha256:d93fb63b1f1a919a22ce91a14518974e81fc4610bf344dfe7572343ce8d3aced"}, + {file = "huggingface_hub-0.24.5.tar.gz", hash = "sha256:7b45d6744dd53ce9cbf9880957de00e9d10a9ae837f1c9b7255fc8fa4e8264f3"}, ] [[package]] @@ -1315,6 +1343,52 @@ files = [ {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, ] +[[package]] +name = "jiter" +version = "0.5.0" +requires_python = ">=3.8" +summary = "Fast iterable JSON parser." 
+groups = ["default"] +files = [ + {file = "jiter-0.5.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b599f4e89b3def9a94091e6ee52e1d7ad7bc33e238ebb9c4c63f211d74822c3f"}, + {file = "jiter-0.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a063f71c4b06225543dddadbe09d203dc0c95ba352d8b85f1221173480a71d5"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acc0d5b8b3dd12e91dd184b87273f864b363dfabc90ef29a1092d269f18c7e28"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c22541f0b672f4d741382a97c65609332a783501551445ab2df137ada01e019e"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63314832e302cc10d8dfbda0333a384bf4bcfce80d65fe99b0f3c0da8945a91a"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a25fbd8a5a58061e433d6fae6d5298777c0814a8bcefa1e5ecfff20c594bd749"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:503b2c27d87dfff5ab717a8200fbbcf4714516c9d85558048b1fc14d2de7d8dc"}, + {file = "jiter-0.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d1f3d27cce923713933a844872d213d244e09b53ec99b7a7fdf73d543529d6d"}, + {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c95980207b3998f2c3b3098f357994d3fd7661121f30669ca7cb945f09510a87"}, + {file = "jiter-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:afa66939d834b0ce063f57d9895e8036ffc41c4bd90e4a99631e5f261d9b518e"}, + {file = "jiter-0.5.0-cp310-none-win32.whl", hash = "sha256:f16ca8f10e62f25fd81d5310e852df6649af17824146ca74647a018424ddeccf"}, + {file = "jiter-0.5.0-cp310-none-win_amd64.whl", hash = "sha256:b2950e4798e82dd9176935ef6a55cf6a448b5c71515a556da3f6b811a7844f1e"}, + {file = "jiter-0.5.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:d4c8e1ed0ef31ad29cae5ea16b9e41529eb50a7fba70600008e9f8de6376d553"}, + {file = "jiter-0.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c6f16e21276074a12d8421692515b3fd6d2ea9c94fd0734c39a12960a20e85f3"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5280e68e7740c8c128d3ae5ab63335ce6d1fb6603d3b809637b11713487af9e6"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:583c57fc30cc1fec360e66323aadd7fc3edeec01289bfafc35d3b9dcb29495e4"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26351cc14507bdf466b5f99aba3df3143a59da75799bf64a53a3ad3155ecded9"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4829df14d656b3fb87e50ae8b48253a8851c707da9f30d45aacab2aa2ba2d614"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42a4bdcf7307b86cb863b2fb9bb55029b422d8f86276a50487982d99eed7c6e"}, + {file = "jiter-0.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04d461ad0aebf696f8da13c99bc1b3e06f66ecf6cfd56254cc402f6385231c06"}, + {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e6375923c5f19888c9226582a124b77b622f8fd0018b843c45eeb19d9701c403"}, + {file = "jiter-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cec323a853c24fd0472517113768c92ae0be8f8c384ef4441d3632da8baa646"}, + {file = "jiter-0.5.0-cp311-none-win32.whl", hash = "sha256:aa1db0967130b5cab63dfe4d6ff547c88b2a394c3410db64744d491df7f069bb"}, + {file = "jiter-0.5.0-cp311-none-win_amd64.whl", hash = "sha256:aa9d2b85b2ed7dc7697597dcfaac66e63c1b3028652f751c81c65a9f220899ae"}, + {file = "jiter-0.5.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0af3838cfb7e6afee3f00dc66fa24695199e20ba87df26e942820345b0afc566"}, + {file = "jiter-0.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:550b11d669600dbc342364fd4adbe987f14d0bbedaf06feb1b983383dcc4b961"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:489875bf1a0ffb3cb38a727b01e6673f0f2e395b2aad3c9387f94187cb214bbf"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b250ca2594f5599ca82ba7e68785a669b352156260c5362ea1b4e04a0f3e2389"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ea18e01f785c6667ca15407cd6dabbe029d77474d53595a189bdc813347218e"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462a52be85b53cd9bffd94e2d788a09984274fe6cebb893d6287e1c296d50653"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92cc68b48d50fa472c79c93965e19bd48f40f207cb557a8346daa020d6ba973b"}, + {file = "jiter-0.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1c834133e59a8521bc87ebcad773608c6fa6ab5c7a022df24a45030826cf10bc"}, + {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab3a71ff31cf2d45cb216dc37af522d335211f3a972d2fe14ea99073de6cb104"}, + {file = "jiter-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cccd3af9c48ac500c95e1bcbc498020c87e1781ff0345dd371462d67b76643eb"}, + {file = "jiter-0.5.0-cp39-none-win32.whl", hash = "sha256:368084d8d5c4fc40ff7c3cc513c4f73e02c85f6009217922d0823a48ee7adf61"}, + {file = "jiter-0.5.0-cp39-none-win_amd64.whl", hash = "sha256:ce03f7b4129eb72f1687fa11300fbf677b02990618428934662406d2a76742a1"}, + {file = "jiter-0.5.0.tar.gz", hash = "sha256:1d916ba875bcab5c5f7d927df998c4cb694d27dceddf3392e58beaf10563368a"}, +] + [[package]] name = "joblib" version = "1.4.2" @@ -1371,29 +1445,44 @@ files = [ {file = "lazydocs-0.4.8.tar.gz", hash = "sha256:8ac1fda05f03e0c5ae1d30b81eaeb785476efa161194a5e8bfa8630e14af9562"}, ] +[[package]] +name = "llama-cloud" +version = "0.0.11" +requires_python = 
"<4,>=3.8" +summary = "" +groups = ["default"] +dependencies = [ + "httpx>=0.20.0", + "pydantic>=1.10", +] +files = [ + {file = "llama_cloud-0.0.11-py3-none-any.whl", hash = "sha256:7e2c673a4ab50749e20807f57bc511e81a04cf70d0dbd5db2a1d74d6599fec3f"}, + {file = "llama_cloud-0.0.11.tar.gz", hash = "sha256:100882b528892065211436da048252d57eca14d8683d2fde6f89eeb20950ac18"}, +] + [[package]] name = "llama-index" -version = "0.10.38" +version = "0.10.58" requires_python = "<4.0,>=3.8.1" summary = "Interface between LLMs and your data" groups = ["default"] dependencies = [ "llama-index-agent-openai<0.3.0,>=0.1.4", "llama-index-cli<0.2.0,>=0.1.2", - "llama-index-core<0.11.0,>=0.10.38", + "llama-index-core==0.10.58", "llama-index-embeddings-openai<0.2.0,>=0.1.5", - "llama-index-indices-managed-llama-cloud<0.2.0,>=0.1.2", + "llama-index-indices-managed-llama-cloud>=0.2.0", "llama-index-legacy<0.10.0,>=0.9.48", - "llama-index-llms-openai<0.2.0,>=0.1.13", + "llama-index-llms-openai<0.2.0,>=0.1.27", "llama-index-multi-modal-llms-openai<0.2.0,>=0.1.3", "llama-index-program-openai<0.2.0,>=0.1.3", "llama-index-question-gen-openai<0.2.0,>=0.1.2", "llama-index-readers-file<0.2.0,>=0.1.4", - "llama-index-readers-llama-parse<0.2.0,>=0.1.2", + "llama-index-readers-llama-parse>=0.1.2", ] files = [ - {file = "llama_index-0.10.38-py3-none-any.whl", hash = "sha256:5d521b0ea7111679521292432960d3b9fb53c98d55414bd42d753bc6271d234d"}, - {file = "llama_index-0.10.38.tar.gz", hash = "sha256:5281cfa8b6e7f0f5f12897c00adcd790f7b51c130037f3561fd5630fca37bfb3"}, + {file = "llama_index-0.10.58-py3-none-any.whl", hash = "sha256:4a6cd89aeb9a450ce5b367fc4d771193c38ac226baa71af63494e096c5043951"}, + {file = "llama_index-0.10.58.tar.gz", hash = "sha256:8fe09b4d6e9071f89cf2f5af4eae490b08713e5238492236de893e758428d4dc"}, ] [[package]] @@ -1430,7 +1519,7 @@ files = [ [[package]] name = "llama-index-core" -version = "0.10.56" +version = "0.10.58" requires_python = "<4.0,>=3.8.1" summary = "Interface between 
LLMs and your data" groups = ["default"] @@ -1459,14 +1548,14 @@ dependencies = [ "wrapt", ] files = [ - {file = "llama_index_core-0.10.56-py3-none-any.whl", hash = "sha256:9f9e217731c94c60c4dc2163c2c3e0c17f9b2445d6d2ba008bb624545d75d924"}, - {file = "llama_index_core-0.10.56.tar.gz", hash = "sha256:a646ba3f42329c3160fa3535adb6b9a99bb34fc60e027dfd861fa248a93d90c6"}, + {file = "llama_index_core-0.10.58-py3-none-any.whl", hash = "sha256:2345d9b20e21d0ec00e9282fe88a5e4a0eba2e732e577d1b0348512e9181a74f"}, + {file = "llama_index_core-0.10.58.tar.gz", hash = "sha256:f5730be1861a8fd0ef94e9f412a713132184b887b8a79347514efb672bf749fc"}, ] [[package]] name = "llama-index-embeddings-azure-openai" -version = "0.1.6" -requires_python = ">=3.8.1,<4.0" +version = "0.1.11" +requires_python = "<4.0,>=3.8.1" summary = "llama-index embeddings azure openai integration" groups = ["default"] dependencies = [ @@ -1475,8 +1564,8 @@ dependencies = [ "llama-index-llms-azure-openai<0.2.0,>=0.1.3", ] files = [ - {file = "llama_index_embeddings_azure_openai-0.1.6-py3-none-any.whl", hash = "sha256:a84a6d7d67296690e5d20070ce5d9920ec56b0d339338d276eae2a7b2f822b9e"}, - {file = "llama_index_embeddings_azure_openai-0.1.6.tar.gz", hash = "sha256:05092b1b31bd0f45257d161f1e5a17261c60e688f4c6a4fe316557349ac2aebc"}, + {file = "llama_index_embeddings_azure_openai-0.1.11-py3-none-any.whl", hash = "sha256:afefe55ee69934528c569ddf71fb1e9ddf2992b6c344c4c9d72a03fa8c33cf40"}, + {file = "llama_index_embeddings_azure_openai-0.1.11.tar.gz", hash = "sha256:40a4fd9a31ba74f071739d6c8405187b66e7f584ae2f64a30316c6c7b6a25325"}, ] [[package]] @@ -1524,17 +1613,17 @@ files = [ [[package]] name = "llama-index-indices-managed-llama-cloud" -version = "0.1.6" +version = "0.2.7" requires_python = "<4.0,>=3.8.1" summary = "llama-index indices llama-cloud integration" groups = ["default"] dependencies = [ - "llama-index-core<0.11.0,>=0.10.0", - "llamaindex-py-client<0.2.0,>=0.1.19", + "llama-cloud>=0.0.11", + 
"llama-index-core<0.11.0,>=0.10.48.post1", ] files = [ - {file = "llama_index_indices_managed_llama_cloud-0.1.6-py3-none-any.whl", hash = "sha256:cba33e1a3677b2a2ae7f239119acbf6dc3818f105edc92315729842b56fbc949"}, - {file = "llama_index_indices_managed_llama_cloud-0.1.6.tar.gz", hash = "sha256:74b3b0e9ebf9d348d3054f9fc0c657031acceb9351c31116ad8d5a7ae4729f5c"}, + {file = "llama_index_indices_managed_llama_cloud-0.2.7-py3-none-any.whl", hash = "sha256:94335504eab2a6baf7361bbd8bda3ae20a68c7d0111587c9a0793440e9edff21"}, + {file = "llama_index_indices_managed_llama_cloud-0.2.7.tar.gz", hash = "sha256:d7e9b4cc50214b3cfcd75ea63cacce4ee36092cb672c003f15fd23ba31c49ec0"}, ] [[package]] @@ -1570,23 +1659,23 @@ files = [ [[package]] name = "llama-index-llms-anthropic" -version = "0.1.11" +version = "0.1.16" requires_python = "<4.0,>=3.8.1" summary = "llama-index llms anthropic integration" groups = ["default"] dependencies = [ - "anthropic<0.24.0,>=0.23.1", - "llama-index-core<0.11.0,>=0.10.1", + "anthropic<0.29.0,>=0.26.2", + "llama-index-core<0.11.0,>=0.10.57", ] files = [ - {file = "llama_index_llms_anthropic-0.1.11-py3-none-any.whl", hash = "sha256:488964147907058c81f5c272830401fa17da3bfe0a6688db87ec2538d5887491"}, - {file = "llama_index_llms_anthropic-0.1.11.tar.gz", hash = "sha256:7ec7008b54076cbb846cc3d4f5811354778148d75562f92f83e5622cde7657b9"}, + {file = "llama_index_llms_anthropic-0.1.16-py3-none-any.whl", hash = "sha256:6037410d54eb1315858c9c1452a396fbb2039be937bcdb9c5c37c77c7b7f5fc5"}, + {file = "llama_index_llms_anthropic-0.1.16.tar.gz", hash = "sha256:09fa9188a362cfed4cfad0d2a13a46265d755657fe1e5071e4cd3b8e7174362d"}, ] [[package]] name = "llama-index-llms-anyscale" -version = "0.1.3" -requires_python = ">=3.8.1,<4.0" +version = "0.1.4" +requires_python = "<4.0,>=3.8.1" summary = "llama-index llms anyscale integration" groups = ["default"] dependencies = [ @@ -1594,14 +1683,14 @@ dependencies = [ "llama-index-llms-openai<0.2.0,>=0.1.1", ] files = [ - {file = 
"llama_index_llms_anyscale-0.1.3-py3-none-any.whl", hash = "sha256:3ac1d33169dff8d5b7364bcafc7abb6780ded77fabe7d70897dcf58e097c7432"}, - {file = "llama_index_llms_anyscale-0.1.3.tar.gz", hash = "sha256:d5a7a5d1ed7e196b51d8e356c025ff428cebbadcd5c4a8b85ceffa4189c8e640"}, + {file = "llama_index_llms_anyscale-0.1.4-py3-none-any.whl", hash = "sha256:94c081c97102529ebc35be606cf31caddde2aaba2dc4fbf4940bff86fd9dae3e"}, + {file = "llama_index_llms_anyscale-0.1.4.tar.gz", hash = "sha256:a8426b261e85ac59d2fc0712065a86cf0d1dbaef34a1d910ede9f331d0cc44f7"}, ] [[package]] name = "llama-index-llms-azure-openai" -version = "0.1.5" -requires_python = ">=3.8.1,<4.0" +version = "0.1.10" +requires_python = "<4.0,>=3.8.1" summary = "llama-index llms azure openai integration" groups = ["default"] dependencies = [ @@ -1611,51 +1700,52 @@ dependencies = [ "llama-index-llms-openai<0.2.0,>=0.1.1", ] files = [ - {file = "llama_index_llms_azure_openai-0.1.5-py3-none-any.whl", hash = "sha256:180805a7114198155aad7cc3abdf599142c59242d366b11ee8a9150de35b7773"}, - {file = "llama_index_llms_azure_openai-0.1.5.tar.gz", hash = "sha256:5a1c3d1a6a4fe4d03acb50b61594e6775dc86a431738afa291f3708029299a92"}, + {file = "llama_index_llms_azure_openai-0.1.10-py3-none-any.whl", hash = "sha256:8666b095118ed9c5087dc2d91a83a826d4549ea4d442b9eef363e243207d3539"}, + {file = "llama_index_llms_azure_openai-0.1.10.tar.gz", hash = "sha256:f1624c9bd7bf4458e98cca6f3b805eec06105fa951536ff24b098d913d2368bd"}, ] [[package]] name = "llama-index-llms-mistralai" -version = "0.1.10" +version = "0.1.19" requires_python = "<4.0,>=3.9" summary = "llama-index llms mistral ai integration" groups = ["default"] dependencies = [ - "llama-index-core<0.11.0,>=0.10.24", - "mistralai>=0.1.3", + "llama-index-core<0.11.0,>=0.10.57", + "mistralai>=0.4.2", ] files = [ - {file = "llama_index_llms_mistralai-0.1.10-py3-none-any.whl", hash = "sha256:5c079ea5d2fcdb9fc2c8d1b9e3b48ba82c0016d234fb03c2b46ce9b0e50ad0c2"}, - {file = 
"llama_index_llms_mistralai-0.1.10.tar.gz", hash = "sha256:19ba21606940e3ba53cd58b790240b44435d62b7edb323f06dbcac328e32a771"}, + {file = "llama_index_llms_mistralai-0.1.19-py3-none-any.whl", hash = "sha256:268341e87f9de4765dd0d3afcc8c2c2e6c9304fd1e69fccc90d2b3510869dc47"}, + {file = "llama_index_llms_mistralai-0.1.19.tar.gz", hash = "sha256:77d9dd0accc0e4aebeba59bf7416ae6e7c42f9998b4ec1eedb5d4f7cb80da4ea"}, ] [[package]] name = "llama-index-llms-ollama" -version = "0.1.3" +version = "0.2.2" requires_python = "<4.0,>=3.8.1" summary = "llama-index llms ollama integration" groups = ["default"] dependencies = [ "llama-index-core<0.11.0,>=0.10.1", + "ollama>=0.3.0", ] files = [ - {file = "llama_index_llms_ollama-0.1.3-py3-none-any.whl", hash = "sha256:510cefea9a3a76160e47340a870ba9f66603e0a495f0e1c54aaae42cd096edf4"}, - {file = "llama_index_llms_ollama-0.1.3.tar.gz", hash = "sha256:c796654b73d103f9324ed4c55c76c49b73738abc6e50d02596ed5e2b1c66dec5"}, + {file = "llama_index_llms_ollama-0.2.2-py3-none-any.whl", hash = "sha256:c224d7c17d641045bc9b6a6681dab434c1c421af0bacb5825eea444fefd8ed78"}, + {file = "llama_index_llms_ollama-0.2.2.tar.gz", hash = "sha256:0c7f192cb8b768707bd5154b97e2a41284732d62070eb76190dee125e95245ea"}, ] [[package]] name = "llama-index-llms-openai" -version = "0.1.26" +version = "0.1.27" requires_python = "<4.0,>=3.8.1" summary = "llama-index llms openai integration" groups = ["default"] dependencies = [ - "llama-index-core<0.11.0,>=0.10.24", + "llama-index-core<0.11.0,>=0.10.57", ] files = [ - {file = "llama_index_llms_openai-0.1.26-py3-none-any.whl", hash = "sha256:1ad8e4eb02f9410c2091749d4d9aa9db4452646b595eb5eb937edbc496fb65fe"}, - {file = "llama_index_llms_openai-0.1.26.tar.gz", hash = "sha256:08a408cd53af4cd4623dd5807be4cbbd5e5b3ca01272128cd678d667343e4d5d"}, + {file = "llama_index_llms_openai-0.1.27-py3-none-any.whl", hash = "sha256:8da0e90d4a558667d2b9cf1b3f577a4cb7723b7680ed6d22027b0baf9cd5999e"}, + {file = 
"llama_index_llms_openai-0.1.27.tar.gz", hash = "sha256:37c2d1159b56607d3a807d90260ee25b4f002086d6251c7272afbc53f2514603"}, ] [[package]] @@ -1689,18 +1779,18 @@ files = [ [[package]] name = "llama-index-llms-vertex" -version = "0.1.8" +version = "0.2.2" requires_python = "<4.0,>=3.8.1" summary = "llama-index llms vertex integration" groups = ["default"] dependencies = [ "google-cloud-aiplatform<2.0.0,>=1.39.0", - "llama-index-core<0.11.0,>=0.10.1", + "llama-index-core<0.11.0,>=0.10.57", "pyarrow<16.0.0,>=15.0.2", ] files = [ - {file = "llama_index_llms_vertex-0.1.8-py3-none-any.whl", hash = "sha256:6cab358970bb2767b649123a087c7bd6ab1eb5ead14745497644f4bc076b4baf"}, - {file = "llama_index_llms_vertex-0.1.8.tar.gz", hash = "sha256:1c39bc9e1163e338679208da9908dd68de85c32feca9230c26435505ea5a6a8d"}, + {file = "llama_index_llms_vertex-0.2.2-py3-none-any.whl", hash = "sha256:d9be5676d70b737cd0d35c2c343d3efe7dfc79357e310832b9fa349a138d53cd"}, + {file = "llama_index_llms_vertex-0.2.2.tar.gz", hash = "sha256:75ad7d49f7f4ddde3e8f05c02274efc3a903441f380681ed251d3bfc3d9660b2"}, ] [[package]] @@ -1720,18 +1810,18 @@ files = [ [[package]] name = "llama-index-program-openai" -version = "0.1.6" +version = "0.1.7" requires_python = "<4.0,>=3.8.1" summary = "llama-index program openai integration" groups = ["default"] dependencies = [ "llama-index-agent-openai<0.3.0,>=0.1.1", - "llama-index-core<0.11.0,>=0.10.1", - "llama-index-llms-openai<0.2.0,>=0.1.1", + "llama-index-core<0.11.0,>=0.10.57", + "llama-index-llms-openai>=0.1.1", ] files = [ - {file = "llama_index_program_openai-0.1.6-py3-none-any.whl", hash = "sha256:4660b338503537c5edca1e0dab606af6ce372b4f1b597e2833c6b602447c5d8d"}, - {file = "llama_index_program_openai-0.1.6.tar.gz", hash = "sha256:c6a4980c5ea826088b28b4dee3367edb20221e6d05eb0e05019049190131d772"}, + {file = "llama_index_program_openai-0.1.7-py3-none-any.whl", hash = "sha256:33489b573c1050a3f583ff68fcbc4bcbd49f29e74f3e5baea08ab0d5f363403c"}, + {file = 
"llama_index_program_openai-0.1.7.tar.gz", hash = "sha256:bf7eb61a073381714be5a049d93b40044dfe51bd4333bee539d1532b7407621f"}, ] [[package]] @@ -1752,7 +1842,7 @@ files = [ [[package]] name = "llama-index-readers-file" -version = "0.1.30" +version = "0.1.32" requires_python = "<4.0,>=3.8.1" summary = "llama-index readers file integration" groups = ["default"] @@ -1763,8 +1853,8 @@ dependencies = [ "striprtf<0.0.27,>=0.0.26", ] files = [ - {file = "llama_index_readers_file-0.1.30-py3-none-any.whl", hash = "sha256:d5f6cdd4685ee73103c68b9bc0dfb0d05439033133fc6bd45ef31ff41519e723"}, - {file = "llama_index_readers_file-0.1.30.tar.gz", hash = "sha256:32f40465f2a8a65fa5773e03c9f4dd55164be934ae67fad62113680436787d91"}, + {file = "llama_index_readers_file-0.1.32-py3-none-any.whl", hash = "sha256:699d6f80c5c922321b6202b565c7cc22ab9e27a2d1c6df1e42550089ccd25290"}, + {file = "llama_index_readers_file-0.1.32.tar.gz", hash = "sha256:80a2a2aeefba7deae289dfd4aaec6e8ab8ee331820bcdd1db821d1879bd21515"}, ] [[package]] @@ -1784,7 +1874,7 @@ files = [ [[package]] name = "llama-index-vector-stores-milvus" -version = "0.1.18" +version = "0.1.21" requires_python = "<4.0,>=3.8.1" summary = "llama-index vector_stores milvus integration" groups = ["default"] @@ -1793,14 +1883,14 @@ dependencies = [ "pymilvus<3.0.0,>=2.3.6", ] files = [ - {file = "llama_index_vector_stores_milvus-0.1.18-py3-none-any.whl", hash = "sha256:a4b1e4d1f31c4eeb5bd4583365f8a2175a4d0856cf1fd0acd202e6e428e0842a"}, - {file = "llama_index_vector_stores_milvus-0.1.18.tar.gz", hash = "sha256:af2e3cf8e8270b90b3c4209a864f9805a2b367e33a4f9c0961fa2f8d3f9ea9dd"}, + {file = "llama_index_vector_stores_milvus-0.1.21-py3-none-any.whl", hash = "sha256:272047d5fec42a00fb44b3368f194b074888688195a12b46d9d94b25bb903377"}, + {file = "llama_index_vector_stores_milvus-0.1.21.tar.gz", hash = "sha256:09467f730ebc4cdebfe8a5fc1b0ce49c5ed0ca4cf68b5826f64fc31535992da6"}, ] [[package]] name = "llama-index-vector-stores-pinecone" -version = "0.1.4" 
-requires_python = ">=3.8.1,<3.13" +version = "0.1.8" +requires_python = "<3.13,>=3.8.1" summary = "llama-index vector_stores pinecone integration" groups = ["default"] dependencies = [ @@ -1808,62 +1898,62 @@ dependencies = [ "pinecone-client<4.0.0,>=3.0.2", ] files = [ - {file = "llama_index_vector_stores_pinecone-0.1.4-py3-none-any.whl", hash = "sha256:af729deb397bc2d2604525be04b3056675697080fe4ae34eb7bbd839f09e77dc"}, - {file = "llama_index_vector_stores_pinecone-0.1.4.tar.gz", hash = "sha256:ab5f2141d44404c9ad36611c11e8b6afb35b6f0a80959726f8eab5b65a157549"}, + {file = "llama_index_vector_stores_pinecone-0.1.8-py3-none-any.whl", hash = "sha256:162e7aab267f995080c6b0c69ad9a19bc32a81c112afe9b5ae68d7174b843b6f"}, + {file = "llama_index_vector_stores_pinecone-0.1.8.tar.gz", hash = "sha256:ed06555a87581427e791de8a1ab2b139958f4ac927d43bdb8f6947b0eb523f92"}, ] [[package]] name = "llama-index-vector-stores-postgres" -version = "0.1.3" -requires_python = ">=3.8.1,<4.0" +version = "0.1.13" +requires_python = "<4.0,>=3.8.1" summary = "llama-index vector_stores postgres integration" groups = ["default"] dependencies = [ "asyncpg<0.30.0,>=0.29.0", - "llama-index-core<0.11.0,>=0.10.1", + "llama-index-core<0.11.0,>=0.10.20", "pgvector<0.3.0,>=0.2.4", "psycopg2-binary<3.0.0,>=2.9.9", - "sqlalchemy[asyncio]<3.0.0,>=2.0.25", + "sqlalchemy[asyncio]<2.1,>=1.4.49", ] files = [ - {file = "llama_index_vector_stores_postgres-0.1.3-py3-none-any.whl", hash = "sha256:efe3eb0aa4e9647bf5707b5b6bace495536461a4cb1e9777c6ad6f09c51d8f26"}, - {file = "llama_index_vector_stores_postgres-0.1.3.tar.gz", hash = "sha256:bd6a8271d6bd7433cb71ea0e120322be906690b6cab3fa68123cc293fab81f02"}, + {file = "llama_index_vector_stores_postgres-0.1.13-py3-none-any.whl", hash = "sha256:1cfc545b7b8ce5632f569843362d920beb6e851c3aa05822c5e84ed3a6f32859"}, + {file = "llama_index_vector_stores_postgres-0.1.13.tar.gz", hash = "sha256:6aa4928176f57e6118f7ca63f31350a250cb3856ecabf50e5c255945e1d8803e"}, ] [[package]] name 
= "llama-index-vector-stores-qdrant" -version = "0.2.8" +version = "0.2.14" requires_python = "<3.13,>=3.9" summary = "llama-index vector_stores qdrant integration" groups = ["default"] dependencies = [ "grpcio<2.0.0,>=1.60.0", "llama-index-core<0.11.0,>=0.10.1", - "qdrant-client<2.0.0,>=1.7.1", + "qdrant-client>=1.7.1", ] files = [ - {file = "llama_index_vector_stores_qdrant-0.2.8-py3-none-any.whl", hash = "sha256:04a4a3c094ab7c75b6b543a584291385aa127b4cb379f46e2576e1da91a67310"}, - {file = "llama_index_vector_stores_qdrant-0.2.8.tar.gz", hash = "sha256:a9e295ee39414ee8d411831c3c24be3c00fac519530558bf0c6259fcc48deb80"}, + {file = "llama_index_vector_stores_qdrant-0.2.14-py3-none-any.whl", hash = "sha256:ddf5d6ac85d93459dc1e578394de97f6eb5ecb56c584daec3cf6063f0d715cc0"}, + {file = "llama_index_vector_stores_qdrant-0.2.14.tar.gz", hash = "sha256:fb0c3064d417b76341495a98705e404e2cb92b60a4bbe02eba1ba78bbbac9ca2"}, ] [[package]] name = "llama-index-vector-stores-weaviate" -version = "0.1.4" -requires_python = ">=3.8.1,<4.0" +version = "1.0.2" +requires_python = "<4.0,>=3.8.1" summary = "llama-index vector_stores weaviate integration" groups = ["default"] dependencies = [ "llama-index-core<0.11.0,>=0.10.1", - "weaviate-client<4.0.0,>=3.26.2", + "weaviate-client<5.0.0,>=4.5.7", ] files = [ - {file = "llama_index_vector_stores_weaviate-0.1.4-py3-none-any.whl", hash = "sha256:ea1545eb63938114ffdb1189d06ad32f2fe221cc01bdb8e6be60292356c035a1"}, - {file = "llama_index_vector_stores_weaviate-0.1.4.tar.gz", hash = "sha256:3819ae4144d374eafca2efb1a286d575e385e5d84e7bb0657f505663ba95fea2"}, + {file = "llama_index_vector_stores_weaviate-1.0.2-py3-none-any.whl", hash = "sha256:3de2b5f5d666c34f143df7660bff9dc9741899db5f4b6c316604b9722a04ec57"}, + {file = "llama_index_vector_stores_weaviate-1.0.2.tar.gz", hash = "sha256:80d36e098a8a11c3d806cebe02c53f67bcf0f32777be067b4af933d19c5373b5"}, ] [[package]] name = "llama-parse" -version = "0.4.1" +version = "0.4.9" requires_python = 
"<4.0,>=3.8.1" summary = "Parse files into RAG-Optimized formats." groups = ["default"] @@ -1871,23 +1961,8 @@ dependencies = [ "llama-index-core>=0.10.29", ] files = [ - {file = "llama_parse-0.4.1-py3-none-any.whl", hash = "sha256:2c08962b66791c61fc360ae2042f953729c7b8decc3590d01fea5a98ca1f6676"}, - {file = "llama_parse-0.4.1.tar.gz", hash = "sha256:d723af84d6a1fc99eb431915d21865d20b76d8a246dbaa124d1f96c956a644f7"}, -] - -[[package]] -name = "llamaindex-py-client" -version = "0.1.19" -requires_python = "<4,>=3.8" -summary = "" -groups = ["default"] -dependencies = [ - "httpx>=0.20.0", - "pydantic>=1.10", -] -files = [ - {file = "llamaindex_py_client-0.1.19-py3-none-any.whl", hash = "sha256:fd9416fd78b97209bf323bc3c7fab314499778563e7274f10853ad560563d10e"}, - {file = "llamaindex_py_client-0.1.19.tar.gz", hash = "sha256:73f74792bb8c092bae6dc626627a09ac13a099fa8d10f8fcc83e17a2b332cca7"}, + {file = "llama_parse-0.4.9-py3-none-any.whl", hash = "sha256:71974a57a73d642608cc406942bee4e7fc1a713fa410f51df67da509479ba544"}, + {file = "llama_parse-0.4.9.tar.gz", hash = "sha256:657f8fa5f7d399f14c0454fc05cae6034da0373f191df6cfca17a1b4a704ef87"}, ] [[package]] @@ -2158,9 +2233,23 @@ files = [ {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] +[[package]] +name = "ollama" +version = "0.3.1" +requires_python = "<4.0,>=3.8" +summary = "The official Python client for Ollama." 
+groups = ["default"] +dependencies = [ + "httpx<0.28.0,>=0.27.0", +] +files = [ + {file = "ollama-0.3.1-py3-none-any.whl", hash = "sha256:db50034c73d6350349bdfba19c3f0d54a3cea73eb97b35f9d7419b2fc7206454"}, + {file = "ollama-0.3.1.tar.gz", hash = "sha256:032572fb494a4fba200c65013fe937a65382c846b5f358d9e8918ecbc9ac44b5"}, +] + [[package]] name = "openai" -version = "1.37.0" +version = "1.38.0" requires_python = ">=3.7.1" summary = "The official Python library for the openai API" groups = ["default"] @@ -2174,8 +2263,8 @@ dependencies = [ "typing-extensions<5,>=4.7", ] files = [ - {file = "openai-1.37.0-py3-none-any.whl", hash = "sha256:a903245c0ecf622f2830024acdaa78683c70abb8e9d37a497b851670864c9f73"}, - {file = "openai-1.37.0.tar.gz", hash = "sha256:dc8197fc40ab9d431777b6620d962cc49f4544ffc3011f03ce0a805e6eb54adb"}, + {file = "openai-1.38.0-py3-none-any.whl", hash = "sha256:a19ef052f1676320f52183ae6f9775da6d888fbe3aec57886117163c095d9f7c"}, + {file = "openai-1.38.0.tar.gz", hash = "sha256:30fb324bf452ecb1194ca7dbc64566a4d7aa054c6a5da857937ede7d517a220b"}, ] [[package]] @@ -2684,29 +2773,29 @@ files = [ [[package]] name = "pyjwt" -version = "2.8.0" -requires_python = ">=3.7" +version = "2.9.0" +requires_python = ">=3.8" summary = "JSON Web Token implementation in Python" groups = ["default"] files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, ] [[package]] name = "pyjwt" -version = "2.8.0" +version = "2.9.0" extras = ["crypto"] -requires_python = ">=3.7" +requires_python = ">=3.8" summary = "JSON Web Token implementation in Python" 
groups = ["default"] dependencies = [ - "PyJWT==2.8.0", + "PyJWT==2.9.0", "cryptography>=3.4.0", ] files = [ - {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, - {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, + {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, + {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, ] [[package]] @@ -3125,13 +3214,13 @@ files = [ [[package]] name = "setuptools" -version = "71.1.0" +version = "72.1.0" requires_python = ">=3.8" summary = "Easily download, build, install, upgrade, and uninstall Python packages" groups = ["default"] files = [ - {file = "setuptools-71.1.0-py3-none-any.whl", hash = "sha256:33874fdc59b3188304b2e7c80d9029097ea31627180896fb549c578ceb8a0855"}, - {file = "setuptools-71.1.0.tar.gz", hash = "sha256:032d42ee9fb536e33087fb66cac5f840eb9391ed05637b3f2a76a7c8fb477936"}, + {file = "setuptools-72.1.0-py3-none-any.whl", hash = "sha256:5a03e1860cf56bb6ef48ce186b0e557fdba433237481a9a625176c2831be15d1"}, + {file = "setuptools-72.1.0.tar.gz", hash = "sha256:8d243eff56d095e5817f796ede6ae32941278f542e0f941867cc05ae52b162ec"}, ] [[package]] @@ -3443,7 +3532,7 @@ files = [ [[package]] name = "tqdm" -version = "4.66.4" +version = "4.66.5" requires_python = ">=3.7" summary = "Fast, Extensible Progress Meter" groups = ["default"] @@ -3451,8 +3540,8 @@ dependencies = [ "colorama; platform_system == \"Windows\"", ] files = [ - {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, - {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, + {file = "tqdm-4.66.5-py3-none-any.whl", hash = 
"sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, + {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, ] [[package]] @@ -3628,18 +3717,23 @@ files = [ [[package]] name = "weaviate-client" -version = "3.26.6" +version = "4.7.1" requires_python = ">=3.8" summary = "A python native Weaviate client" groups = ["default"] dependencies = [ - "authlib<2.0.0,>=1.3.1", - "requests<3.0.0,>=2.32.2", - "validators<1.0.0,>=0.21.2", + "authlib<2.0.0,>=1.2.1", + "grpcio-health-checking<2.0.0,>=1.57.0", + "grpcio-tools<2.0.0,>=1.57.0", + "grpcio<2.0.0,>=1.57.0", + "httpx<=0.27.0,>=0.25.0", + "pydantic<3.0.0,>=2.5.0", + "requests<3.0.0,>=2.30.0", + "validators==0.33.0", ] files = [ - {file = "weaviate_client-3.26.6-py3-none-any.whl", hash = "sha256:ecdac157d1180d41fe4618118948b59c0f6ab533b499b7a496774322c507cc56"}, - {file = "weaviate_client-3.26.6.tar.gz", hash = "sha256:6c3c4b67ff1536b29abcd135008f79ef9338e9dff394190334e993da886fd388"}, + {file = "weaviate_client-4.7.1-py3-none-any.whl", hash = "sha256:342f5c67b126cee4dc3a60467ad1ae74971cd5614e27af6fb13d687a345352c4"}, + {file = "weaviate_client-4.7.1.tar.gz", hash = "sha256:af99ac4e53613d2ff5b797372e95d004d0c8a1dd10a7f592068bcb423a30af30"}, ] [[package]] From db3cc978d037b698730f81e545607d88d77c698e Mon Sep 17 00:00:00 2001 From: gayathrivijayakumar Date: Tue, 6 Aug 2024 14:30:02 +0530 Subject: [PATCH 4/4] Token counter fixes --- src/unstract/sdk/__init__.py | 2 +- src/unstract/sdk/utils/token_counter.py | 112 +++++++++++++++--------- 2 files changed, 74 insertions(+), 40 deletions(-) diff --git a/src/unstract/sdk/__init__.py b/src/unstract/sdk/__init__.py index b3246670..6da01775 100644 --- a/src/unstract/sdk/__init__.py +++ b/src/unstract/sdk/__init__.py @@ -1,4 +1,4 @@ -__version__ = "0.41.0" +__version__ = "0.41.1" def get_sdk_version(): diff --git a/src/unstract/sdk/utils/token_counter.py b/src/unstract/sdk/utils/token_counter.py index 
3337f6a0..444b0514 100644 --- a/src/unstract/sdk/utils/token_counter.py +++ b/src/unstract/sdk/utils/token_counter.py @@ -2,6 +2,8 @@ from llama_index.core.callbacks.schema import EventPayload from llama_index.core.utilities.token_counting import TokenCounter +from openai.types import CompletionUsage +from openai.types.chat import ChatCompletion class Constants: @@ -10,6 +12,9 @@ class Constants: KEY_EVAL_COUNT = "eval_count" KEY_PROMPT_EVAL_COUNT = "prompt_eval_count" KEY_RAW_RESPONSE = "_raw_response" + KEY_TEXT_TOKEN_COUNT = "inputTextTokenCount" + KEY_TOKEN_COUNT = "tokenCount" + KEY_RESULTS = "results" INPUT_TOKENS = "input_tokens" OUTPUT_TOKENS = "output_tokens" PROMPT_TOKENS = "prompt_tokens" @@ -32,62 +37,91 @@ def __init__(self, input_tokens, output_tokens): @staticmethod def get_llm_token_counts(payload: dict[str, Any]) -> TokenCounter: - token_counter = TokenCounter( - input_tokens=Constants.DEFAULT_TOKEN_COUNT, - output_tokens=Constants.DEFAULT_TOKEN_COUNT, - ) + prompt_tokens = Constants.DEFAULT_TOKEN_COUNT + completion_tokens = Constants.DEFAULT_TOKEN_COUNT if EventPayload.PROMPT in payload: completion_raw = payload.get(EventPayload.COMPLETION).raw if completion_raw: - if hasattr(completion_raw, Constants.KEY_USAGE): - token_counts: dict[ - str, int - ] = TokenCounter._get_prompt_completion_tokens(completion_raw) - token_counter = TokenCounter( - input_tokens=token_counts[Constants.PROMPT_TOKENS], - output_tokens=token_counts[Constants.COMPLETION_TOKENS], - ) - elif hasattr(completion_raw, Constants.KEY_RAW_RESPONSE): - if hasattr( - completion_raw._raw_response, - Constants.KEY_USAGE_METADATA, - ): - usage = completion_raw._raw_response.usage_metadata - token_counter = TokenCounter( - input_tokens=usage.prompt_token_count, - output_tokens=usage.candidates_token_count, - ) - else: - prompt_tokens = Constants.DEFAULT_TOKEN_COUNT - completion_tokens = Constants.DEFAULT_TOKEN_COUNT - if hasattr(completion_raw, Constants.KEY_PROMPT_EVAL_COUNT): - 
prompt_tokens = completion_raw.prompt_eval_count - if hasattr(completion_raw, Constants.KEY_EVAL_COUNT): - completion_tokens = completion_raw.eval_count - token_counter = TokenCounter( - input_tokens=prompt_tokens, - output_tokens=completion_tokens, - ) + # For Open AI models, token count is part of ChatCompletion + if isinstance(completion_raw, ChatCompletion): + if hasattr(completion_raw, Constants.KEY_USAGE): + token_counts: dict[ + str, int + ] = TokenCounter._get_prompt_completion_tokens(completion_raw) + prompt_tokens = token_counts[Constants.PROMPT_TOKENS] + completion_tokens = token_counts[Constants.COMPLETION_TOKENS] + # For other models + elif isinstance(completion_raw, dict): + # For Gemini models + if completion_raw.get(Constants.KEY_RAW_RESPONSE): + if hasattr( + completion_raw.get(Constants.KEY_RAW_RESPONSE), + Constants.KEY_USAGE_METADATA, + ): + usage = completion_raw.get( + Constants.KEY_RAW_RESPONSE + ).usage_metadata + prompt_tokens = usage.prompt_token_count + completion_tokens = usage.candidates_token_count + elif completion_raw.get(Constants.KEY_USAGE): + token_counts: dict[ + str, int + ] = TokenCounter._get_prompt_completion_tokens(completion_raw) + prompt_tokens = token_counts[Constants.PROMPT_TOKENS] + completion_tokens = token_counts[Constants.COMPLETION_TOKENS] + # For Bedrock models + elif Constants.KEY_TEXT_TOKEN_COUNT in completion_raw: + prompt_tokens = completion_raw[Constants.KEY_TEXT_TOKEN_COUNT] + if Constants.KEY_RESULTS in completion_raw: + result_list: list = completion_raw[Constants.KEY_RESULTS] + if len(result_list) > 0: + result: dict = result_list[0] + if Constants.KEY_TOKEN_COUNT in result: + completion_tokens = result.get( + Constants.KEY_TOKEN_COUNT + ) + else: + if completion_raw.get(Constants.KEY_PROMPT_EVAL_COUNT): + prompt_tokens = completion_raw.get( + Constants.KEY_PROMPT_EVAL_COUNT + ) + if completion_raw.get(Constants.KEY_EVAL_COUNT): + completion_tokens = completion_raw.get( + Constants.KEY_EVAL_COUNT + ) + # 
For Anthropic models elif EventPayload.MESSAGES in payload: response_raw = payload.get(EventPayload.RESPONSE).raw if response_raw: token_counts: dict[ str, int ] = TokenCounter._get_prompt_completion_tokens(response_raw) - token_counter = TokenCounter( - input_tokens=token_counts[Constants.PROMPT_TOKENS], - output_tokens=token_counts[Constants.COMPLETION_TOKENS], - ) + prompt_tokens = token_counts[Constants.PROMPT_TOKENS] + completion_tokens = token_counts[Constants.COMPLETION_TOKENS] + token_counter = TokenCounter( + input_tokens=prompt_tokens, + output_tokens=completion_tokens, + ) return token_counter @staticmethod def _get_prompt_completion_tokens(response) -> dict[str, int]: + usage = None prompt_tokens = Constants.DEFAULT_TOKEN_COUNT completion_tokens = Constants.DEFAULT_TOKEN_COUNT - - if hasattr(response, Constants.KEY_USAGE): + # For OpenAI models,response is an obj of CompletionUsage + if ( + isinstance(response, ChatCompletion) + and hasattr(response, Constants.KEY_USAGE) + and isinstance(response.usage, CompletionUsage) + ): usage = response.usage + # For LLM models other than OpenAI, response is a dict + elif isinstance(response, dict) and Constants.KEY_USAGE in response: + usage = response.get(Constants.KEY_USAGE) + + if usage: if hasattr(usage, Constants.INPUT_TOKENS): prompt_tokens = usage.input_tokens elif hasattr(usage, Constants.PROMPT_TOKENS):