diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 89464d20..d1b25422 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "0.4.0-alpha.11"
+  ".": "0.4.0-alpha.12"
 }
diff --git a/.stats.yml b/.stats.yml
index c9e7f120..4be566fc 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
 configured_endpoints: 103
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-185ec058880381a5526ac91a488af1833f55656e36cd10b3795eb8fd4d75026f.yml
-openapi_spec_hash: fa935c08e25d23cff624e5e150f8e6ca
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-73fc7f59a69be032d1b18e2fff3ed7509e175703332723b27aac50e2514ca854.yml
+openapi_spec_hash: a22051c017a4822ef689585896659675
 config_hash: 39578cfdeb4a10121f2cb3fa3e4d5e20
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d718553c..f6f6ed4e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,13 @@
 # Changelog
 
+## 0.4.0-alpha.12 (2025-11-19)
+
+Full Changelog: [v0.4.0-alpha.11...v0.4.0-alpha.12](https://github.com/llamastack/llama-stack-client-python/compare/v0.4.0-alpha.11...v0.4.0-alpha.12)
+
+### Bug Fixes
+
+* Pydantic validation error with list-type metadata in vector search ([#3797](https://github.com/llamastack/llama-stack-client-python/issues/3797)) ([6729d3f](https://github.com/llamastack/llama-stack-client-python/commit/6729d3f41aac315ca24ef844ca1f9fa5ce8eb5de))
+
 ## 0.4.0-alpha.11 (2025-11-18)
 
 Full Changelog: [v0.4.0-alpha.10...v0.4.0-alpha.11](https://github.com/llamastack/llama-stack-client-python/compare/v0.4.0-alpha.10...v0.4.0-alpha.11)
diff --git a/pyproject.toml b/pyproject.toml
index 334311cb..f6d543da 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "llama_stack_client"
-version = "0.4.0-alpha.11"
+version = "0.4.0-alpha.12"
 description = "The official Python library for the llama-stack-client API"
 dynamic = ["readme"]
 license = "MIT"
diff --git a/src/llama_stack_client/types/vector_stores/vector_store_file.py b/src/llama_stack_client/types/vector_stores/vector_store_file.py
index 4248e618..bbf5f398 100644
--- a/src/llama_stack_client/types/vector_stores/vector_store_file.py
+++ b/src/llama_stack_client/types/vector_stores/vector_store_file.py
@@ -63,7 +63,14 @@ class VectorStoreFile(BaseModel):
 
     vector_store_id: str
 
-    attributes: Optional[Dict[str, object]] = None
+    attributes: Optional[Dict[str, Union[str, float, bool]]] = None
+    """Set of 16 key-value pairs that can be attached to an object.
+
+    This can be useful for storing additional information about the object in a
+    structured format, and querying for objects via API or the dashboard. Keys are
+    strings with a maximum length of 64 characters. Values are strings with a
+    maximum length of 512 characters, booleans, or numbers.
+    """
 
     last_error: Optional[LastError] = None
     """Error information for failed vector store file processing."""
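
The substantive change in this release is the `attributes` field on `VectorStoreFile`, which narrows from `Dict[str, object]` to `Dict[str, Union[str, float, bool]]`. Below is a minimal standalone sketch of how Pydantic treats that annotation; the `FileAttributesExample` model is hypothetical and only mirrors the changed field, it is not the SDK's actual class.

```python
# Hypothetical model mirroring only the `attributes` field from this diff;
# not the SDK's VectorStoreFile, just an illustration of the annotation.
from typing import Dict, Optional, Union

from pydantic import BaseModel, ValidationError


class FileAttributesExample(BaseModel):
    # Values must be scalars: strings, numbers, or booleans.
    attributes: Optional[Dict[str, Union[str, float, bool]]] = None


# Scalar attribute values validate cleanly under the new annotation.
ok = FileAttributesExample(attributes={"author": "alice", "score": 0.92, "reviewed": True})
print(ok.attributes)

# Non-scalar values (e.g. a list) do not satisfy Union[str, float, bool]
# and raise a ValidationError.
try:
    FileAttributesExample(attributes={"tags": ["a", "b"]})
except ValidationError as exc:
    print(f"{len(exc.errors())} validation error(s)")
```

The previous `Dict[str, object]` annotation would have accepted any value type here; the narrowed annotation matches the documented constraint that attribute values are strings, booleans, or numbers.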