diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index ca1d94e3..d357339a 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.2.17"
+ ".": "0.2.18-alpha.1"
}
diff --git a/.stats.yml b/.stats.yml
index bd77bef1..60347bbc 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 106
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-f59f1c7d33001d60b5190f68aa49eacec90f05dbe694620b8916152c3922051d.yml
-openapi_spec_hash: 804edd2e834493906dc430145402be3b
-config_hash: de16e52db65de71ac35adcdb665a74f5
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-c371abef4463f174f8d35ef3da4697fae5eb221db615f9c305319196472f313b.yml
+openapi_spec_hash: d9bb62faf229c2c2875c732715e9cfd1
+config_hash: e67fd054e95c1e82f78f4b834e96bb65
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 888d369a..8b96c651 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,23 @@
# Changelog
+## 0.2.18-alpha.1 (2025-08-12)
+
+Full Changelog: [v0.2.17...v0.2.18-alpha.1](https://github.com/llamastack/llama-stack-client-python/compare/v0.2.17...v0.2.18-alpha.1)
+
+### Features
+
+* **api:** update via SDK Studio ([8afae6c](https://github.com/llamastack/llama-stack-client-python/commit/8afae6c1e1a4614cc59db7ae511440693e0479a6))
+* **api:** update via SDK Studio ([143a973](https://github.com/llamastack/llama-stack-client-python/commit/143a973ea9ff81da1d93c421af8c85dbd171ef3c))
+* **api:** update via SDK Studio ([b8e32bb](https://github.com/llamastack/llama-stack-client-python/commit/b8e32bbbf68f8a75c956079119c6b65d7ac165e5))
+* **api:** update via SDK Studio ([1a2c77d](https://github.com/llamastack/llama-stack-client-python/commit/1a2c77df732eb9d0c031e0ff7558176fbf754ad8))
+* **api:** update via SDK Studio ([d66fb5f](https://github.com/llamastack/llama-stack-client-python/commit/d66fb5fe89acb66a55066d82b849bbf4d402db99))
+
+
+### Chores
+
+* **internal:** update comment in script ([8d599cd](https://github.com/llamastack/llama-stack-client-python/commit/8d599cd47f98f704f89c9bd979a55cc334895107))
+* update @stainless-api/prism-cli to v5.15.0 ([5f8ae94](https://github.com/llamastack/llama-stack-client-python/commit/5f8ae94955bb3403c0abe89f2999c2d49af97b07))
+
## 0.2.17 (2025-08-06)
Full Changelog: [v0.2.15...v0.2.17](https://github.com/llamastack/llama-stack-client-python/compare/v0.2.15...v0.2.17)
diff --git a/api.md b/api.md
index e01851c1..4056f4a6 100644
--- a/api.md
+++ b/api.md
@@ -89,7 +89,7 @@ Methods:
- client.responses.create(\*\*params) -> ResponseObject
- client.responses.retrieve(response_id) -> ResponseObject
-- client.responses.list(\*\*params) -> ResponseListResponse
+- client.responses.list(\*\*params) -> SyncOpenAICursorPage[ResponseListResponse]
## InputItems
@@ -290,7 +290,7 @@ Methods:
- client.chat.completions.create(\*\*params) -> CompletionCreateResponse
- client.chat.completions.retrieve(completion_id) -> CompletionRetrieveResponse
-- client.chat.completions.list(\*\*params) -> CompletionListResponse
+- client.chat.completions.list(\*\*params) -> SyncOpenAICursorPage[CompletionListResponse]
# Completions
@@ -355,7 +355,7 @@ Methods:
- client.vector_stores.create(\*\*params) -> VectorStore
- client.vector_stores.retrieve(vector_store_id) -> VectorStore
- client.vector_stores.update(vector_store_id, \*\*params) -> VectorStore
-- client.vector_stores.list(\*\*params) -> ListVectorStoresResponse
+- client.vector_stores.list(\*\*params) -> SyncOpenAICursorPage[VectorStore]
- client.vector_stores.delete(vector_store_id) -> VectorStoreDeleteResponse
- client.vector_stores.search(vector_store_id, \*\*params) -> VectorStoreSearchResponse
@@ -366,7 +366,6 @@ Types:
```python
from llama_stack_client.types.vector_stores import (
VectorStoreFile,
- FileListResponse,
FileDeleteResponse,
FileContentResponse,
)
@@ -377,7 +376,7 @@ Methods:
- client.vector_stores.files.create(vector_store_id, \*\*params) -> VectorStoreFile
- client.vector_stores.files.retrieve(file_id, \*, vector_store_id) -> VectorStoreFile
- client.vector_stores.files.update(file_id, \*, vector_store_id, \*\*params) -> VectorStoreFile
-- client.vector_stores.files.list(vector_store_id, \*\*params) -> FileListResponse
+- client.vector_stores.files.list(vector_store_id, \*\*params) -> SyncOpenAICursorPage[VectorStoreFile]
- client.vector_stores.files.delete(file_id, \*, vector_store_id) -> FileDeleteResponse
- client.vector_stores.files.content(file_id, \*, vector_store_id) -> FileContentResponse
@@ -589,6 +588,6 @@ Methods:
- client.files.create(\*\*params) -> File
- client.files.retrieve(file_id) -> File
-- client.files.list(\*\*params) -> ListFilesResponse
+- client.files.list(\*\*params) -> SyncOpenAICursorPage[File]
- client.files.delete(file_id) -> DeleteFileResponse
- client.files.content(file_id) -> object
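Reviewer note: the api.md changes above reflect that every OpenAI-compatible `list` endpoint now returns a cursor page instead of a bare list-response model. A minimal sync usage sketch, assuming a locally running stack (the base URL below is a placeholder, not part of this diff):

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder URL

# Iterating the returned SyncOpenAICursorPage fetches follow-up pages
# automatically, passing the last item's ID back as the `after` cursor.
for file in client.files.list(limit=20):
    print(file.id)
```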
diff --git a/pyproject.toml b/pyproject.toml
index c784560b..5d47b8e1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "llama_stack_client"
-version = "0.2.17"
+version = "0.2.18-alpha.1"
description = "The official Python library for the llama-stack-client API"
dynamic = ["readme"]
license = "MIT"
diff --git a/scripts/mock b/scripts/mock
index d2814ae6..0b28f6ea 100755
--- a/scripts/mock
+++ b/scripts/mock
@@ -21,7 +21,7 @@ echo "==> Starting mock server with URL ${URL}"
# Run prism mock on the given spec
if [ "$1" == "--daemon" ]; then
- npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" &> .prism.log &
+ npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL" &> .prism.log &
# Wait for server to come online
echo -n "Waiting for server"
@@ -37,5 +37,5 @@ if [ "$1" == "--daemon" ]; then
echo
else
- npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL"
+ npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL"
fi
diff --git a/scripts/test b/scripts/test
new file mode 100755
index 00000000..dbeda2d2
--- /dev/null
+++ b/scripts/test
@@ -0,0 +1,61 @@
+#!/usr/bin/env bash
+
+set -e
+
+cd "$(dirname "$0")/.."
+
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[0;33m'
+NC='\033[0m' # No Color
+
+function prism_is_running() {
+ curl --silent "http://localhost:4010" >/dev/null 2>&1
+}
+
+kill_server_on_port() {
+ pids=$(lsof -t -i tcp:"$1" || echo "")
+ if [ "$pids" != "" ]; then
+ kill "$pids"
+ echo "Stopped $pids."
+ fi
+}
+
+function is_overriding_api_base_url() {
+ [ -n "$TEST_API_BASE_URL" ]
+}
+
+if ! is_overriding_api_base_url && ! prism_is_running ; then
+ # When we exit this script, make sure to kill the background mock server process
+ trap 'kill_server_on_port 4010' EXIT
+
+ # Start the dev server
+ ./scripts/mock --daemon
+fi
+
+if is_overriding_api_base_url ; then
+ echo -e "${GREEN}✔ Running tests against ${TEST_API_BASE_URL}${NC}"
+ echo
+elif ! prism_is_running ; then
+ echo -e "${RED}ERROR:${NC} The test suite will not run without a mock Prism server"
+ echo -e "running against your OpenAPI spec."
+ echo
+ echo -e "To run the server, pass in the path or url of your OpenAPI"
+ echo -e "spec to the prism command:"
+ echo
+ echo -e " \$ ${YELLOW}npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock path/to/your.openapi.yml${NC}"
+ echo
+
+ exit 1
+else
+ echo -e "${GREEN}✔ Mock prism server is running with your OpenAPI spec${NC}"
+ echo
+fi
+
+export DEFER_PYDANTIC_BUILD=false
+
+echo "==> Running tests"
+rye run pytest "$@"
+
+echo "==> Running Pydantic v1 tests"
+rye run nox -s test-pydantic-v1 -- "$@"
diff --git a/src/llama_stack_client/pagination.py b/src/llama_stack_client/pagination.py
index c2f7fe80..9122ff46 100644
--- a/src/llama_stack_client/pagination.py
+++ b/src/llama_stack_client/pagination.py
@@ -5,7 +5,7 @@
from ._base_client import BasePage, PageInfo, BaseSyncPage, BaseAsyncPage
-__all__ = ["SyncDatasetsIterrows", "AsyncDatasetsIterrows"]
+__all__ = ["SyncDatasetsIterrows", "AsyncDatasetsIterrows", "SyncOpenAICursorPage", "AsyncOpenAICursorPage"]
_T = TypeVar("_T")
@@ -48,3 +48,61 @@ def next_page_info(self) -> Optional[PageInfo]:
return None
return PageInfo(params={"start_index": next_index})
+
+
+class SyncOpenAICursorPage(BaseSyncPage[_T], BasePage[_T], Generic[_T]):
+ data: List[_T]
+ has_more: Optional[bool] = None
+ last_id: Optional[str] = None
+
+ @override
+ def _get_page_items(self) -> List[_T]:
+ data = self.data
+ if not data:
+ return []
+ return data
+
+ @override
+ def has_next_page(self) -> bool:
+ has_more = self.has_more
+ if has_more is not None and has_more is False:
+ return False
+
+ return super().has_next_page()
+
+ @override
+ def next_page_info(self) -> Optional[PageInfo]:
+ last_id = self.last_id
+ if not last_id:
+ return None
+
+ return PageInfo(params={"after": last_id})
+
+
+class AsyncOpenAICursorPage(BaseAsyncPage[_T], BasePage[_T], Generic[_T]):
+ data: List[_T]
+ has_more: Optional[bool] = None
+ last_id: Optional[str] = None
+
+ @override
+ def _get_page_items(self) -> List[_T]:
+ data = self.data
+ if not data:
+ return []
+ return data
+
+ @override
+ def has_next_page(self) -> bool:
+ has_more = self.has_more
+ if has_more is not None and has_more is False:
+ return False
+
+ return super().has_next_page()
+
+ @override
+ def next_page_info(self) -> Optional[PageInfo]:
+ last_id = self.last_id
+ if not last_id:
+ return None
+
+ return PageInfo(params={"after": last_id})
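To make the cursor semantics above concrete: a page advances by echoing `last_id` back as the `after` query parameter, and pagination stops once the server reports `has_more=False` or stops returning a `last_id`. A toy, standalone illustration of that decision logic (not SDK code):

```python
from typing import Optional

def next_cursor(has_more: Optional[bool], last_id: Optional[str]) -> Optional[str]:
    """Combines has_next_page() and next_page_info() from the page classes
    above: return the `after` cursor for the next request, or None to stop."""
    if has_more is False:  # server explicitly said there is nothing further
        return None
    return last_id or None  # no last_id means no cursor to continue from

assert next_cursor(True, "file-abc123") == "file-abc123"
assert next_cursor(False, "file-abc123") is None
assert next_cursor(None, None) is None
```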
diff --git a/src/llama_stack_client/resources/chat/completions.py b/src/llama_stack_client/resources/chat/completions.py
index 1355f97a..ccf2cba9 100644
--- a/src/llama_stack_client/resources/chat/completions.py
+++ b/src/llama_stack_client/resources/chat/completions.py
@@ -18,8 +18,9 @@
async_to_streamed_response_wrapper,
)
from ..._streaming import Stream, AsyncStream
+from ...pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage
from ...types.chat import completion_list_params, completion_create_params
-from ..._base_client import make_request_options
+from ..._base_client import AsyncPaginator, make_request_options
from ...types.chat_completion_chunk import ChatCompletionChunk
from ...types.chat.completion_list_response import CompletionListResponse
from ...types.chat.completion_create_response import CompletionCreateResponse
@@ -466,7 +467,7 @@ def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> CompletionListResponse:
+ ) -> SyncOpenAICursorPage[CompletionListResponse]:
"""
List all chat completions.
@@ -487,8 +488,9 @@ def list(
timeout: Override the client-level default timeout for this request, in seconds
"""
- return self._get(
+ return self._get_api_list(
"/v1/openai/v1/chat/completions",
+ page=SyncOpenAICursorPage[CompletionListResponse],
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
@@ -504,7 +506,7 @@ def list(
completion_list_params.CompletionListParams,
),
),
- cast_to=CompletionListResponse,
+ model=CompletionListResponse,
)
@@ -933,7 +935,7 @@ async def retrieve(
cast_to=CompletionRetrieveResponse,
)
- async def list(
+ def list(
self,
*,
after: str | NotGiven = NOT_GIVEN,
@@ -946,7 +948,7 @@ async def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> CompletionListResponse:
+ ) -> AsyncPaginator[CompletionListResponse, AsyncOpenAICursorPage[CompletionListResponse]]:
"""
List all chat completions.
@@ -967,14 +969,15 @@ async def list(
timeout: Override the client-level default timeout for this request, in seconds
"""
- return await self._get(
+ return self._get_api_list(
"/v1/openai/v1/chat/completions",
+ page=AsyncOpenAICursorPage[CompletionListResponse],
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
- query=await async_maybe_transform(
+ query=maybe_transform(
{
"after": after,
"limit": limit,
@@ -984,7 +987,7 @@ async def list(
completion_list_params.CompletionListParams,
),
),
- cast_to=CompletionListResponse,
+ model=CompletionListResponse,
)
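On the async side, note that `list` is no longer a coroutine: it returns an `AsyncPaginator` that issues requests lazily. A sketch (placeholder base URL):

```python
import asyncio

from llama_stack_client import AsyncLlamaStackClient

async def main() -> None:
    client = AsyncLlamaStackClient(base_url="http://localhost:8321")  # placeholder URL
    # No `await` on the `list(...)` call itself before iterating; the
    # AsyncPaginator fetches pages on demand via the `after` cursor.
    async for completion in client.chat.completions.list(limit=10):
        print(completion.id, completion.model)

asyncio.run(main())
```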
diff --git a/src/llama_stack_client/resources/files.py b/src/llama_stack_client/resources/files.py
index 3eac6486..96c5c871 100644
--- a/src/llama_stack_client/resources/files.py
+++ b/src/llama_stack_client/resources/files.py
@@ -18,9 +18,9 @@
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
+from ..pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage
from ..types.file import File
-from .._base_client import make_request_options
-from ..types.list_files_response import ListFilesResponse
+from .._base_client import AsyncPaginator, make_request_options
from ..types.delete_file_response import DeleteFileResponse
__all__ = ["FilesResource", "AsyncFilesResource"]
@@ -144,7 +144,7 @@ def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> ListFilesResponse:
+ ) -> SyncOpenAICursorPage[File]:
"""
Returns a list of files that belong to the user's organization.
@@ -170,8 +170,9 @@ def list(
timeout: Override the client-level default timeout for this request, in seconds
"""
- return self._get(
+ return self._get_api_list(
"/v1/openai/v1/files",
+ page=SyncOpenAICursorPage[File],
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
@@ -187,7 +188,7 @@ def list(
file_list_params.FileListParams,
),
),
- cast_to=ListFilesResponse,
+ model=File,
)
def delete(
@@ -362,7 +363,7 @@ async def retrieve(
cast_to=File,
)
- async def list(
+ def list(
self,
*,
after: str | NotGiven = NOT_GIVEN,
@@ -375,7 +376,7 @@ async def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> ListFilesResponse:
+ ) -> AsyncPaginator[File, AsyncOpenAICursorPage[File]]:
"""
Returns a list of files that belong to the user's organization.
@@ -401,14 +402,15 @@ async def list(
timeout: Override the client-level default timeout for this request, in seconds
"""
- return await self._get(
+ return self._get_api_list(
"/v1/openai/v1/files",
+ page=AsyncOpenAICursorPage[File],
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
- query=await async_maybe_transform(
+ query=maybe_transform(
{
"after": after,
"limit": limit,
@@ -418,7 +420,7 @@ async def list(
file_list_params.FileListParams,
),
),
- cast_to=ListFilesResponse,
+ model=File,
)
async def delete(
diff --git a/src/llama_stack_client/resources/responses/responses.py b/src/llama_stack_client/resources/responses/responses.py
index f958f556..375834ee 100644
--- a/src/llama_stack_client/resources/responses/responses.py
+++ b/src/llama_stack_client/resources/responses/responses.py
@@ -27,7 +27,8 @@
AsyncInputItemsResourceWithStreamingResponse,
)
from ..._streaming import Stream, AsyncStream
-from ..._base_client import make_request_options
+from ...pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage
+from ..._base_client import AsyncPaginator, make_request_options
from ...types.response_object import ResponseObject
from ...types.response_list_response import ResponseListResponse
from ...types.response_object_stream import ResponseObjectStream
@@ -288,7 +289,7 @@ def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> ResponseListResponse:
+ ) -> SyncOpenAICursorPage[ResponseListResponse]:
"""
List all OpenAI responses.
@@ -309,8 +310,9 @@ def list(
timeout: Override the client-level default timeout for this request, in seconds
"""
- return self._get(
+ return self._get_api_list(
"/v1/openai/v1/responses",
+ page=SyncOpenAICursorPage[ResponseListResponse],
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
@@ -326,7 +328,7 @@ def list(
response_list_params.ResponseListParams,
),
),
- cast_to=ResponseListResponse,
+ model=ResponseListResponse,
)
@@ -570,7 +572,7 @@ async def retrieve(
cast_to=ResponseObject,
)
- async def list(
+ def list(
self,
*,
after: str | NotGiven = NOT_GIVEN,
@@ -583,7 +585,7 @@ async def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> ResponseListResponse:
+ ) -> AsyncPaginator[ResponseListResponse, AsyncOpenAICursorPage[ResponseListResponse]]:
"""
List all OpenAI responses.
@@ -604,14 +606,15 @@ async def list(
timeout: Override the client-level default timeout for this request, in seconds
"""
- return await self._get(
+ return self._get_api_list(
"/v1/openai/v1/responses",
+ page=AsyncOpenAICursorPage[ResponseListResponse],
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
- query=await async_maybe_transform(
+ query=maybe_transform(
{
"after": after,
"limit": limit,
@@ -621,7 +624,7 @@ async def list(
response_list_params.ResponseListParams,
),
),
- cast_to=ResponseListResponse,
+ model=ResponseListResponse,
)
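The responses resource follows the same pattern; a brief sync sketch (placeholder base URL):

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder URL

# Yields ResponseListResponse items across pages via the `after` cursor.
for response in client.responses.list(limit=10):
    print(response.id)
```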
diff --git a/src/llama_stack_client/resources/vector_stores/files.py b/src/llama_stack_client/resources/vector_stores/files.py
index 34757751..8589ebc6 100644
--- a/src/llama_stack_client/resources/vector_stores/files.py
+++ b/src/llama_stack_client/resources/vector_stores/files.py
@@ -17,10 +17,10 @@
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
+from ...pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage
+from ..._base_client import AsyncPaginator, make_request_options
from ...types.vector_stores import file_list_params, file_create_params, file_update_params
from ...types.vector_stores.vector_store_file import VectorStoreFile
-from ...types.vector_stores.file_list_response import FileListResponse
from ...types.vector_stores.file_delete_response import FileDeleteResponse
from ...types.vector_stores.file_content_response import FileContentResponse
@@ -188,7 +188,7 @@ def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> FileListResponse:
+ ) -> SyncOpenAICursorPage[VectorStoreFile]:
"""
List files in a vector store.
@@ -217,8 +217,9 @@ def list(
"""
if not vector_store_id:
raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
- return self._get(
+ return self._get_api_list(
f"/v1/openai/v1/vector_stores/{vector_store_id}/files",
+ page=SyncOpenAICursorPage[VectorStoreFile],
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
@@ -235,7 +236,7 @@ def list(
file_list_params.FileListParams,
),
),
- cast_to=FileListResponse,
+ model=VectorStoreFile,
)
def delete(
@@ -457,7 +458,7 @@ async def update(
cast_to=VectorStoreFile,
)
- async def list(
+ def list(
self,
vector_store_id: str,
*,
@@ -472,7 +473,7 @@ async def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> FileListResponse:
+ ) -> AsyncPaginator[VectorStoreFile, AsyncOpenAICursorPage[VectorStoreFile]]:
"""
List files in a vector store.
@@ -501,14 +502,15 @@ async def list(
"""
if not vector_store_id:
raise ValueError(f"Expected a non-empty value for `vector_store_id` but received {vector_store_id!r}")
- return await self._get(
+ return self._get_api_list(
f"/v1/openai/v1/vector_stores/{vector_store_id}/files",
+ page=AsyncOpenAICursorPage[VectorStoreFile],
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
- query=await async_maybe_transform(
+ query=maybe_transform(
{
"after": after,
"before": before,
@@ -519,7 +521,7 @@ async def list(
file_list_params.FileListParams,
),
),
- cast_to=FileListResponse,
+ model=VectorStoreFile,
)
async def delete(
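Likewise for files within a vector store; `vs_123` below is a placeholder ID, not from this diff:

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder URL

# Each yielded item is a VectorStoreFile; pagination is handled transparently.
for vs_file in client.vector_stores.files.list("vs_123"):
    print(vs_file.id)
```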
diff --git a/src/llama_stack_client/resources/vector_stores/vector_stores.py b/src/llama_stack_client/resources/vector_stores/vector_stores.py
index 4b62604d..bdc38e19 100644
--- a/src/llama_stack_client/resources/vector_stores/vector_stores.py
+++ b/src/llama_stack_client/resources/vector_stores/vector_stores.py
@@ -30,9 +30,9 @@
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ..._base_client import make_request_options
+from ...pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage
+from ..._base_client import AsyncPaginator, make_request_options
from ...types.vector_store import VectorStore
-from ...types.list_vector_stores_response import ListVectorStoresResponse
from ...types.vector_store_delete_response import VectorStoreDeleteResponse
from ...types.vector_store_search_response import VectorStoreSearchResponse
@@ -228,7 +228,7 @@ def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> ListVectorStoresResponse:
+ ) -> SyncOpenAICursorPage[VectorStore]:
"""Returns a list of vector stores.
Args:
@@ -254,8 +254,9 @@ def list(
timeout: Override the client-level default timeout for this request, in seconds
"""
- return self._get(
+ return self._get_api_list(
"/v1/openai/v1/vector_stores",
+ page=SyncOpenAICursorPage[VectorStore],
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
@@ -271,7 +272,7 @@ def list(
vector_store_list_params.VectorStoreListParams,
),
),
- cast_to=ListVectorStoresResponse,
+ model=VectorStore,
)
def delete(
@@ -548,7 +549,7 @@ async def update(
cast_to=VectorStore,
)
- async def list(
+ def list(
self,
*,
after: str | NotGiven = NOT_GIVEN,
@@ -561,7 +562,7 @@ async def list(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> ListVectorStoresResponse:
+ ) -> AsyncPaginator[VectorStore, AsyncOpenAICursorPage[VectorStore]]:
"""Returns a list of vector stores.
Args:
@@ -587,14 +588,15 @@ async def list(
timeout: Override the client-level default timeout for this request, in seconds
"""
- return await self._get(
+ return self._get_api_list(
"/v1/openai/v1/vector_stores",
+ page=AsyncOpenAICursorPage[VectorStore],
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
- query=await async_maybe_transform(
+ query=maybe_transform(
{
"after": after,
"before": before,
@@ -604,7 +606,7 @@ async def list(
vector_store_list_params.VectorStoreListParams,
),
),
- cast_to=ListVectorStoresResponse,
+ model=VectorStore,
)
async def delete(
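If page-by-page control is needed instead of flat iteration, the page object can be walked manually. `has_next_page()` is defined on the new page classes above; `get_next_page()` is assumed from the shared Stainless `BasePage` helpers rather than shown in this diff:

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder URL

page = client.vector_stores.list()
while True:
    for store in page.data:  # `data` holds the current page's items
        print(store.id)
    if not page.has_next_page():
        break
    page = page.get_next_page()  # assumed BasePage helper; re-requests with `after`
```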
diff --git a/src/llama_stack_client/types/chat/completion_list_response.py b/src/llama_stack_client/types/chat/completion_list_response.py
index 5f7731cc..e448e35c 100644
--- a/src/llama_stack_client/types/chat/completion_list_response.py
+++ b/src/llama_stack_client/types/chat/completion_list_response.py
@@ -8,53 +8,52 @@
__all__ = [
"CompletionListResponse",
- "Data",
- "DataChoice",
- "DataChoiceMessage",
- "DataChoiceMessageOpenAIUserMessageParam",
- "DataChoiceMessageOpenAIUserMessageParamContentUnionMember1",
- "DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartTextParam",
- "DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParam",
- "DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParamImageURL",
- "DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIFile",
- "DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIFileFile",
- "DataChoiceMessageOpenAISystemMessageParam",
- "DataChoiceMessageOpenAISystemMessageParamContentUnionMember1",
- "DataChoiceMessageOpenAIAssistantMessageParam",
- "DataChoiceMessageOpenAIAssistantMessageParamContentUnionMember1",
- "DataChoiceMessageOpenAIAssistantMessageParamToolCall",
- "DataChoiceMessageOpenAIAssistantMessageParamToolCallFunction",
- "DataChoiceMessageOpenAIToolMessageParam",
- "DataChoiceMessageOpenAIToolMessageParamContentUnionMember1",
- "DataChoiceMessageOpenAIDeveloperMessageParam",
- "DataChoiceMessageOpenAIDeveloperMessageParamContentUnionMember1",
- "DataChoiceLogprobs",
- "DataChoiceLogprobsContent",
- "DataChoiceLogprobsContentTopLogprob",
- "DataChoiceLogprobsRefusal",
- "DataChoiceLogprobsRefusalTopLogprob",
- "DataInputMessage",
- "DataInputMessageOpenAIUserMessageParam",
- "DataInputMessageOpenAIUserMessageParamContentUnionMember1",
- "DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartTextParam",
- "DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParam",
- "DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParamImageURL",
- "DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIFile",
- "DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIFileFile",
- "DataInputMessageOpenAISystemMessageParam",
- "DataInputMessageOpenAISystemMessageParamContentUnionMember1",
- "DataInputMessageOpenAIAssistantMessageParam",
- "DataInputMessageOpenAIAssistantMessageParamContentUnionMember1",
- "DataInputMessageOpenAIAssistantMessageParamToolCall",
- "DataInputMessageOpenAIAssistantMessageParamToolCallFunction",
- "DataInputMessageOpenAIToolMessageParam",
- "DataInputMessageOpenAIToolMessageParamContentUnionMember1",
- "DataInputMessageOpenAIDeveloperMessageParam",
- "DataInputMessageOpenAIDeveloperMessageParamContentUnionMember1",
+ "Choice",
+ "ChoiceMessage",
+ "ChoiceMessageOpenAIUserMessageParam",
+ "ChoiceMessageOpenAIUserMessageParamContentUnionMember1",
+ "ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartTextParam",
+ "ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParam",
+ "ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParamImageURL",
+ "ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIFile",
+ "ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIFileFile",
+ "ChoiceMessageOpenAISystemMessageParam",
+ "ChoiceMessageOpenAISystemMessageParamContentUnionMember1",
+ "ChoiceMessageOpenAIAssistantMessageParam",
+ "ChoiceMessageOpenAIAssistantMessageParamContentUnionMember1",
+ "ChoiceMessageOpenAIAssistantMessageParamToolCall",
+ "ChoiceMessageOpenAIAssistantMessageParamToolCallFunction",
+ "ChoiceMessageOpenAIToolMessageParam",
+ "ChoiceMessageOpenAIToolMessageParamContentUnionMember1",
+ "ChoiceMessageOpenAIDeveloperMessageParam",
+ "ChoiceMessageOpenAIDeveloperMessageParamContentUnionMember1",
+ "ChoiceLogprobs",
+ "ChoiceLogprobsContent",
+ "ChoiceLogprobsContentTopLogprob",
+ "ChoiceLogprobsRefusal",
+ "ChoiceLogprobsRefusalTopLogprob",
+ "InputMessage",
+ "InputMessageOpenAIUserMessageParam",
+ "InputMessageOpenAIUserMessageParamContentUnionMember1",
+ "InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartTextParam",
+ "InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParam",
+ "InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParamImageURL",
+ "InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIFile",
+ "InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIFileFile",
+ "InputMessageOpenAISystemMessageParam",
+ "InputMessageOpenAISystemMessageParamContentUnionMember1",
+ "InputMessageOpenAIAssistantMessageParam",
+ "InputMessageOpenAIAssistantMessageParamContentUnionMember1",
+ "InputMessageOpenAIAssistantMessageParamToolCall",
+ "InputMessageOpenAIAssistantMessageParamToolCallFunction",
+ "InputMessageOpenAIToolMessageParam",
+ "InputMessageOpenAIToolMessageParamContentUnionMember1",
+ "InputMessageOpenAIDeveloperMessageParam",
+ "InputMessageOpenAIDeveloperMessageParamContentUnionMember1",
]
-class DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartTextParam(BaseModel):
+class ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartTextParam(BaseModel):
text: str
"""The text content of the message"""
@@ -62,7 +61,7 @@ class DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatComple
"""Must be "text" to identify this as text content"""
-class DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParamImageURL(
+class ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParamImageURL(
BaseModel
):
url: str
@@ -75,17 +74,15 @@ class DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatComple
"""
-class DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParam(BaseModel):
- image_url: (
- DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParamImageURL
- )
+class ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParam(BaseModel):
+ image_url: ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParamImageURL
"""Image URL specification and processing details"""
type: Literal["image_url"]
"""Must be "image_url" to identify this as image content"""
-class DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIFileFile(BaseModel):
+class ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIFileFile(BaseModel):
file_data: Optional[str] = None
file_id: Optional[str] = None
@@ -93,24 +90,24 @@ class DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIFileFile(B
filename: Optional[str] = None
-class DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIFile(BaseModel):
- file: DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIFileFile
+class ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIFile(BaseModel):
+ file: ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIFileFile
type: Literal["file"]
-DataChoiceMessageOpenAIUserMessageParamContentUnionMember1: TypeAlias = Annotated[
+ChoiceMessageOpenAIUserMessageParamContentUnionMember1: TypeAlias = Annotated[
Union[
- DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartTextParam,
- DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParam,
- DataChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIFile,
+ ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartTextParam,
+ ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParam,
+ ChoiceMessageOpenAIUserMessageParamContentUnionMember1OpenAIFile,
],
PropertyInfo(discriminator="type"),
]
-class DataChoiceMessageOpenAIUserMessageParam(BaseModel):
- content: Union[str, List[DataChoiceMessageOpenAIUserMessageParamContentUnionMember1]]
+class ChoiceMessageOpenAIUserMessageParam(BaseModel):
+ content: Union[str, List[ChoiceMessageOpenAIUserMessageParamContentUnionMember1]]
"""The content of the message, which can include text and other media"""
role: Literal["user"]
@@ -120,7 +117,7 @@ class DataChoiceMessageOpenAIUserMessageParam(BaseModel):
"""(Optional) The name of the user message participant."""
-class DataChoiceMessageOpenAISystemMessageParamContentUnionMember1(BaseModel):
+class ChoiceMessageOpenAISystemMessageParamContentUnionMember1(BaseModel):
text: str
"""The text content of the message"""
@@ -128,8 +125,8 @@ class DataChoiceMessageOpenAISystemMessageParamContentUnionMember1(BaseModel):
"""Must be "text" to identify this as text content"""
-class DataChoiceMessageOpenAISystemMessageParam(BaseModel):
- content: Union[str, List[DataChoiceMessageOpenAISystemMessageParamContentUnionMember1]]
+class ChoiceMessageOpenAISystemMessageParam(BaseModel):
+ content: Union[str, List[ChoiceMessageOpenAISystemMessageParamContentUnionMember1]]
"""The content of the "system prompt".
If multiple system messages are provided, they are concatenated. The underlying
@@ -144,7 +141,7 @@ class DataChoiceMessageOpenAISystemMessageParam(BaseModel):
"""(Optional) The name of the system message participant."""
-class DataChoiceMessageOpenAIAssistantMessageParamContentUnionMember1(BaseModel):
+class ChoiceMessageOpenAIAssistantMessageParamContentUnionMember1(BaseModel):
text: str
"""The text content of the message"""
@@ -152,7 +149,7 @@ class DataChoiceMessageOpenAIAssistantMessageParamContentUnionMember1(BaseModel)
"""Must be "text" to identify this as text content"""
-class DataChoiceMessageOpenAIAssistantMessageParamToolCallFunction(BaseModel):
+class ChoiceMessageOpenAIAssistantMessageParamToolCallFunction(BaseModel):
arguments: Optional[str] = None
"""(Optional) Arguments to pass to the function as a JSON string"""
@@ -160,35 +157,35 @@ class DataChoiceMessageOpenAIAssistantMessageParamToolCallFunction(BaseModel):
"""(Optional) Name of the function to call"""
-class DataChoiceMessageOpenAIAssistantMessageParamToolCall(BaseModel):
+class ChoiceMessageOpenAIAssistantMessageParamToolCall(BaseModel):
type: Literal["function"]
"""Must be "function" to identify this as a function call"""
id: Optional[str] = None
"""(Optional) Unique identifier for the tool call"""
- function: Optional[DataChoiceMessageOpenAIAssistantMessageParamToolCallFunction] = None
+ function: Optional[ChoiceMessageOpenAIAssistantMessageParamToolCallFunction] = None
"""(Optional) Function call details"""
index: Optional[int] = None
"""(Optional) Index of the tool call in the list"""
-class DataChoiceMessageOpenAIAssistantMessageParam(BaseModel):
+class ChoiceMessageOpenAIAssistantMessageParam(BaseModel):
role: Literal["assistant"]
"""Must be "assistant" to identify this as the model's response"""
- content: Union[str, List[DataChoiceMessageOpenAIAssistantMessageParamContentUnionMember1], None] = None
+ content: Union[str, List[ChoiceMessageOpenAIAssistantMessageParamContentUnionMember1], None] = None
"""The content of the model's response"""
name: Optional[str] = None
"""(Optional) The name of the assistant message participant."""
- tool_calls: Optional[List[DataChoiceMessageOpenAIAssistantMessageParamToolCall]] = None
+ tool_calls: Optional[List[ChoiceMessageOpenAIAssistantMessageParamToolCall]] = None
"""List of tool calls. Each tool call is an OpenAIChatCompletionToolCall object."""
-class DataChoiceMessageOpenAIToolMessageParamContentUnionMember1(BaseModel):
+class ChoiceMessageOpenAIToolMessageParamContentUnionMember1(BaseModel):
text: str
"""The text content of the message"""
@@ -196,8 +193,8 @@ class DataChoiceMessageOpenAIToolMessageParamContentUnionMember1(BaseModel):
"""Must be "text" to identify this as text content"""
-class DataChoiceMessageOpenAIToolMessageParam(BaseModel):
- content: Union[str, List[DataChoiceMessageOpenAIToolMessageParamContentUnionMember1]]
+class ChoiceMessageOpenAIToolMessageParam(BaseModel):
+ content: Union[str, List[ChoiceMessageOpenAIToolMessageParamContentUnionMember1]]
"""The response content from the tool"""
role: Literal["tool"]
@@ -207,7 +204,7 @@ class DataChoiceMessageOpenAIToolMessageParam(BaseModel):
"""Unique identifier for the tool call this response is for"""
-class DataChoiceMessageOpenAIDeveloperMessageParamContentUnionMember1(BaseModel):
+class ChoiceMessageOpenAIDeveloperMessageParamContentUnionMember1(BaseModel):
text: str
"""The text content of the message"""
@@ -215,8 +212,8 @@ class DataChoiceMessageOpenAIDeveloperMessageParamContentUnionMember1(BaseModel)
"""Must be "text" to identify this as text content"""
-class DataChoiceMessageOpenAIDeveloperMessageParam(BaseModel):
- content: Union[str, List[DataChoiceMessageOpenAIDeveloperMessageParamContentUnionMember1]]
+class ChoiceMessageOpenAIDeveloperMessageParam(BaseModel):
+ content: Union[str, List[ChoiceMessageOpenAIDeveloperMessageParamContentUnionMember1]]
"""The content of the developer message"""
role: Literal["developer"]
@@ -226,19 +223,19 @@ class DataChoiceMessageOpenAIDeveloperMessageParam(BaseModel):
"""(Optional) The name of the developer message participant."""
-DataChoiceMessage: TypeAlias = Annotated[
+ChoiceMessage: TypeAlias = Annotated[
Union[
- DataChoiceMessageOpenAIUserMessageParam,
- DataChoiceMessageOpenAISystemMessageParam,
- DataChoiceMessageOpenAIAssistantMessageParam,
- DataChoiceMessageOpenAIToolMessageParam,
- DataChoiceMessageOpenAIDeveloperMessageParam,
+ ChoiceMessageOpenAIUserMessageParam,
+ ChoiceMessageOpenAISystemMessageParam,
+ ChoiceMessageOpenAIAssistantMessageParam,
+ ChoiceMessageOpenAIToolMessageParam,
+ ChoiceMessageOpenAIDeveloperMessageParam,
],
PropertyInfo(discriminator="role"),
]
-class DataChoiceLogprobsContentTopLogprob(BaseModel):
+class ChoiceLogprobsContentTopLogprob(BaseModel):
token: str
logprob: float
@@ -246,17 +243,17 @@ class DataChoiceLogprobsContentTopLogprob(BaseModel):
bytes: Optional[List[int]] = None
-class DataChoiceLogprobsContent(BaseModel):
+class ChoiceLogprobsContent(BaseModel):
token: str
logprob: float
- top_logprobs: List[DataChoiceLogprobsContentTopLogprob]
+ top_logprobs: List[ChoiceLogprobsContentTopLogprob]
bytes: Optional[List[int]] = None
-class DataChoiceLogprobsRefusalTopLogprob(BaseModel):
+class ChoiceLogprobsRefusalTopLogprob(BaseModel):
token: str
logprob: float
@@ -264,39 +261,39 @@ class DataChoiceLogprobsRefusalTopLogprob(BaseModel):
bytes: Optional[List[int]] = None
-class DataChoiceLogprobsRefusal(BaseModel):
+class ChoiceLogprobsRefusal(BaseModel):
token: str
logprob: float
- top_logprobs: List[DataChoiceLogprobsRefusalTopLogprob]
+ top_logprobs: List[ChoiceLogprobsRefusalTopLogprob]
bytes: Optional[List[int]] = None
-class DataChoiceLogprobs(BaseModel):
- content: Optional[List[DataChoiceLogprobsContent]] = None
+class ChoiceLogprobs(BaseModel):
+ content: Optional[List[ChoiceLogprobsContent]] = None
"""(Optional) The log probabilities for the tokens in the message"""
- refusal: Optional[List[DataChoiceLogprobsRefusal]] = None
+ refusal: Optional[List[ChoiceLogprobsRefusal]] = None
"""(Optional) The log probabilities for the tokens in the message"""
-class DataChoice(BaseModel):
+class Choice(BaseModel):
finish_reason: str
"""The reason the model stopped generating"""
index: int
"""The index of the choice"""
- message: DataChoiceMessage
+ message: ChoiceMessage
"""The message from the model"""
- logprobs: Optional[DataChoiceLogprobs] = None
+ logprobs: Optional[ChoiceLogprobs] = None
"""(Optional) The log probabilities for the tokens in the message"""
-class DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartTextParam(BaseModel):
+class InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartTextParam(BaseModel):
text: str
"""The text content of the message"""
@@ -304,9 +301,7 @@ class DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatComplet
"""Must be "text" to identify this as text content"""
-class DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParamImageURL(
- BaseModel
-):
+class InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParamImageURL(BaseModel):
url: str
"""URL of the image to include in the message"""
@@ -317,17 +312,15 @@ class DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatComplet
"""
-class DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParam(BaseModel):
- image_url: (
- DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParamImageURL
- )
+class InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParam(BaseModel):
+ image_url: InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParamImageURL
"""Image URL specification and processing details"""
type: Literal["image_url"]
"""Must be "image_url" to identify this as image content"""
-class DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIFileFile(BaseModel):
+class InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIFileFile(BaseModel):
file_data: Optional[str] = None
file_id: Optional[str] = None
@@ -335,24 +328,24 @@ class DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIFileFile(Ba
filename: Optional[str] = None
-class DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIFile(BaseModel):
- file: DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIFileFile
+class InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIFile(BaseModel):
+ file: InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIFileFile
type: Literal["file"]
-DataInputMessageOpenAIUserMessageParamContentUnionMember1: TypeAlias = Annotated[
+InputMessageOpenAIUserMessageParamContentUnionMember1: TypeAlias = Annotated[
Union[
- DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartTextParam,
- DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParam,
- DataInputMessageOpenAIUserMessageParamContentUnionMember1OpenAIFile,
+ InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartTextParam,
+ InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIChatCompletionContentPartImageParam,
+ InputMessageOpenAIUserMessageParamContentUnionMember1OpenAIFile,
],
PropertyInfo(discriminator="type"),
]
-class DataInputMessageOpenAIUserMessageParam(BaseModel):
- content: Union[str, List[DataInputMessageOpenAIUserMessageParamContentUnionMember1]]
+class InputMessageOpenAIUserMessageParam(BaseModel):
+ content: Union[str, List[InputMessageOpenAIUserMessageParamContentUnionMember1]]
"""The content of the message, which can include text and other media"""
role: Literal["user"]
@@ -362,7 +355,7 @@ class DataInputMessageOpenAIUserMessageParam(BaseModel):
"""(Optional) The name of the user message participant."""
-class DataInputMessageOpenAISystemMessageParamContentUnionMember1(BaseModel):
+class InputMessageOpenAISystemMessageParamContentUnionMember1(BaseModel):
text: str
"""The text content of the message"""
@@ -370,8 +363,8 @@ class DataInputMessageOpenAISystemMessageParamContentUnionMember1(BaseModel):
"""Must be "text" to identify this as text content"""
-class DataInputMessageOpenAISystemMessageParam(BaseModel):
- content: Union[str, List[DataInputMessageOpenAISystemMessageParamContentUnionMember1]]
+class InputMessageOpenAISystemMessageParam(BaseModel):
+ content: Union[str, List[InputMessageOpenAISystemMessageParamContentUnionMember1]]
"""The content of the "system prompt".
If multiple system messages are provided, they are concatenated. The underlying
@@ -386,7 +379,7 @@ class DataInputMessageOpenAISystemMessageParam(BaseModel):
"""(Optional) The name of the system message participant."""
-class DataInputMessageOpenAIAssistantMessageParamContentUnionMember1(BaseModel):
+class InputMessageOpenAIAssistantMessageParamContentUnionMember1(BaseModel):
text: str
"""The text content of the message"""
@@ -394,7 +387,7 @@ class DataInputMessageOpenAIAssistantMessageParamContentUnionMember1(BaseModel):
"""Must be "text" to identify this as text content"""
-class DataInputMessageOpenAIAssistantMessageParamToolCallFunction(BaseModel):
+class InputMessageOpenAIAssistantMessageParamToolCallFunction(BaseModel):
arguments: Optional[str] = None
"""(Optional) Arguments to pass to the function as a JSON string"""
@@ -402,35 +395,35 @@ class DataInputMessageOpenAIAssistantMessageParamToolCallFunction(BaseModel):
"""(Optional) Name of the function to call"""
-class DataInputMessageOpenAIAssistantMessageParamToolCall(BaseModel):
+class InputMessageOpenAIAssistantMessageParamToolCall(BaseModel):
type: Literal["function"]
"""Must be "function" to identify this as a function call"""
id: Optional[str] = None
"""(Optional) Unique identifier for the tool call"""
- function: Optional[DataInputMessageOpenAIAssistantMessageParamToolCallFunction] = None
+ function: Optional[InputMessageOpenAIAssistantMessageParamToolCallFunction] = None
"""(Optional) Function call details"""
index: Optional[int] = None
"""(Optional) Index of the tool call in the list"""
-class DataInputMessageOpenAIAssistantMessageParam(BaseModel):
+class InputMessageOpenAIAssistantMessageParam(BaseModel):
role: Literal["assistant"]
"""Must be "assistant" to identify this as the model's response"""
- content: Union[str, List[DataInputMessageOpenAIAssistantMessageParamContentUnionMember1], None] = None
+ content: Union[str, List[InputMessageOpenAIAssistantMessageParamContentUnionMember1], None] = None
"""The content of the model's response"""
name: Optional[str] = None
"""(Optional) The name of the assistant message participant."""
- tool_calls: Optional[List[DataInputMessageOpenAIAssistantMessageParamToolCall]] = None
+ tool_calls: Optional[List[InputMessageOpenAIAssistantMessageParamToolCall]] = None
"""List of tool calls. Each tool call is an OpenAIChatCompletionToolCall object."""
-class DataInputMessageOpenAIToolMessageParamContentUnionMember1(BaseModel):
+class InputMessageOpenAIToolMessageParamContentUnionMember1(BaseModel):
text: str
"""The text content of the message"""
@@ -438,8 +431,8 @@ class DataInputMessageOpenAIToolMessageParamContentUnionMember1(BaseModel):
"""Must be "text" to identify this as text content"""
-class DataInputMessageOpenAIToolMessageParam(BaseModel):
- content: Union[str, List[DataInputMessageOpenAIToolMessageParamContentUnionMember1]]
+class InputMessageOpenAIToolMessageParam(BaseModel):
+ content: Union[str, List[InputMessageOpenAIToolMessageParamContentUnionMember1]]
"""The response content from the tool"""
role: Literal["tool"]
@@ -449,7 +442,7 @@ class DataInputMessageOpenAIToolMessageParam(BaseModel):
"""Unique identifier for the tool call this response is for"""
-class DataInputMessageOpenAIDeveloperMessageParamContentUnionMember1(BaseModel):
+class InputMessageOpenAIDeveloperMessageParamContentUnionMember1(BaseModel):
text: str
"""The text content of the message"""
@@ -457,8 +450,8 @@ class DataInputMessageOpenAIDeveloperMessageParamContentUnionMember1(BaseModel):
"""Must be "text" to identify this as text content"""
-class DataInputMessageOpenAIDeveloperMessageParam(BaseModel):
- content: Union[str, List[DataInputMessageOpenAIDeveloperMessageParamContentUnionMember1]]
+class InputMessageOpenAIDeveloperMessageParam(BaseModel):
+ content: Union[str, List[InputMessageOpenAIDeveloperMessageParamContentUnionMember1]]
"""The content of the developer message"""
role: Literal["developer"]
@@ -468,49 +461,32 @@ class DataInputMessageOpenAIDeveloperMessageParam(BaseModel):
"""(Optional) The name of the developer message participant."""
-DataInputMessage: TypeAlias = Annotated[
+InputMessage: TypeAlias = Annotated[
Union[
- DataInputMessageOpenAIUserMessageParam,
- DataInputMessageOpenAISystemMessageParam,
- DataInputMessageOpenAIAssistantMessageParam,
- DataInputMessageOpenAIToolMessageParam,
- DataInputMessageOpenAIDeveloperMessageParam,
+ InputMessageOpenAIUserMessageParam,
+ InputMessageOpenAISystemMessageParam,
+ InputMessageOpenAIAssistantMessageParam,
+ InputMessageOpenAIToolMessageParam,
+ InputMessageOpenAIDeveloperMessageParam,
],
PropertyInfo(discriminator="role"),
]
-class Data(BaseModel):
+class CompletionListResponse(BaseModel):
id: str
"""The ID of the chat completion"""
- choices: List[DataChoice]
+ choices: List[Choice]
"""List of choices"""
created: int
"""The Unix timestamp in seconds when the chat completion was created"""
- input_messages: List[DataInputMessage]
+ input_messages: List[InputMessage]
model: str
"""The model that was used to generate the chat completion"""
object: Literal["chat.completion"]
"""The object type, which will be "chat.completion" """
-
-
-class CompletionListResponse(BaseModel):
- data: List[Data]
- """List of chat completion objects with their input messages"""
-
- first_id: str
- """ID of the first completion in this list"""
-
- has_more: bool
- """Whether there are more completions available beyond this list"""
-
- last_id: str
- """ID of the last completion in this list"""
-
- object: Literal["list"]
- """Must be "list" to identify this as a list response"""
diff --git a/src/llama_stack_client/types/create_response.py b/src/llama_stack_client/types/create_response.py
index b0eaf3e5..fbb519f0 100644
--- a/src/llama_stack_client/types/create_response.py
+++ b/src/llama_stack_client/types/create_response.py
@@ -22,13 +22,7 @@ class Result(BaseModel):
"""
category_scores: Optional[Dict[str, float]] = None
- """A list of the categories along with their scores as predicted by model.
-
- Required set of categories that need to be in response - violence -
- violence/graphic - harassment - harassment/threatening - hate -
- hate/threatening - illicit - illicit/violent - sexual - sexual/minors -
- self-harm - self-harm/intent - self-harm/instructions
- """
+ """A list of the categories along with their scores as predicted by model."""
user_message: Optional[str] = None
diff --git a/src/llama_stack_client/types/response_list_response.py b/src/llama_stack_client/types/response_list_response.py
index 996fb00a..d7190a4a 100644
--- a/src/llama_stack_client/types/response_list_response.py
+++ b/src/llama_stack_client/types/response_list_response.py
@@ -10,46 +10,45 @@
__all__ = [
"ResponseListResponse",
- "Data",
- "DataInput",
- "DataInputOpenAIResponseOutputMessageWebSearchToolCall",
- "DataInputOpenAIResponseOutputMessageFileSearchToolCall",
- "DataInputOpenAIResponseOutputMessageFunctionToolCall",
- "DataInputOpenAIResponseInputFunctionToolCallOutput",
- "DataInputOpenAIResponseMessage",
- "DataInputOpenAIResponseMessageContentUnionMember1",
- "DataInputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
- "DataInputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
- "DataInputOpenAIResponseMessageContentUnionMember2",
- "DataInputOpenAIResponseMessageContentUnionMember2Annotation",
- "DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
- "DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
- "DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
- "DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
- "DataOutput",
- "DataOutputOpenAIResponseMessage",
- "DataOutputOpenAIResponseMessageContentUnionMember1",
- "DataOutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
- "DataOutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
- "DataOutputOpenAIResponseMessageContentUnionMember2",
- "DataOutputOpenAIResponseMessageContentUnionMember2Annotation",
- "DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
- "DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
- "DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
- "DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
- "DataOutputOpenAIResponseOutputMessageWebSearchToolCall",
- "DataOutputOpenAIResponseOutputMessageFileSearchToolCall",
- "DataOutputOpenAIResponseOutputMessageFunctionToolCall",
- "DataOutputOpenAIResponseOutputMessageMcpCall",
- "DataOutputOpenAIResponseOutputMessageMcpListTools",
- "DataOutputOpenAIResponseOutputMessageMcpListToolsTool",
- "DataText",
- "DataTextFormat",
- "DataError",
+ "Input",
+ "InputOpenAIResponseOutputMessageWebSearchToolCall",
+ "InputOpenAIResponseOutputMessageFileSearchToolCall",
+ "InputOpenAIResponseOutputMessageFunctionToolCall",
+ "InputOpenAIResponseInputFunctionToolCallOutput",
+ "InputOpenAIResponseMessage",
+ "InputOpenAIResponseMessageContentUnionMember1",
+ "InputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
+ "InputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
+ "InputOpenAIResponseMessageContentUnionMember2",
+ "InputOpenAIResponseMessageContentUnionMember2Annotation",
+ "InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
+ "InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
+ "InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
+ "InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
+ "Output",
+ "OutputOpenAIResponseMessage",
+ "OutputOpenAIResponseMessageContentUnionMember1",
+ "OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText",
+ "OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage",
+ "OutputOpenAIResponseMessageContentUnionMember2",
+ "OutputOpenAIResponseMessageContentUnionMember2Annotation",
+ "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation",
+ "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation",
+ "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation",
+ "OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath",
+ "OutputOpenAIResponseOutputMessageWebSearchToolCall",
+ "OutputOpenAIResponseOutputMessageFileSearchToolCall",
+ "OutputOpenAIResponseOutputMessageFunctionToolCall",
+ "OutputOpenAIResponseOutputMessageMcpCall",
+ "OutputOpenAIResponseOutputMessageMcpListTools",
+ "OutputOpenAIResponseOutputMessageMcpListToolsTool",
+ "Text",
+ "TextFormat",
+ "Error",
]
-class DataInputOpenAIResponseOutputMessageWebSearchToolCall(BaseModel):
+class InputOpenAIResponseOutputMessageWebSearchToolCall(BaseModel):
id: str
"""Unique identifier for this tool call"""
@@ -60,7 +59,7 @@ class DataInputOpenAIResponseOutputMessageWebSearchToolCall(BaseModel):
"""Tool call type identifier, always "web_search_call" """
-class DataInputOpenAIResponseOutputMessageFileSearchToolCall(BaseModel):
+class InputOpenAIResponseOutputMessageFileSearchToolCall(BaseModel):
id: str
"""Unique identifier for this tool call"""
@@ -77,7 +76,7 @@ class DataInputOpenAIResponseOutputMessageFileSearchToolCall(BaseModel):
"""(Optional) Search results returned by the file search operation"""
-class DataInputOpenAIResponseOutputMessageFunctionToolCall(BaseModel):
+class InputOpenAIResponseOutputMessageFunctionToolCall(BaseModel):
arguments: str
"""JSON string containing the function arguments"""
@@ -97,7 +96,7 @@ class DataInputOpenAIResponseOutputMessageFunctionToolCall(BaseModel):
"""(Optional) Current status of the function call execution"""
-class DataInputOpenAIResponseInputFunctionToolCallOutput(BaseModel):
+class InputOpenAIResponseInputFunctionToolCallOutput(BaseModel):
call_id: str
output: str
@@ -109,7 +108,7 @@ class DataInputOpenAIResponseInputFunctionToolCallOutput(BaseModel):
status: Optional[str] = None
-class DataInputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText(BaseModel):
+class InputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText(BaseModel):
text: str
"""The text content of the input message"""
@@ -117,7 +116,7 @@ class DataInputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessag
"""Content type identifier, always "input_text" """
-class DataInputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage(BaseModel):
+class InputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage(BaseModel):
detail: Literal["low", "high", "auto"]
"""Level of detail for image processing, can be "low", "high", or "auto" """
@@ -128,16 +127,16 @@ class DataInputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessag
"""(Optional) URL of the image content"""
-DataInputOpenAIResponseMessageContentUnionMember1: TypeAlias = Annotated[
+InputOpenAIResponseMessageContentUnionMember1: TypeAlias = Annotated[
Union[
- DataInputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText,
- DataInputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage,
+ InputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText,
+ InputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage,
],
PropertyInfo(discriminator="type"),
]
-class DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel):
+class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel):
file_id: str
"""Unique identifier of the referenced file"""
@@ -151,7 +150,7 @@ class DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseA
"""Annotation type identifier, always "file_citation" """
-class DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel):
+class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel):
end_index: int
"""End position of the citation span in the content"""
@@ -168,9 +167,7 @@ class DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseA
"""URL of the referenced web resource"""
-class DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(
- BaseModel
-):
+class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel):
container_id: str
end_index: int
@@ -184,7 +181,7 @@ class DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseA
type: Literal["container_file_citation"]
-class DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel):
+class InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel):
file_id: str
index: int
@@ -192,30 +189,28 @@ class DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseA
type: Literal["file_path"]
-DataInputOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[
+InputOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[
Union[
- DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
- DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
- DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
- DataInputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
+ InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
+ InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
+ InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
+ InputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
],
PropertyInfo(discriminator="type"),
]
-class DataInputOpenAIResponseMessageContentUnionMember2(BaseModel):
- annotations: List[DataInputOpenAIResponseMessageContentUnionMember2Annotation]
+class InputOpenAIResponseMessageContentUnionMember2(BaseModel):
+ annotations: List[InputOpenAIResponseMessageContentUnionMember2Annotation]
text: str
type: Literal["output_text"]
-class DataInputOpenAIResponseMessage(BaseModel):
+class InputOpenAIResponseMessage(BaseModel):
content: Union[
- str,
- List[DataInputOpenAIResponseMessageContentUnionMember1],
- List[DataInputOpenAIResponseMessageContentUnionMember2],
+ str, List[InputOpenAIResponseMessageContentUnionMember1], List[InputOpenAIResponseMessageContentUnionMember2]
]
role: Literal["system", "developer", "user", "assistant"]
@@ -227,16 +222,16 @@ class DataInputOpenAIResponseMessage(BaseModel):
status: Optional[str] = None
-DataInput: TypeAlias = Union[
- DataInputOpenAIResponseOutputMessageWebSearchToolCall,
- DataInputOpenAIResponseOutputMessageFileSearchToolCall,
- DataInputOpenAIResponseOutputMessageFunctionToolCall,
- DataInputOpenAIResponseInputFunctionToolCallOutput,
- DataInputOpenAIResponseMessage,
+Input: TypeAlias = Union[
+ InputOpenAIResponseOutputMessageWebSearchToolCall,
+ InputOpenAIResponseOutputMessageFileSearchToolCall,
+ InputOpenAIResponseOutputMessageFunctionToolCall,
+ InputOpenAIResponseInputFunctionToolCallOutput,
+ InputOpenAIResponseMessage,
]
-class DataOutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText(BaseModel):
+class OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText(BaseModel):
text: str
"""The text content of the input message"""
@@ -244,7 +239,7 @@ class DataOutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessa
"""Content type identifier, always "input_text" """
-class DataOutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage(BaseModel):
+class OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage(BaseModel):
detail: Literal["low", "high", "auto"]
"""Level of detail for image processing, can be "low", "high", or "auto" """
@@ -255,16 +250,16 @@ class DataOutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessa
"""(Optional) URL of the image content"""
-DataOutputOpenAIResponseMessageContentUnionMember1: TypeAlias = Annotated[
+OutputOpenAIResponseMessageContentUnionMember1: TypeAlias = Annotated[
Union[
- DataOutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText,
- DataOutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage,
+ OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentText,
+ OutputOpenAIResponseMessageContentUnionMember1OpenAIResponseInputMessageContentImage,
],
PropertyInfo(discriminator="type"),
]
-class DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel):
+class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation(BaseModel):
file_id: str
"""Unique identifier of the referenced file"""
@@ -278,7 +273,7 @@ class DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponse
"""Annotation type identifier, always "file_citation" """
-class DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel):
+class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation(BaseModel):
end_index: int
"""End position of the citation span in the content"""
@@ -295,9 +290,7 @@ class DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponse
"""URL of the referenced web resource"""
-class DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(
- BaseModel
-):
+class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation(BaseModel):
container_id: str
end_index: int
@@ -311,7 +304,7 @@ class DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponse
type: Literal["container_file_citation"]
-class DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel):
+class OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath(BaseModel):
file_id: str
index: int
@@ -319,30 +312,28 @@ class DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponse
type: Literal["file_path"]
-DataOutputOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[
+OutputOpenAIResponseMessageContentUnionMember2Annotation: TypeAlias = Annotated[
Union[
- DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
- DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
- DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
- DataOutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
+ OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFileCitation,
+ OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationCitation,
+ OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationContainerFileCitation,
+ OutputOpenAIResponseMessageContentUnionMember2AnnotationOpenAIResponseAnnotationFilePath,
],
PropertyInfo(discriminator="type"),
]
-class DataOutputOpenAIResponseMessageContentUnionMember2(BaseModel):
- annotations: List[DataOutputOpenAIResponseMessageContentUnionMember2Annotation]
+class OutputOpenAIResponseMessageContentUnionMember2(BaseModel):
+ annotations: List[OutputOpenAIResponseMessageContentUnionMember2Annotation]
text: str
type: Literal["output_text"]
-class DataOutputOpenAIResponseMessage(BaseModel):
+class OutputOpenAIResponseMessage(BaseModel):
content: Union[
- str,
- List[DataOutputOpenAIResponseMessageContentUnionMember1],
- List[DataOutputOpenAIResponseMessageContentUnionMember2],
+ str, List[OutputOpenAIResponseMessageContentUnionMember1], List[OutputOpenAIResponseMessageContentUnionMember2]
]
role: Literal["system", "developer", "user", "assistant"]
@@ -354,7 +345,7 @@ class DataOutputOpenAIResponseMessage(BaseModel):
status: Optional[str] = None
-class DataOutputOpenAIResponseOutputMessageWebSearchToolCall(BaseModel):
+class OutputOpenAIResponseOutputMessageWebSearchToolCall(BaseModel):
id: str
"""Unique identifier for this tool call"""
@@ -365,7 +356,7 @@ class DataOutputOpenAIResponseOutputMessageWebSearchToolCall(BaseModel):
"""Tool call type identifier, always "web_search_call" """
-class DataOutputOpenAIResponseOutputMessageFileSearchToolCall(BaseModel):
+class OutputOpenAIResponseOutputMessageFileSearchToolCall(BaseModel):
id: str
"""Unique identifier for this tool call"""
@@ -382,7 +373,7 @@ class DataOutputOpenAIResponseOutputMessageFileSearchToolCall(BaseModel):
"""(Optional) Search results returned by the file search operation"""
-class DataOutputOpenAIResponseOutputMessageFunctionToolCall(BaseModel):
+class OutputOpenAIResponseOutputMessageFunctionToolCall(BaseModel):
arguments: str
"""JSON string containing the function arguments"""
@@ -402,7 +393,7 @@ class DataOutputOpenAIResponseOutputMessageFunctionToolCall(BaseModel):
"""(Optional) Current status of the function call execution"""
-class DataOutputOpenAIResponseOutputMessageMcpCall(BaseModel):
+class OutputOpenAIResponseOutputMessageMcpCall(BaseModel):
id: str
"""Unique identifier for this MCP call"""
@@ -425,7 +416,7 @@ class DataOutputOpenAIResponseOutputMessageMcpCall(BaseModel):
"""(Optional) Output result from the successful MCP call"""
-class DataOutputOpenAIResponseOutputMessageMcpListToolsTool(BaseModel):
+class OutputOpenAIResponseOutputMessageMcpListToolsTool(BaseModel):
input_schema: Dict[str, Union[bool, float, str, List[object], object, None]]
"""JSON schema defining the tool's input parameters"""
@@ -436,34 +427,34 @@ class DataOutputOpenAIResponseOutputMessageMcpListToolsTool(BaseModel):
"""(Optional) Description of what the tool does"""
-class DataOutputOpenAIResponseOutputMessageMcpListTools(BaseModel):
+class OutputOpenAIResponseOutputMessageMcpListTools(BaseModel):
id: str
"""Unique identifier for this MCP list tools operation"""
server_label: str
"""Label identifying the MCP server providing the tools"""
- tools: List[DataOutputOpenAIResponseOutputMessageMcpListToolsTool]
+ tools: List[OutputOpenAIResponseOutputMessageMcpListToolsTool]
"""List of available tools provided by the MCP server"""
type: Literal["mcp_list_tools"]
"""Tool call type identifier, always "mcp_list_tools" """
-DataOutput: TypeAlias = Annotated[
+Output: TypeAlias = Annotated[
Union[
- DataOutputOpenAIResponseMessage,
- DataOutputOpenAIResponseOutputMessageWebSearchToolCall,
- DataOutputOpenAIResponseOutputMessageFileSearchToolCall,
- DataOutputOpenAIResponseOutputMessageFunctionToolCall,
- DataOutputOpenAIResponseOutputMessageMcpCall,
- DataOutputOpenAIResponseOutputMessageMcpListTools,
+ OutputOpenAIResponseMessage,
+ OutputOpenAIResponseOutputMessageWebSearchToolCall,
+ OutputOpenAIResponseOutputMessageFileSearchToolCall,
+ OutputOpenAIResponseOutputMessageFunctionToolCall,
+ OutputOpenAIResponseOutputMessageMcpCall,
+ OutputOpenAIResponseOutputMessageMcpListTools,
],
PropertyInfo(discriminator="type"),
]
-class DataTextFormat(BaseModel):
+class TextFormat(BaseModel):
type: Literal["text", "json_schema", "json_object"]
"""Must be "text", "json_schema", or "json_object" to identify the format type"""
@@ -488,12 +479,12 @@ class DataTextFormat(BaseModel):
"""
-class DataText(BaseModel):
- format: Optional[DataTextFormat] = None
+class Text(BaseModel):
+ format: Optional[TextFormat] = None
"""(Optional) Text format configuration specifying output format requirements"""
-class DataError(BaseModel):
+class Error(BaseModel):
code: str
"""Error code identifying the type of failure"""
@@ -501,14 +492,14 @@ class DataError(BaseModel):
"""Human-readable error message describing the failure"""
-class Data(BaseModel):
+class ResponseListResponse(BaseModel):
id: str
"""Unique identifier for this response"""
created_at: int
"""Unix timestamp when the response was created"""
- input: List[DataInput]
+ input: List[Input]
"""List of input items that led to this response"""
model: str
@@ -517,7 +508,7 @@ class Data(BaseModel):
object: Literal["response"]
"""Object type identifier, always "response" """
- output: List[DataOutput]
+ output: List[Output]
"""List of generated output items (messages, tool calls, etc.)"""
parallel_tool_calls: bool
@@ -526,10 +517,10 @@ class Data(BaseModel):
status: str
"""Current status of the response generation"""
- text: DataText
+ text: Text
"""Text formatting configuration for the response"""
- error: Optional[DataError] = None
+ error: Optional[Error] = None
"""(Optional) Error details if the response generation failed"""
previous_response_id: Optional[str] = None
@@ -546,20 +537,3 @@ class Data(BaseModel):
user: Optional[str] = None
"""(Optional) User identifier associated with the request"""
-
-
-class ResponseListResponse(BaseModel):
- data: List[Data]
- """List of response objects with their input context"""
-
- first_id: str
- """Identifier of the first item in this page"""
-
- has_more: bool
- """Whether there are more results available beyond this page"""
-
- last_id: str
- """Identifier of the last item in this page"""
-
- object: Literal["list"]
- """Object type identifier, always "list" """
diff --git a/src/llama_stack_client/types/vector_stores/__init__.py b/src/llama_stack_client/types/vector_stores/__init__.py
index 82fc5047..68bcf684 100644
--- a/src/llama_stack_client/types/vector_stores/__init__.py
+++ b/src/llama_stack_client/types/vector_stores/__init__.py
@@ -5,7 +5,6 @@
from .file_list_params import FileListParams as FileListParams
from .vector_store_file import VectorStoreFile as VectorStoreFile
from .file_create_params import FileCreateParams as FileCreateParams
-from .file_list_response import FileListResponse as FileListResponse
from .file_update_params import FileUpdateParams as FileUpdateParams
from .file_delete_response import FileDeleteResponse as FileDeleteResponse
from .file_content_response import FileContentResponse as FileContentResponse
diff --git a/src/llama_stack_client/types/vector_stores/file_list_response.py b/src/llama_stack_client/types/vector_stores/file_list_response.py
deleted file mode 100644
index 45ddc95f..00000000
--- a/src/llama_stack_client/types/vector_stores/file_list_response.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import List, Optional
-
-from ..._models import BaseModel
-from .vector_store_file import VectorStoreFile
-
-__all__ = ["FileListResponse"]
-
-
-class FileListResponse(BaseModel):
- data: List[VectorStoreFile]
- """List of vector store file objects"""
-
- has_more: bool
- """Whether there are more files available beyond this page"""
-
- object: str
- """Object type identifier, always "list" """
-
- first_id: Optional[str] = None
- """(Optional) ID of the first file in the list for pagination"""
-
- last_id: Optional[str] = None
- """(Optional) ID of the last file in the list for pagination"""
diff --git a/tests/api_resources/chat/test_completions.py b/tests/api_resources/chat/test_completions.py
index 496ea061..7a1e9d41 100644
--- a/tests/api_resources/chat/test_completions.py
+++ b/tests/api_resources/chat/test_completions.py
@@ -9,6 +9,7 @@
from tests.utils import assert_matches_type
from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient
+from llama_stack_client.pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage
from llama_stack_client.types.chat import (
CompletionListResponse,
CompletionCreateResponse,
@@ -232,7 +233,7 @@ def test_path_params_retrieve(self, client: LlamaStackClient) -> None:
@parametrize
def test_method_list(self, client: LlamaStackClient) -> None:
completion = client.chat.completions.list()
- assert_matches_type(CompletionListResponse, completion, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[CompletionListResponse], completion, path=["response"])
@parametrize
def test_method_list_with_all_params(self, client: LlamaStackClient) -> None:
@@ -242,7 +243,7 @@ def test_method_list_with_all_params(self, client: LlamaStackClient) -> None:
model="model",
order="asc",
)
- assert_matches_type(CompletionListResponse, completion, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[CompletionListResponse], completion, path=["response"])
@parametrize
def test_raw_response_list(self, client: LlamaStackClient) -> None:
@@ -251,7 +252,7 @@ def test_raw_response_list(self, client: LlamaStackClient) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
completion = response.parse()
- assert_matches_type(CompletionListResponse, completion, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[CompletionListResponse], completion, path=["response"])
@parametrize
def test_streaming_response_list(self, client: LlamaStackClient) -> None:
@@ -260,7 +261,7 @@ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
completion = response.parse()
- assert_matches_type(CompletionListResponse, completion, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[CompletionListResponse], completion, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -481,7 +482,7 @@ async def test_path_params_retrieve(self, async_client: AsyncLlamaStackClient) -
@parametrize
async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None:
completion = await async_client.chat.completions.list()
- assert_matches_type(CompletionListResponse, completion, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[CompletionListResponse], completion, path=["response"])
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
@@ -491,7 +492,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackCl
model="model",
order="asc",
)
- assert_matches_type(CompletionListResponse, completion, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[CompletionListResponse], completion, path=["response"])
@parametrize
async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None:
@@ -500,7 +501,7 @@ async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> N
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
completion = await response.parse()
- assert_matches_type(CompletionListResponse, completion, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[CompletionListResponse], completion, path=["response"])
@parametrize
async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None:
@@ -509,6 +510,6 @@ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
completion = await response.parse()
- assert_matches_type(CompletionListResponse, completion, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[CompletionListResponse], completion, path=["response"])
assert cast(Any, response.is_closed) is True
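The test changes above only assert the new page type; callers that want explicit control rather than implicit iteration can step pages manually. A sketch under the assumption that these pages carry the manual-stepping helpers typical of Stainless SDKs (`has_next_page` / `get_next_page` are not confirmed by this diff):

```python
page = client.chat.completions.list()
while True:
    for completion in page.data:
        print(completion.id)
    if not page.has_next_page():  # assumed helper on the page object
        break
    page = page.get_next_page()   # assumed helper; fetches the next cursor page
```
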
diff --git a/tests/api_resources/test_files.py b/tests/api_resources/test_files.py
index 7fc5e107..d9b29ffc 100644
--- a/tests/api_resources/test_files.py
+++ b/tests/api_resources/test_files.py
@@ -9,7 +9,8 @@
from tests.utils import assert_matches_type
from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient
-from llama_stack_client.types import File, ListFilesResponse, DeleteFileResponse
+from llama_stack_client.types import File, DeleteFileResponse
+from llama_stack_client.pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -92,7 +93,7 @@ def test_path_params_retrieve(self, client: LlamaStackClient) -> None:
@parametrize
def test_method_list(self, client: LlamaStackClient) -> None:
file = client.files.list()
- assert_matches_type(ListFilesResponse, file, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[File], file, path=["response"])
@parametrize
def test_method_list_with_all_params(self, client: LlamaStackClient) -> None:
@@ -102,7 +103,7 @@ def test_method_list_with_all_params(self, client: LlamaStackClient) -> None:
order="asc",
purpose="assistants",
)
- assert_matches_type(ListFilesResponse, file, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[File], file, path=["response"])
@parametrize
def test_raw_response_list(self, client: LlamaStackClient) -> None:
@@ -111,7 +112,7 @@ def test_raw_response_list(self, client: LlamaStackClient) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
file = response.parse()
- assert_matches_type(ListFilesResponse, file, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[File], file, path=["response"])
@parametrize
def test_streaming_response_list(self, client: LlamaStackClient) -> None:
@@ -120,7 +121,7 @@ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
file = response.parse()
- assert_matches_type(ListFilesResponse, file, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[File], file, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -281,7 +282,7 @@ async def test_path_params_retrieve(self, async_client: AsyncLlamaStackClient) -
@parametrize
async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None:
file = await async_client.files.list()
- assert_matches_type(ListFilesResponse, file, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[File], file, path=["response"])
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
@@ -291,7 +292,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackCl
order="asc",
purpose="assistants",
)
- assert_matches_type(ListFilesResponse, file, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[File], file, path=["response"])
@parametrize
async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None:
@@ -300,7 +301,7 @@ async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> N
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
file = await response.parse()
- assert_matches_type(ListFilesResponse, file, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[File], file, path=["response"])
@parametrize
async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None:
@@ -309,7 +310,7 @@ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
file = await response.parse()
- assert_matches_type(ListFilesResponse, file, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[File], file, path=["response"])
assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_responses.py b/tests/api_resources/test_responses.py
index a3fa9fd1..5604ddeb 100644
--- a/tests/api_resources/test_responses.py
+++ b/tests/api_resources/test_responses.py
@@ -10,6 +10,7 @@
from tests.utils import assert_matches_type
from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient
from llama_stack_client.types import ResponseObject, ResponseListResponse
+from llama_stack_client.pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -186,7 +187,7 @@ def test_path_params_retrieve(self, client: LlamaStackClient) -> None:
@parametrize
def test_method_list(self, client: LlamaStackClient) -> None:
response = client.responses.list()
- assert_matches_type(ResponseListResponse, response, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[ResponseListResponse], response, path=["response"])
@parametrize
def test_method_list_with_all_params(self, client: LlamaStackClient) -> None:
@@ -196,7 +197,7 @@ def test_method_list_with_all_params(self, client: LlamaStackClient) -> None:
model="model",
order="asc",
)
- assert_matches_type(ResponseListResponse, response, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[ResponseListResponse], response, path=["response"])
@parametrize
def test_raw_response_list(self, client: LlamaStackClient) -> None:
@@ -205,7 +206,7 @@ def test_raw_response_list(self, client: LlamaStackClient) -> None:
assert http_response.is_closed is True
assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
response = http_response.parse()
- assert_matches_type(ResponseListResponse, response, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[ResponseListResponse], response, path=["response"])
@parametrize
def test_streaming_response_list(self, client: LlamaStackClient) -> None:
@@ -214,7 +215,7 @@ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
response = http_response.parse()
- assert_matches_type(ResponseListResponse, response, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[ResponseListResponse], response, path=["response"])
assert cast(Any, http_response.is_closed) is True
@@ -393,7 +394,7 @@ async def test_path_params_retrieve(self, async_client: AsyncLlamaStackClient) -
@parametrize
async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None:
response = await async_client.responses.list()
- assert_matches_type(ResponseListResponse, response, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[ResponseListResponse], response, path=["response"])
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
@@ -403,7 +404,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackCl
model="model",
order="asc",
)
- assert_matches_type(ResponseListResponse, response, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[ResponseListResponse], response, path=["response"])
@parametrize
async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None:
@@ -412,7 +413,7 @@ async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> N
assert http_response.is_closed is True
assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
response = await http_response.parse()
- assert_matches_type(ResponseListResponse, response, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[ResponseListResponse], response, path=["response"])
@parametrize
async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None:
@@ -421,6 +422,6 @@ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient
assert http_response.http_request.headers.get("X-Stainless-Lang") == "python"
response = await http_response.parse()
- assert_matches_type(ResponseListResponse, response, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[ResponseListResponse], response, path=["response"])
assert cast(Any, http_response.is_closed) is True
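The async tests mirror the sync ones with `AsyncOpenAICursorPage`. A sketch of the async counterpart, assuming the async paginator is directly async-iterable as in comparable Stainless SDKs:

```python
import asyncio

from llama_stack_client import AsyncLlamaStackClient


async def main() -> None:
    client = AsyncLlamaStackClient(base_url="http://localhost:8321")  # placeholder URL
    # `async for` over the paginator transparently fetches subsequent pages.
    async for response in client.responses.list():
        print(response.id)


asyncio.run(main())
```
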
diff --git a/tests/api_resources/test_vector_stores.py b/tests/api_resources/test_vector_stores.py
index 0547061a..3918624b 100644
--- a/tests/api_resources/test_vector_stores.py
+++ b/tests/api_resources/test_vector_stores.py
@@ -11,10 +11,10 @@
from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient
from llama_stack_client.types import (
VectorStore,
- ListVectorStoresResponse,
VectorStoreDeleteResponse,
VectorStoreSearchResponse,
)
+from llama_stack_client.pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -150,7 +150,7 @@ def test_path_params_update(self, client: LlamaStackClient) -> None:
@parametrize
def test_method_list(self, client: LlamaStackClient) -> None:
vector_store = client.vector_stores.list()
- assert_matches_type(ListVectorStoresResponse, vector_store, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[VectorStore], vector_store, path=["response"])
@parametrize
def test_method_list_with_all_params(self, client: LlamaStackClient) -> None:
@@ -160,7 +160,7 @@ def test_method_list_with_all_params(self, client: LlamaStackClient) -> None:
limit=0,
order="order",
)
- assert_matches_type(ListVectorStoresResponse, vector_store, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[VectorStore], vector_store, path=["response"])
@parametrize
def test_raw_response_list(self, client: LlamaStackClient) -> None:
@@ -169,7 +169,7 @@ def test_raw_response_list(self, client: LlamaStackClient) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
vector_store = response.parse()
- assert_matches_type(ListVectorStoresResponse, vector_store, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[VectorStore], vector_store, path=["response"])
@parametrize
def test_streaming_response_list(self, client: LlamaStackClient) -> None:
@@ -178,7 +178,7 @@ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
vector_store = response.parse()
- assert_matches_type(ListVectorStoresResponse, vector_store, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[VectorStore], vector_store, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -412,7 +412,7 @@ async def test_path_params_update(self, async_client: AsyncLlamaStackClient) ->
@parametrize
async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None:
vector_store = await async_client.vector_stores.list()
- assert_matches_type(ListVectorStoresResponse, vector_store, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[VectorStore], vector_store, path=["response"])
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
@@ -422,7 +422,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackCl
limit=0,
order="order",
)
- assert_matches_type(ListVectorStoresResponse, vector_store, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[VectorStore], vector_store, path=["response"])
@parametrize
async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None:
@@ -431,7 +431,7 @@ async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> N
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
vector_store = await response.parse()
- assert_matches_type(ListVectorStoresResponse, vector_store, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[VectorStore], vector_store, path=["response"])
@parametrize
async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None:
@@ -440,7 +440,7 @@ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
vector_store = await response.parse()
- assert_matches_type(ListVectorStoresResponse, vector_store, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[VectorStore], vector_store, path=["response"])
assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/vector_stores/test_files.py b/tests/api_resources/vector_stores/test_files.py
index 235f80e3..cf38bc2b 100644
--- a/tests/api_resources/vector_stores/test_files.py
+++ b/tests/api_resources/vector_stores/test_files.py
@@ -9,9 +9,9 @@
from tests.utils import assert_matches_type
from llama_stack_client import LlamaStackClient, AsyncLlamaStackClient
+from llama_stack_client.pagination import SyncOpenAICursorPage, AsyncOpenAICursorPage
from llama_stack_client.types.vector_stores import (
VectorStoreFile,
- FileListResponse,
FileDeleteResponse,
FileContentResponse,
)
@@ -180,7 +180,7 @@ def test_method_list(self, client: LlamaStackClient) -> None:
file = client.vector_stores.files.list(
vector_store_id="vector_store_id",
)
- assert_matches_type(FileListResponse, file, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[VectorStoreFile], file, path=["response"])
@parametrize
def test_method_list_with_all_params(self, client: LlamaStackClient) -> None:
@@ -192,7 +192,7 @@ def test_method_list_with_all_params(self, client: LlamaStackClient) -> None:
limit=0,
order="order",
)
- assert_matches_type(FileListResponse, file, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[VectorStoreFile], file, path=["response"])
@parametrize
def test_raw_response_list(self, client: LlamaStackClient) -> None:
@@ -203,7 +203,7 @@ def test_raw_response_list(self, client: LlamaStackClient) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
file = response.parse()
- assert_matches_type(FileListResponse, file, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[VectorStoreFile], file, path=["response"])
@parametrize
def test_streaming_response_list(self, client: LlamaStackClient) -> None:
@@ -214,7 +214,7 @@ def test_streaming_response_list(self, client: LlamaStackClient) -> None:
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
file = response.parse()
- assert_matches_type(FileListResponse, file, path=["response"])
+ assert_matches_type(SyncOpenAICursorPage[VectorStoreFile], file, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -485,7 +485,7 @@ async def test_method_list(self, async_client: AsyncLlamaStackClient) -> None:
file = await async_client.vector_stores.files.list(
vector_store_id="vector_store_id",
)
- assert_matches_type(FileListResponse, file, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[VectorStoreFile], file, path=["response"])
@parametrize
async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackClient) -> None:
@@ -497,7 +497,7 @@ async def test_method_list_with_all_params(self, async_client: AsyncLlamaStackCl
limit=0,
order="order",
)
- assert_matches_type(FileListResponse, file, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[VectorStoreFile], file, path=["response"])
@parametrize
async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> None:
@@ -508,7 +508,7 @@ async def test_raw_response_list(self, async_client: AsyncLlamaStackClient) -> N
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
file = await response.parse()
- assert_matches_type(FileListResponse, file, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[VectorStoreFile], file, path=["response"])
@parametrize
async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient) -> None:
@@ -519,7 +519,7 @@ async def test_streaming_response_list(self, async_client: AsyncLlamaStackClient
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
file = await response.parse()
- assert_matches_type(FileListResponse, file, path=["response"])
+ assert_matches_type(AsyncOpenAICursorPage[VectorStoreFile], file, path=["response"])
assert cast(Any, response.is_closed) is True
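Finally, the raw- and streaming-response paths exercised throughout these tests now parse into the same page types. A sketch of that pattern, mirroring the test code above with a placeholder vector store ID:

```python
with client.vector_stores.files.with_streaming_response.list(
    vector_store_id="vs_123",  # placeholder ID
) as response:
    page = response.parse()  # SyncOpenAICursorPage[VectorStoreFile]
    for file in page.data:
        print(file.id)
```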