diff --git a/.speakeasy/gen.lock b/.speakeasy/gen.lock
index 80be7b20..ad83b298 100644
--- a/.speakeasy/gen.lock
+++ b/.speakeasy/gen.lock
@@ -1,12 +1,12 @@
lockVersion: 2.0.0
id: 2d045ec7-2ebb-4f4d-ad25-40953b132161
management:
- docChecksum: e9c447db719018a5721988252c09c2dc
+ docChecksum: 12226a12b72a82af24e4b098c631ff42
docVersion: 1.0.0
speakeasyVersion: 1.517.3
generationVersion: 2.548.6
- releaseVersion: 1.8.0
- configChecksum: 1f7adfac0b677cdca4c073a11cbcef02
+ releaseVersion: 1.8.1
+ configChecksum: a47e6a59e54c30528cf829e1a6fcc310
repoURL: https://github.com/mistralai/client-python.git
installationURL: https://github.com/mistralai/client-python.git
published: true
@@ -185,6 +185,7 @@ generatedFiles:
- docs/models/documentlibrarytooltype.md
- docs/models/documenturlchunk.md
- docs/models/documenturlchunktype.md
+ - docs/models/embeddingdtype.md
- docs/models/embeddingrequest.md
- docs/models/embeddingrequestinputs.md
- docs/models/embeddingresponse.md
@@ -398,7 +399,6 @@ generatedFiles:
- docs/sdks/ocr/README.md
- poetry.toml
- py.typed
- - pylintrc
- scripts/prepare_readme.py
- scripts/publish.sh
- src/mistralai/__init__.py
@@ -500,6 +500,7 @@ generatedFiles:
- src/mistralai/models/deltamessage.py
- src/mistralai/models/documentlibrarytool.py
- src/mistralai/models/documenturlchunk.py
+ - src/mistralai/models/embeddingdtype.py
- src/mistralai/models/embeddingrequest.py
- src/mistralai/models/embeddingresponse.py
- src/mistralai/models/embeddingresponsedata.py
@@ -1004,6 +1005,7 @@ examples:
responses:
"422":
application/json: {}
+ "200": {}
agents_api_v1_conversations_append_stream:
speakeasy-default-agents-api-v1-conversations-append-stream:
parameters:
@@ -1014,6 +1016,7 @@ examples:
responses:
"422":
application/json: {}
+ "200": {}
agents_api_v1_conversations_restart_stream:
speakeasy-default-agents-api-v1-conversations-restart-stream:
parameters:
@@ -1024,6 +1027,7 @@ examples:
responses:
"422":
application/json: {}
+ "200": {}
agents_api_v1_agents_create:
speakeasy-default-agents-api-v1-agents-create:
requestBody:
diff --git a/.speakeasy/gen.yaml b/.speakeasy/gen.yaml
index 820ed567..a8255953 100644
--- a/.speakeasy/gen.yaml
+++ b/.speakeasy/gen.yaml
@@ -15,7 +15,7 @@ generation:
oAuth2ClientCredentialsEnabled: true
oAuth2PasswordEnabled: false
python:
- version: 1.8.0
+ version: 1.8.1
additionalDependencies:
dev:
pytest: ^8.2.2
diff --git a/.speakeasy/workflow.lock b/.speakeasy/workflow.lock
index d0361942..d38d57a2 100644
--- a/.speakeasy/workflow.lock
+++ b/.speakeasy/workflow.lock
@@ -14,10 +14,11 @@ sources:
- latest
mistral-openapi:
sourceNamespace: mistral-openapi
- sourceRevisionDigest: sha256:f2590d9933e1e9208fa5b8e509b671e6a86907268bcd5dad41dc4179e20c5b69
- sourceBlobDigest: sha256:3026ed65da39c94e9787697305e7e059bec5cff09bceeddc6e68c289cfaeb592
+ sourceRevisionDigest: sha256:ab414b4936bea95f6713273bbcee90d66af9da18c6672d62ce885769c47edc34
+ sourceBlobDigest: sha256:109235b09f0f6d400d146591dff959fac189a873fc95f4b32867d47d65779d28
tags:
- latest
+ - speakeasy-sdk-regen-1748446717
targets:
mistralai-azure-sdk:
source: mistral-azure-source
@@ -36,10 +37,10 @@ targets:
mistralai-sdk:
source: mistral-openapi
sourceNamespace: mistral-openapi
- sourceRevisionDigest: sha256:f2590d9933e1e9208fa5b8e509b671e6a86907268bcd5dad41dc4179e20c5b69
- sourceBlobDigest: sha256:3026ed65da39c94e9787697305e7e059bec5cff09bceeddc6e68c289cfaeb592
+ sourceRevisionDigest: sha256:ab414b4936bea95f6713273bbcee90d66af9da18c6672d62ce885769c47edc34
+ sourceBlobDigest: sha256:109235b09f0f6d400d146591dff959fac189a873fc95f4b32867d47d65779d28
codeSamplesNamespace: mistral-openapi-code-samples
- codeSamplesRevisionDigest: sha256:bd4031e558c0426c02f2a4f3bb1642068047aa555e0f9cbbc70de74ff7ec04ec
+ codeSamplesRevisionDigest: sha256:03b507fe6fdcabb21ec711d436300a3888b22fbfc970722bb3433db31c06047a
workflow:
workflowVersion: 1.0.0
speakeasyVersion: 1.517.3
diff --git a/RELEASES.md b/RELEASES.md
index fc9229a9..14663d6c 100644
--- a/RELEASES.md
+++ b/RELEASES.md
@@ -218,4 +218,14 @@ Based on:
### Generated
- [python v1.8.0] .
### Releases
-- [PyPI v1.8.0] https://pypi.org/project/mistralai/1.8.0 - .
\ No newline at end of file
+- [PyPI v1.8.0] https://pypi.org/project/mistralai/1.8.0 - .
+
+## 2025-05-28 15:38:22
+### Changes
+Based on:
+- OpenAPI Doc
+- Speakeasy CLI 1.517.3 (2.548.6) https://github.com/speakeasy-api/speakeasy
+### Generated
+- [python v1.8.1] .
+### Releases
+- [PyPI v1.8.1] https://pypi.org/project/mistralai/1.8.1 - .
\ No newline at end of file
diff --git a/docs/models/chatcompletionresponse.md b/docs/models/chatcompletionresponse.md
index ad376158..a0465ffb 100644
--- a/docs/models/chatcompletionresponse.md
+++ b/docs/models/chatcompletionresponse.md
@@ -9,5 +9,5 @@
| `object` | *str* | :heavy_check_mark: | N/A | chat.completion |
| `model` | *str* | :heavy_check_mark: | N/A | mistral-small-latest |
| `usage` | [models.UsageInfo](../models/usageinfo.md) | :heavy_check_mark: | N/A | |
-| `created` | *Optional[int]* | :heavy_minus_sign: | N/A | 1702256327 |
-| `choices` | List[[models.ChatCompletionChoice](../models/chatcompletionchoice.md)] | :heavy_minus_sign: | N/A | |
\ No newline at end of file
+| `created` | *int* | :heavy_check_mark: | N/A | 1702256327 |
+| `choices` | List[[models.ChatCompletionChoice](../models/chatcompletionchoice.md)] | :heavy_check_mark: | N/A | |
\ No newline at end of file
diff --git a/docs/models/embeddingdtype.md b/docs/models/embeddingdtype.md
new file mode 100644
index 00000000..01656b0a
--- /dev/null
+++ b/docs/models/embeddingdtype.md
@@ -0,0 +1,12 @@
+# EmbeddingDtype
+
+
+## Values
+
+| Name | Value |
+| --------- | --------- |
+| `FLOAT` | float |
+| `INT8` | int8 |
+| `UINT8` | uint8 |
+| `BINARY` | binary |
+| `UBINARY` | ubinary |
\ No newline at end of file
diff --git a/docs/models/embeddingrequest.md b/docs/models/embeddingrequest.md
index 242bb3e3..3a778a6f 100644
--- a/docs/models/embeddingrequest.md
+++ b/docs/models/embeddingrequest.md
@@ -6,4 +6,6 @@
| Field | Type | Required | Description | Example |
| -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- | -------------------------------------------------------------------- |
| `model` | *str* | :heavy_check_mark: | ID of the model to use. | mistral-embed |
-| `inputs` | [models.EmbeddingRequestInputs](../models/embeddingrequestinputs.md) | :heavy_check_mark: | Text to embed. | [
"Embed this sentence.",
"As well as this one."
] |
\ No newline at end of file
+| `inputs` | [models.EmbeddingRequestInputs](../models/embeddingrequestinputs.md) | :heavy_check_mark: | Text to embed. | [
"Embed this sentence.",
"As well as this one."
] |
+| `output_dimension` | *OptionalNullable[int]* | :heavy_minus_sign: | The dimension of the output embeddings. | |
+| `output_dtype` | [Optional[models.EmbeddingDtype]](../models/embeddingdtype.md) | :heavy_minus_sign: | N/A | |
\ No newline at end of file
diff --git a/docs/models/fimcompletionresponse.md b/docs/models/fimcompletionresponse.md
index da786a1f..cd62d034 100644
--- a/docs/models/fimcompletionresponse.md
+++ b/docs/models/fimcompletionresponse.md
@@ -9,5 +9,5 @@
| `object` | *str* | :heavy_check_mark: | N/A | chat.completion |
| `model` | *str* | :heavy_check_mark: | N/A | codestral-latest |
| `usage` | [models.UsageInfo](../models/usageinfo.md) | :heavy_check_mark: | N/A | |
-| `created` | *Optional[int]* | :heavy_minus_sign: | N/A | 1702256327 |
-| `choices` | List[[models.ChatCompletionChoice](../models/chatcompletionchoice.md)] | :heavy_minus_sign: | N/A | |
\ No newline at end of file
+| `created` | *int* | :heavy_check_mark: | N/A | 1702256327 |
+| `choices` | List[[models.ChatCompletionChoice](../models/chatcompletionchoice.md)] | :heavy_check_mark: | N/A | |
\ No newline at end of file
diff --git a/docs/sdks/conversations/README.md b/docs/sdks/conversations/README.md
index b5c12b24..8b462c16 100644
--- a/docs/sdks/conversations/README.md
+++ b/docs/sdks/conversations/README.md
@@ -3,6 +3,8 @@
## Overview
+(beta) Conversations API
+
### Available Operations
* [start](#start) - Create a conversation and append entries to it.
diff --git a/docs/sdks/embeddings/README.md b/docs/sdks/embeddings/README.md
index d55b38fb..91e33138 100644
--- a/docs/sdks/embeddings/README.md
+++ b/docs/sdks/embeddings/README.md
@@ -40,6 +40,8 @@ with Mistral(
| ----------------------------------------------------------------------- | ----------------------------------------------------------------------- | ----------------------------------------------------------------------- | ----------------------------------------------------------------------- | ----------------------------------------------------------------------- |
| `model` | *str* | :heavy_check_mark: | ID of the model to use. | mistral-embed |
| `inputs` | [models.EmbeddingRequestInputs](../../models/embeddingrequestinputs.md) | :heavy_check_mark: | Text to embed. | [
"Embed this sentence.",
"As well as this one."
] |
+| `output_dimension` | *OptionalNullable[int]* | :heavy_minus_sign: | The dimension of the output embeddings. | |
+| `output_dtype` | [Optional[models.EmbeddingDtype]](../../models/embeddingdtype.md) | :heavy_minus_sign: | N/A | |
| `retries` | [Optional[utils.RetryConfig]](../../models/utils/retryconfig.md) | :heavy_minus_sign: | Configuration to override the default retry behavior of the client. | |
### Response
diff --git a/docs/sdks/mistralagents/README.md b/docs/sdks/mistralagents/README.md
index aeb2b917..496016c3 100644
--- a/docs/sdks/mistralagents/README.md
+++ b/docs/sdks/mistralagents/README.md
@@ -3,6 +3,8 @@
## Overview
+(beta) Agents API
+
### Available Operations
* [create](#create) - Create a agent that can be used within a conversation.
diff --git a/pyproject.toml b/pyproject.toml
index dc055d7b..961af49d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "mistralai"
-version = "1.8.0"
+version = "1.8.1"
description = "Python Client SDK for the Mistral AI API."
authors = [{ name = "Mistral" },]
readme = "README-PYPI.md"
diff --git a/src/mistralai/_version.py b/src/mistralai/_version.py
index de6b8db8..ddccfbfa 100644
--- a/src/mistralai/_version.py
+++ b/src/mistralai/_version.py
@@ -3,10 +3,10 @@
import importlib.metadata
__title__: str = "mistralai"
-__version__: str = "1.8.0"
+__version__: str = "1.8.1"
__openapi_doc_version__: str = "1.0.0"
__gen_version__: str = "2.548.6"
-__user_agent__: str = "speakeasy-sdk/python 1.8.0 2.548.6 1.0.0 mistralai"
+__user_agent__: str = "speakeasy-sdk/python 1.8.1 2.548.6 1.0.0 mistralai"
try:
if __package__ is not None:
diff --git a/src/mistralai/beta.py b/src/mistralai/beta.py
index 6858b0a8..a0d45f67 100644
--- a/src/mistralai/beta.py
+++ b/src/mistralai/beta.py
@@ -8,7 +8,9 @@
class Beta(BaseSDK):
conversations: Conversations
+    r"""(beta) Conversations API"""
agents: MistralAgents
+ r"""(beta) Agents API"""
def __init__(self, sdk_config: SDKConfiguration) -> None:
BaseSDK.__init__(self, sdk_config)
diff --git a/src/mistralai/conversations.py b/src/mistralai/conversations.py
index 6e4b37ee..320e3d09 100644
--- a/src/mistralai/conversations.py
+++ b/src/mistralai/conversations.py
@@ -34,8 +34,9 @@
# endregion imports
-
class Conversations(BaseSDK):
+    r"""(beta) Conversations API"""
+
# region sdk-class-body
# Custom run code allowing client side execution of code
@@ -146,7 +147,9 @@ async def run_stream_async(
completion_args=completion_args,
)
- async def run_generator() -> AsyncGenerator[Union[RunResultEvents, RunResult], None]:
+ async def run_generator() -> (
+ AsyncGenerator[Union[RunResultEvents, RunResult], None]
+ ):
current_entries = input_entries
while True:
received_event_tracker: defaultdict[
diff --git a/src/mistralai/embeddings.py b/src/mistralai/embeddings.py
index b81a5e37..fee30251 100644
--- a/src/mistralai/embeddings.py
+++ b/src/mistralai/embeddings.py
@@ -18,6 +18,8 @@ def create(
inputs: Union[
models.EmbeddingRequestInputs, models.EmbeddingRequestInputsTypedDict
],
+ output_dimension: OptionalNullable[int] = UNSET,
+ output_dtype: Optional[models.EmbeddingDtype] = None,
retries: OptionalNullable[utils.RetryConfig] = UNSET,
server_url: Optional[str] = None,
timeout_ms: Optional[int] = None,
@@ -29,6 +31,8 @@ def create(
:param model: ID of the model to use.
:param inputs: Text to embed.
+ :param output_dimension: The dimension of the output embeddings.
+ :param output_dtype:
:param retries: Override the default retry configuration for this method
:param server_url: Override the default server URL for this method
:param timeout_ms: Override the default request timeout configuration for this method in milliseconds
@@ -47,6 +51,8 @@ def create(
request = models.EmbeddingRequest(
model=model,
inputs=inputs,
+ output_dimension=output_dimension,
+ output_dtype=output_dtype,
)
req = self._build_request(
@@ -125,6 +131,8 @@ async def create_async(
inputs: Union[
models.EmbeddingRequestInputs, models.EmbeddingRequestInputsTypedDict
],
+ output_dimension: OptionalNullable[int] = UNSET,
+ output_dtype: Optional[models.EmbeddingDtype] = None,
retries: OptionalNullable[utils.RetryConfig] = UNSET,
server_url: Optional[str] = None,
timeout_ms: Optional[int] = None,
@@ -136,6 +144,8 @@ async def create_async(
:param model: ID of the model to use.
:param inputs: Text to embed.
+ :param output_dimension: The dimension of the output embeddings.
+ :param output_dtype:
:param retries: Override the default retry configuration for this method
:param server_url: Override the default server URL for this method
:param timeout_ms: Override the default request timeout configuration for this method in milliseconds
@@ -154,6 +164,8 @@ async def create_async(
request = models.EmbeddingRequest(
model=model,
inputs=inputs,
+ output_dimension=output_dimension,
+ output_dtype=output_dtype,
)
req = self._build_request_async(
diff --git a/src/mistralai/mistral_agents.py b/src/mistralai/mistral_agents.py
index 5fdd8f32..a22ce41d 100644
--- a/src/mistralai/mistral_agents.py
+++ b/src/mistralai/mistral_agents.py
@@ -9,6 +9,8 @@
class MistralAgents(BaseSDK):
+ r"""(beta) Agents API"""
+
def create(
self,
*,
diff --git a/src/mistralai/models/__init__.py b/src/mistralai/models/__init__.py
index cf121986..9ed85c07 100644
--- a/src/mistralai/models/__init__.py
+++ b/src/mistralai/models/__init__.py
@@ -347,6 +347,7 @@
DocumentURLChunkType,
DocumentURLChunkTypedDict,
)
+from .embeddingdtype import EmbeddingDtype
from .embeddingrequest import (
EmbeddingRequest,
EmbeddingRequestInputs,
@@ -964,6 +965,7 @@
"DocumentURLChunk",
"DocumentURLChunkType",
"DocumentURLChunkTypedDict",
+ "EmbeddingDtype",
"EmbeddingRequest",
"EmbeddingRequestInputs",
"EmbeddingRequestInputsTypedDict",
diff --git a/src/mistralai/models/chatcompletionresponse.py b/src/mistralai/models/chatcompletionresponse.py
index 67f19651..3d03b126 100644
--- a/src/mistralai/models/chatcompletionresponse.py
+++ b/src/mistralai/models/chatcompletionresponse.py
@@ -4,8 +4,8 @@
from .chatcompletionchoice import ChatCompletionChoice, ChatCompletionChoiceTypedDict
from .usageinfo import UsageInfo, UsageInfoTypedDict
from mistralai.types import BaseModel
-from typing import List, Optional
-from typing_extensions import NotRequired, TypedDict
+from typing import List
+from typing_extensions import TypedDict
class ChatCompletionResponseTypedDict(TypedDict):
@@ -13,8 +13,8 @@ class ChatCompletionResponseTypedDict(TypedDict):
object: str
model: str
usage: UsageInfoTypedDict
- created: NotRequired[int]
- choices: NotRequired[List[ChatCompletionChoiceTypedDict]]
+ created: int
+ choices: List[ChatCompletionChoiceTypedDict]
class ChatCompletionResponse(BaseModel):
@@ -26,6 +26,6 @@ class ChatCompletionResponse(BaseModel):
usage: UsageInfo
- created: Optional[int] = None
+ created: int
- choices: Optional[List[ChatCompletionChoice]] = None
+ choices: List[ChatCompletionChoice]
diff --git a/src/mistralai/models/embeddingdtype.py b/src/mistralai/models/embeddingdtype.py
new file mode 100644
index 00000000..4f3c41bd
--- /dev/null
+++ b/src/mistralai/models/embeddingdtype.py
@@ -0,0 +1,7 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from typing import Literal
+
+
+EmbeddingDtype = Literal["float", "int8", "uint8", "binary", "ubinary"]
diff --git a/src/mistralai/models/embeddingrequest.py b/src/mistralai/models/embeddingrequest.py
index bf9ce3ff..56cccc72 100644
--- a/src/mistralai/models/embeddingrequest.py
+++ b/src/mistralai/models/embeddingrequest.py
@@ -1,10 +1,12 @@
"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
from __future__ import annotations
-from mistralai.types import BaseModel
+from .embeddingdtype import EmbeddingDtype
+from mistralai.types import BaseModel, Nullable, OptionalNullable, UNSET, UNSET_SENTINEL
import pydantic
-from typing import List, Union
-from typing_extensions import Annotated, TypeAliasType, TypedDict
+from pydantic import model_serializer
+from typing import List, Optional, Union
+from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict
EmbeddingRequestInputsTypedDict = TypeAliasType(
@@ -22,6 +24,9 @@ class EmbeddingRequestTypedDict(TypedDict):
r"""ID of the model to use."""
inputs: EmbeddingRequestInputsTypedDict
r"""Text to embed."""
+ output_dimension: NotRequired[Nullable[int]]
+ r"""The dimension of the output embeddings."""
+ output_dtype: NotRequired[EmbeddingDtype]
class EmbeddingRequest(BaseModel):
@@ -30,3 +35,38 @@ class EmbeddingRequest(BaseModel):
inputs: Annotated[EmbeddingRequestInputs, pydantic.Field(alias="input")]
r"""Text to embed."""
+
+ output_dimension: OptionalNullable[int] = UNSET
+ r"""The dimension of the output embeddings."""
+
+ output_dtype: Optional[EmbeddingDtype] = None
+
+ @model_serializer(mode="wrap")
+ def serialize_model(self, handler):
+ optional_fields = ["output_dimension", "output_dtype"]
+ nullable_fields = ["output_dimension"]
+ null_default_fields = []
+
+ serialized = handler(self)
+
+ m = {}
+
+ for n, f in self.model_fields.items():
+ k = f.alias or n
+ val = serialized.get(k)
+ serialized.pop(k, None)
+
+ optional_nullable = k in optional_fields and k in nullable_fields
+ is_set = (
+ self.__pydantic_fields_set__.intersection({n})
+ or k in null_default_fields
+ ) # pylint: disable=no-member
+
+ if val is not None and val != UNSET_SENTINEL:
+ m[k] = val
+ elif val != UNSET_SENTINEL and (
+ not k in optional_fields or (optional_nullable and is_set)
+ ):
+ m[k] = val
+
+ return m
diff --git a/src/mistralai/models/fimcompletionresponse.py b/src/mistralai/models/fimcompletionresponse.py
index 9fe05820..f27972b9 100644
--- a/src/mistralai/models/fimcompletionresponse.py
+++ b/src/mistralai/models/fimcompletionresponse.py
@@ -4,8 +4,8 @@
from .chatcompletionchoice import ChatCompletionChoice, ChatCompletionChoiceTypedDict
from .usageinfo import UsageInfo, UsageInfoTypedDict
from mistralai.types import BaseModel
-from typing import List, Optional
-from typing_extensions import NotRequired, TypedDict
+from typing import List
+from typing_extensions import TypedDict
class FIMCompletionResponseTypedDict(TypedDict):
@@ -13,8 +13,8 @@ class FIMCompletionResponseTypedDict(TypedDict):
object: str
model: str
usage: UsageInfoTypedDict
- created: NotRequired[int]
- choices: NotRequired[List[ChatCompletionChoiceTypedDict]]
+ created: int
+ choices: List[ChatCompletionChoiceTypedDict]
class FIMCompletionResponse(BaseModel):
@@ -26,6 +26,6 @@ class FIMCompletionResponse(BaseModel):
usage: UsageInfo
- created: Optional[int] = None
+ created: int
- choices: Optional[List[ChatCompletionChoice]] = None
+ choices: List[ChatCompletionChoice]