
```python
import os

from mistralai import Mistral, UserMessage

api_key = os.environ["MISTRAL_API_KEY"]
model = "mistral-large-latest"

client = Mistral(api_key=api_key)

messages = [
    {
        "role": "user",
        "content": "What is the best French cheese?",
    },
]
# Or using the new message classes
# messages = [
#     UserMessage(content="What is the best French cheese?"),
# ]

chat_response = client.chat.complete(
    model=model,
    messages=messages,
)

print(chat_response.choices[0].message.content)
```
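
The `complete` call above raises on HTTP failures rather than returning an error payload. A minimal guard, assuming the v1 client exposes `models.SDKError` as its base error type (as Speakeasy-generated SDKs generally do); the attribute names in the handler are assumptions, not taken from this guide:

```python
import os

from mistralai import Mistral, models

client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])

try:
    chat_response = client.chat.complete(
        model="mistral-large-latest",
        messages=[{"role": "user", "content": "What is the best French cheese?"}],
    )
    print(chat_response.choices[0].message.content)
except models.SDKError as e:
    # Assumed fields: Speakeasy SDK errors usually carry status_code and message.
    print(f"Request failed ({e.status_code}): {e.message}")
```
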
**New:**
```python
import os

from mistralai import Mistral, UserMessage

api_key = os.environ["MISTRAL_API_KEY"]
model = "mistral-large-latest"

client = Mistral(api_key=api_key)

messages = [
    {
        "role": "user",
"content": "What is the best French cheese?",
},
]
# Or using the new message classes
# messages = [
#     UserMessage(content="What is the best French cheese?"),
# ]

stream_response = client.chat.stream(
    model=model,
    messages=messages,
)

for chunk in stream_response:
    print(chunk.data.choices[0].delta.content)

```
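
Each chunk carries only a delta, so a caller that needs the whole reply has to accumulate it. A small sketch that replaces the simple loop above; the `None` check is a defensive assumption for chunks that carry no text:

```python
full_reply = ""
for chunk in stream_response:
    delta = chunk.data.choices[0].delta.content
    if delta is not None:  # assumption: a final chunk may carry no content
        print(delta, end="", flush=True)
        full_reply += delta
print()  # finish the line once the stream ends
```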

### Example 3: Async

**New:**
```python
import asyncio
import os

from mistralai import Mistral, UserMessage


async def main():
    client = Mistral(
        api_key=os.getenv("MISTRAL_API_KEY", ""),
    )

    messages = [
        {
            "role": "user",
            "content": "What is the best French cheese?",
        },
    ]
    # Or using the new message classes
    # messages = [
    #     UserMessage(
    #         content="What is the best French cheese?",
    #     ),
    # ]
    async_response = await client.chat.stream_async(
        messages=messages,
        model="mistral-large-latest",
    )

    async for chunk in async_response:
        print(chunk.data.choices[0].delta.content)


asyncio.run(main())
```
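
Since the v1 client exposes native coroutines, independent requests can share one client and run concurrently. A minimal sketch built only on the `chat.stream_async` call shown above; the `ask` helper and the second question are illustrative, not part of this guide:

```python
import asyncio
import os

from mistralai import Mistral


async def ask(client: Mistral, question: str) -> str:
    # Stream one completion and collect the deltas into a single string.
    reply = ""
    response = await client.chat.stream_async(
        model="mistral-large-latest",
        messages=[{"role": "user", "content": question}],
    )
    async for chunk in response:
        delta = chunk.data.choices[0].delta.content
        if delta is not None:  # assumption: a final chunk may carry no content
            reply += delta
    return reply


async def main():
    client = Mistral(api_key=os.getenv("MISTRAL_API_KEY", ""))
    # Run both requests concurrently on the shared client.
    answers = await asyncio.gather(
        ask(client, "What is the best French cheese?"),
        ask(client, "What is the best Italian cheese?"),
    )
    for answer in answers:
        print(answer)


asyncio.run(main())
```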