Commit

Align the chat request options for useChat across all frameworks (#859)
Co-authored-by: Lars Grammel <lgrammel@Larss-MBP.fritz.box>
lgrammel and Lars Grammel committed Dec 19, 2023
1 parent 2c1bbef commit ac503e0
Showing 11 changed files with 221 additions and 28 deletions.
5 changes: 5 additions & 0 deletions .changeset/brown-tigers-destroy.md
@@ -0,0 +1,5 @@
---
'ai': patch
---

ai/solid: add chat request options to useChat
5 changes: 5 additions & 0 deletions .changeset/thirty-tips-smash.md
@@ -0,0 +1,5 @@
---
'ai': patch
---

ai/vue: add chat request options to useChat
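For context, a minimal sketch (not part of the commit) of what these changesets enable in `ai/solid`: `append` and `reload` accept the same chat request options as `handleSubmit`, so extra headers, body fields, and `data` can ride along with a request. The endpoint, header, body field, and image URL below are placeholder assumptions.

```tsx
import { useChat } from 'ai/solid';

export default function Chat() {
  // '/api/chat' is a placeholder endpoint for this sketch.
  const { append, reload } = useChat({ api: '/api/chat' });

  // `append` and `reload` now take the same ChatRequestOptions object as
  // `handleSubmit` (see the packages/core/solid/use-chat.ts changes below).
  const askAboutImage = () =>
    append(
      { role: 'user', content: 'What is in this picture?' },
      {
        data: { imageUrl: 'https://example.com/photo.jpg' }, // placeholder image URL
        options: { headers: { 'x-demo': '1' } }, // placeholder per-request header
      },
    );

  const retry = () => reload({ options: { body: { retry: true } } }); // placeholder body field

  return (
    <div>
      <button onClick={askAboutImage}>Ask about the image</button>
      <button onClick={retry}>Retry last response</button>
    </div>
  );
}
```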
6 changes: 3 additions & 3 deletions docs/pages/docs/api-reference/use-chat.mdx
@@ -151,8 +151,9 @@ The `useChat` hook returns an object containing several helper methods and variables
],
[
'handleSubmit',
'(e: React.FormEvent<HTMLFormElement>) => void',
'Form submission handler that automatically resets the input field and appends a user message.',
'(e: React.FormEvent<HTMLFormElement>, chatRequestOptions?: ChatRequestOptions) => void',
'Form submission handler that automatically resets the input field and appends a user message. ' +
'You can use the `options` parameter to send additional data, headers and more to the server.',
],
[
'isLoading',
@@ -444,4 +445,3 @@ The `useChat` function returns an object containing several helper methods and variables

</Tab>
</FrameworkTabs>
```
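To make the documented signature concrete, here is a minimal sketch of a React component calling `handleSubmit` with the new second argument; the header, body field, and image URL are illustrative placeholders, not values from this commit.

```tsx
'use client';

import { useChat } from 'ai/react';

export default function Chat() {
  const { input, handleInputChange, handleSubmit } = useChat();

  return (
    <form
      onSubmit={e =>
        handleSubmit(e, {
          options: {
            headers: { 'x-trace-id': 'demo' }, // placeholder per-request header
            body: { userTier: 'free' }, // placeholder extra body field
          },
          data: { imageUrl: 'https://example.com/bird.jpg' }, // extra data the server can read
        })
      }
    >
      <input value={input} onChange={handleInputChange} placeholder="Say something..." />
    </form>
  );
}
```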
34 changes: 34 additions & 0 deletions examples/nuxt-openai/pages/vision/index.vue
@@ -0,0 +1,34 @@
<script setup lang="ts">
import { useChat } from 'ai/vue';
const { messages, input, handleSubmit } = useChat({
api: '/api/chat-with-vision',
});
</script>

<template>
<div class="flex flex-col w-full max-w-md py-24 mx-auto stretch">
<div v-for="m in messages" :key="m.id" class="whitespace-pre-wrap">
{{ m.role === 'user' ? 'User: ' : 'AI: ' }}
{{ m.content }}
</div>

<form
@submit="
e =>
handleSubmit(e, {
data: {
imageUrl:
'https://upload.wikimedia.org/wikipedia/commons/thumb/3/3c/Field_sparrow_in_CP_%2841484%29_%28cropped%29.jpg/733px-Field_sparrow_in_CP_%2841484%29_%28cropped%29.jpg',
},
})
"
>
<input
class="fixed bottom-0 w-full max-w-md p-2 mb-8 border border-gray-300 rounded shadow-xl"
v-model="input"
placeholder="Say something..."
/>
</form>
</div>
</template>
46 changes: 46 additions & 0 deletions examples/nuxt-openai/server/api/chat-with-vision.ts
@@ -0,0 +1,46 @@
import { OpenAIStream, StreamingTextResponse } from 'ai';
import OpenAI from 'openai';

export default defineLazyEventHandler(async () => {
const apiKey = useRuntimeConfig().openaiApiKey;
if (!apiKey) throw new Error('Missing OpenAI API key');
const openai = new OpenAI({
apiKey: apiKey,
});

return defineEventHandler(async (event: any) => {
// Extract the `messages` and additional `data` from the body of the request
const { messages, data } = await readBody(event);

const initialMessages = messages.slice(0, -1);
const currentMessage = messages[messages.length - 1];

// Ask OpenAI for a streaming chat completion given the prompt
const response = await openai.chat.completions.create({
model: 'gpt-4-vision-preview',
stream: true,
max_tokens: 150,
messages: [
...initialMessages,
{
...currentMessage,
content: [
{ type: 'text', text: currentMessage.content },

// forward the image information to OpenAI:
{
type: 'image_url',
image_url: data.imageUrl,
},
],
},
],
});

// Convert the response into a friendly text-stream
const stream = OpenAIStream(response);

// Respond with the stream
return new StreamingTextResponse(stream);
});
});
@@ -0,0 +1,43 @@
import { OpenAIStream, StreamingTextResponse } from 'ai';
import OpenAI from 'openai';
import { APIEvent } from 'solid-start/api';

// Create an OpenAI API client
const openai = new OpenAI({
apiKey: process.env['OPENAI_API_KEY'] || '',
});

export const POST = async (event: APIEvent) => {
// 'data' contains the additional data that you have sent:
const { messages, data } = await event.request.json();

const initialMessages = messages.slice(0, -1);
const currentMessage = messages[messages.length - 1];

// Ask OpenAI for a streaming chat completion given the prompt
const response = await openai.chat.completions.create({
model: 'gpt-4-vision-preview',
stream: true,
max_tokens: 150,
messages: [
...initialMessages,
{
...currentMessage,
content: [
{ type: 'text', text: currentMessage.content },

// forward the image information to OpenAI:
{
type: 'image_url',
image_url: data.imageUrl,
},
],
},
],
});

// Convert the response into a friendly text-stream
const stream = OpenAIStream(response);
// Respond with the stream
return new StreamingTextResponse(stream);
};
46 changes: 46 additions & 0 deletions examples/solidstart-openai/src/routes/vision/index.tsx
@@ -0,0 +1,46 @@
import { For, JSX } from 'solid-js';
import { useChat } from 'ai/solid';

export default function Chat() {
const { messages, input, setInput, handleSubmit } = useChat({
api: '/api/chat-with-vision',
});

const handleInputChange: JSX.ChangeEventHandlerUnion<
HTMLInputElement,
Event
> = e => {
setInput(e.target.value);
};

return (
<div class="flex flex-col w-full max-w-md py-24 mx-auto stretch">
<For each={messages()}>
{m => (
<div class="whitespace-pre-wrap">
{m.role === 'user' ? 'User: ' : 'AI: '}
{m.content}
</div>
)}
</For>

<form
onSubmit={e =>
handleSubmit(e, {
data: {
imageUrl:
'https://upload.wikimedia.org/wikipedia/commons/thumb/3/3c/Field_sparrow_in_CP_%2841484%29_%28cropped%29.jpg/733px-Field_sparrow_in_CP_%2841484%29_%28cropped%29.jpg',
},
})
}
>
<input
class="fixed bottom-0 w-full max-w-md p-2 mb-8 border border-gray-300 rounded shadow-xl"
value={input()}
placeholder="Say something..."
onChange={handleInputChange}
/>
</form>
</div>
);
}
2 changes: 1 addition & 1 deletion packages/core/react/use-chat.ts
@@ -61,7 +61,7 @@ export type UseChatHelpers = {
| React.ChangeEvent<HTMLInputElement>
| React.ChangeEvent<HTMLTextAreaElement>,
) => void;
/** Form submission handler to automatically reset input and append a user message */
/** Form submission handler to automatically reset input and append a user message */
handleSubmit: (
e: React.FormEvent<HTMLFormElement>,
chatRequestOptions?: ChatRequestOptions,
2 changes: 0 additions & 2 deletions packages/core/shared/types.ts
Expand Up @@ -145,9 +145,7 @@ export type RequestOptions = {

export type ChatRequestOptions = {
options?: RequestOptions;
// @deprecated
functions?: Array<Function>;
// @deprecated
function_call?: FunctionCall;
tools?: Array<Tool>;
tool_choice?: ToolChoice;
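For reference, a `ChatRequestOptions` value as consumed by the hooks in this commit might look like the sketch below. This assumes the type is exported from the `ai` package and also carries the `data` field the Solid hook destructures further down; the header, body field, and URL are placeholders, so treat it as illustrative rather than a copy of the type definition.

```ts
import type { ChatRequestOptions } from 'ai';

// Illustrative value only (placeholder header, body field, and URL).
const chatRequestOptions: ChatRequestOptions = {
  options: {
    headers: { Authorization: 'Bearer <token>' }, // per-request headers
    body: { temperature: 0.2 }, // extra fields merged into the request body
  },
  data: { imageUrl: 'https://example.com/photo.jpg' }, // forwarded to the server alongside `messages`
};
```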
33 changes: 21 additions & 12 deletions packages/core/solid/use-chat.ts
@@ -5,10 +5,10 @@ import { callChatApi } from '../shared/call-chat-api';
import { processChatStream } from '../shared/process-chat-stream';
import type {
ChatRequest,
ChatRequestOptions,
CreateMessage,
JSONValue,
Message,
RequestOptions,
UseChatOptions,
} from '../shared/types';
import { nanoid } from '../shared/utils';
@@ -28,14 +28,16 @@ export type UseChatHelpers = {
*/
append: (
message: Message | CreateMessage,
options?: RequestOptions,
chatRequestOptions?: ChatRequestOptions,
) => Promise<string | null | undefined>;
/**
* Reload the last AI chat response for the given chat history. If the last
* message isn't from the assistant, it will request the API to generate a
* new response.
*/
reload: (options?: RequestOptions) => Promise<string | null | undefined>;
reload: (
chatRequestOptions?: ChatRequestOptions,
) => Promise<string | null | undefined>;
/**
* Abort the current request immediately, keep the generated tokens if any.
*/
@@ -50,8 +52,8 @@ export type UseChatHelpers = {
input: Accessor<string>;
/** Signal setter to update the input value */
setInput: Setter<string>;
/** Form submission handler to automatically reset input and append a user message */
handleSubmit: (e: any) => void;
/** Form submission handler to automatically reset input and append a user message */
handleSubmit: (e: any, chatRequestOptions?: ChatRequestOptions) => void;
/** Whether the API request is in progress */
isLoading: Accessor<boolean>;
/** Additional data added on the server via StreamData */
@@ -109,7 +111,7 @@ export function useChat({
let abortController: AbortController | null = null;
async function triggerRequest(
messagesSnapshot: Message[],
options?: RequestOptions,
{ options, data }: ChatRequestOptions = {},
) {
try {
setError(undefined);
@@ -130,6 +132,7 @@
let chatRequest: ChatRequest = {
messages: messagesSnapshot,
options,
data,
};

await processChatStream({
@@ -151,6 +154,7 @@
}),
),
body: {
data: chatRequest.data,
...body,
...options?.body,
},
@@ -237,15 +241,20 @@

const [input, setInput] = createSignal(initialInput);

const handleSubmit = (e: any) => {
const handleSubmit = (e: any, options: ChatRequestOptions = {}) => {
e.preventDefault();
const inputValue = input();
if (!inputValue) return;
append({
content: inputValue,
role: 'user',
createdAt: new Date(),
});

append(
{
content: inputValue,
role: 'user',
createdAt: new Date(),
},
options,
);

setInput('');
};
