Skip to content

Commit

Permalink
Solid.js: Add complex response parsing and StreamData support to useChat (#738)

Browse files Browse the repository at this point in the history

Co-authored-by: Max Leiter <max.leiter@vercel.com>
  • Loading branch information
lgrammel and MaxLeiter committed Nov 16, 2023
1 parent 69ca8f5 commit 70bd2ac
Show file tree
Hide file tree
Showing 12 changed files with 546 additions and 302 deletions.
5 changes: 5 additions & 0 deletions .changeset/wild-carpets-move.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'ai': patch
---

ai/solid: add experimental_StreamData support to useChat
3 changes: 1 addition & 2 deletions examples/next-openai/app/stream-react-response/action.tsx
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
'use server';

import {
JSONValue,
Message,
OpenAIStream,
experimental_StreamData,
Expand Down Expand Up @@ -108,7 +107,7 @@ export async function handler({ messages }: { messages: Message[] }) {
return new experimental_StreamingReactResponse(stream, {
data,
ui({ content, data }) {
if (data != null) {
if (data?.[0] != null) {
const value = data[0] as any;

switch (value.type) {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
import {
OpenAIStream,
StreamingTextResponse,
experimental_StreamData,
} from 'ai';
import OpenAI from 'openai';
import type { ChatCompletionCreateParams } from 'openai/resources/chat';

import { APIEvent } from 'solid-start/api';

// Create an OpenAI API client.
// Reads the key from OPENAI_API_KEY; falls back to an empty string so that
// module evaluation itself never throws when the env var is unset (requests
// will fail with an auth error instead).
const openai = new OpenAI({
  apiKey: process.env['OPENAI_API_KEY'] || '',
});

// Function (tool) definitions advertised to the model on every request.
// The model may respond with a function_call naming one of these; see the
// POST handler below (server-side weather stub) and the client-side
// eval_code_in_browser handler for how each call is fulfilled.
const functions: ChatCompletionCreateParams.Function[] = [
  {
    name: 'get_current_weather',
    description: 'Get the current weather.',
    parameters: {
      type: 'object',
      properties: {
        format: {
          type: 'string',
          enum: ['celsius', 'fahrenheit'],
          description: 'The temperature unit to use.',
        },
      },
      required: ['format'],
    },
  },
  {
    // Executed on the client, not the server — see the chat page component.
    name: 'eval_code_in_browser',
    description: 'Execute javascript code in the browser with eval().',
    parameters: {
      type: 'object',
      properties: {
        code: {
          type: 'string',
          description: `Javascript code that will be directly executed via eval(). Do not use backticks in your response.
DO NOT include any newlines in your response, and be sure to provide only valid JSON when providing the arguments object.
The output of the eval() will be returned directly by the function.`,
        },
      },
      required: ['code'],
    },
  },
];

/**
 * POST handler for the chat-with-functions route.
 *
 * Streams an OpenAI chat completion back to the client, handling the
 * `get_current_weather` function call server-side and forwarding extra
 * payloads through an experimental_StreamData side channel, which is
 * closed when the text stream finishes.
 */
export const POST = async (event: APIEvent) => {
  const { messages } = await event.request.json();

  // Start a streaming completion with the function definitions attached.
  const completion = await openai.chat.completions.create({
    model: 'gpt-3.5-turbo-0613',
    stream: true,
    messages,
    functions,
  });

  const streamData = new experimental_StreamData();

  const textStream = OpenAIStream(completion, {
    experimental_onFunctionCall: async (
      { name, arguments: args },
      createFunctionCallMessages,
    ) => {
      // Only the weather function is fulfilled server-side; anything else
      // (e.g. eval_code_in_browser) falls through to the client.
      if (name !== 'get_current_weather') {
        return;
      }

      // Stubbed weather lookup — a real implementation would call an API.
      const weatherReport = {
        temperature: 20,
        unit: args.format === 'celsius' ? 'C' : 'F',
      };

      streamData.append({
        text: 'Some custom data',
      });

      // Feed the function result back to the model and keep streaming.
      return openai.chat.completions.create({
        messages: [...messages, ...createFunctionCallMessages(weatherReport)],
        stream: true,
        model: 'gpt-3.5-turbo-0613',
      });
    },
    onFinal() {
      // Close the side channel so the client stops waiting for data.
      streamData.close();
    },
    experimental_streamData: true,
  });

  streamData.append({
    text: 'Hello, how are you?',
  });

  // Respond with the stream
  return new StreamingTextResponse(textStream, {}, streamData);
};
88 changes: 88 additions & 0 deletions examples/solidstart-openai/src/routes/function-calling/index.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
import { FunctionCallHandler, Message, nanoid } from 'ai';
import { useChat } from 'ai/solid';
import { For, JSX } from 'solid-js';

/**
 * Chat page demonstrating client-side function calling with `useChat`.
 *
 * Renders the StreamData payloads received from the server, the message
 * history color-coded by role, and an input form. The model's
 * `eval_code_in_browser` function calls are executed locally.
 */
export default function Chat() {
  // Handles function calls requested by the model. Returning a message list
  // continues the conversation; returning undefined leaves it unchanged.
  const functionCallHandler: FunctionCallHandler = async (
    chatMessages,
    functionCall,
  ) => {
    if (functionCall.name === 'eval_code_in_browser') {
      if (functionCall.arguments) {
        // The model does not always escape generated code properly, so the
        // arguments string may not be valid JSON. Fail soft instead of
        // letting the exception escape and break the chat flow.
        let parsedFunctionCallArguments: { code: string };
        try {
          parsedFunctionCallArguments = JSON.parse(functionCall.arguments);
        } catch {
          return;
        }
        // WARNING: Do NOT do this in real-world applications!
        eval(parsedFunctionCallArguments.code);
        const functionResponse = {
          messages: [
            ...chatMessages,
            {
              id: nanoid(),
              name: 'eval_code_in_browser',
              role: 'function' as const,
              content: parsedFunctionCallArguments.code,
            },
          ],
        };
        return functionResponse;
      }
    }
  };

  const { messages, input, setInput, handleSubmit, data } = useChat({
    api: '/api/chat-with-functions',
    experimental_onFunctionCall: functionCallHandler,
  });

  // Generate a map of message role to text color
  const roleToColorMap: Record<Message['role'], string> = {
    system: 'red',
    user: 'black',
    function: 'blue',
    assistant: 'green',
  };

  const handleInputChange: JSX.ChangeEventHandlerUnion<
    HTMLInputElement,
    Event
  > = e => {
    setInput(e.target.value);
  };

  return (
    <div class="flex flex-col w-full max-w-md py-24 mx-auto stretch">
      {/* StreamData payloads appended by the server */}
      <div class="bg-gray-200 mb-8">
        <For each={data()}>
          {item => (
            <pre class="whitespace-pre-wrap">{JSON.stringify(item)}</pre>
          )}
        </For>
      </div>

      <For each={messages()}>
        {m => (
          <div
            class="whitespace-pre-wrap"
            style={{ color: roleToColorMap[m.role] }}
          >
            <strong>{`${m.role}: `}</strong>
            {m.content || JSON.stringify(m.function_call)}
            <br />
            <br />
          </div>
        )}
      </For>

      <form onSubmit={handleSubmit}>
        <input
          class="fixed bottom-0 w-full max-w-md p-2 mb-8 border border-gray-300 rounded shadow-xl"
          value={input()}
          placeholder="Say something..."
          onChange={handleInputChange}
        />
      </form>
    </div>
  );
}

0 comments on commit 70bd2ac

Please sign in to comment.