Skip to content

Commit

Permalink
Bump openai in examples, support openai@4.10+ (#612)
Browse files — Browse the repository at this point in the history
MaxLeiter committed Sep 30, 2023
1 parent 88301c0 commit 6229d6b
Show file tree
Hide file tree
Showing 16 changed files with 120 additions and 71 deletions.
5 changes: 5 additions & 0 deletions .changeset/cuddly-radios-confess.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'ai': patch
---

openai: fix openai types in openai@4.10+
2 changes: 1 addition & 1 deletion examples/next-anthropic/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
},
"dependencies": {
"@anthropic-ai/sdk": "^0.6.2",
"ai": "2.2.12",
"ai": "2.2.13",
"next": "13.4.12",
"react": "18.2.0",
"react-dom": "^18.2.0"
Expand Down
2 changes: 1 addition & 1 deletion examples/next-fireworks/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
"dependencies": {
"ai": "2.2.13",
"next": "13.4.12",
"openai": "4.2.0",
"openai": "4.11.0",
"react": "18.2.0",
"react-dom": "^18.2.0"
},
Expand Down
2 changes: 1 addition & 1 deletion examples/next-openai-rate-limits/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
"@vercel/kv": "^0.2.2",
"ai": "2.2.13",
"next": "13.4.12",
"openai": "4.2.0",
"openai": "4.11.0",
"react": "18.2.0",
"react-dom": "^18.2.0",
"sonner": "^0.6.2"
Expand Down
4 changes: 2 additions & 2 deletions examples/next-openai/app/api/chat-with-functions/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import {
experimental_StreamData,
} from 'ai';
import OpenAI from 'openai';
import { CompletionCreateParams } from 'openai/resources/chat';
import type { ChatCompletionCreateParams } from 'openai/resources/chat';
// Create an OpenAI API client (that's edge friendly!)
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY || '',
Expand All @@ -13,7 +13,7 @@ const openai = new OpenAI({
// IMPORTANT! Set the runtime to edge
export const runtime = 'edge';

const functions: CompletionCreateParams.Function[] = [
const functions: ChatCompletionCreateParams.Function[] = [
{
name: 'get_current_weather',
description: 'Get the current weather.',
Expand Down
2 changes: 1 addition & 1 deletion examples/next-openai/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
"dependencies": {
"ai": "2.2.13",
"next": "13.4.12",
"openai": "4.2.0",
"openai": "4.11.0",
"react": "18.2.0",
"react-dom": "^18.2.0"
},
Expand Down
2 changes: 1 addition & 1 deletion examples/nuxt-openai/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
"@vue/shared": "^3.3.4",
"ai": "2.2.13",
"nuxt": "^3.6.5",
"openai": "4.2.0",
"openai": "4.11.0",
"tailwindcss": "^3.3.3",
"ufo": "^1.2.0",
"unctx": "^2.3.1",
Expand Down
2 changes: 1 addition & 1 deletion examples/solidstart-openai/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
"@solidjs/meta": "^0.28.2",
"@solidjs/router": "^0.8.2",
"ai": "2.2.13",
"openai": "4.2.0",
"openai": "4.11.0",
"solid-js": "^1.7.2",
"solid-start": "^0.2.26",
"undici": "^5.15.1"
Expand Down
2 changes: 1 addition & 1 deletion examples/sveltekit-openai/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch"
},
"dependencies": {
"openai": "4.2.0",
"openai": "4.11.0",
"ai": "2.2.13"
},
"devDependencies": {
Expand Down
2 changes: 0 additions & 2 deletions packages/core/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,6 @@
"dependencies": {
"eventsource-parser": "1.0.0",
"nanoid": "3.3.6",
"openai": "4.2.0",
"solid-swr-store": "0.10.7",
"sswr": "2.0.0",
"swr": "2.2.0",
Expand All @@ -87,7 +86,6 @@
"eslint": "^7.32.0",
"eslint-config-vercel-ai": "workspace:*",
"jest": "29.2.1",
"replicate": "^0.16.0",
"ts-jest": "29.0.3",
"tsup": "^6.7.0",
"typescript": "5.1.3"
Expand Down
13 changes: 8 additions & 5 deletions packages/core/react/use-chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ import type {
Message,
UseChatOptions,
ChatRequestOptions,
FunctionCall,
} from '../shared/types';
import { CreateChatCompletionRequestMessage } from 'openai/resources/chat';
export type { Message, CreateMessage, UseChatOptions };

export type UseChatHelpers = {
Expand Down Expand Up @@ -151,7 +151,9 @@ const getStreamedResponse = async (
let responseData: any = [];
type PrefixMap = {
text?: Message;
function_call?: string | CreateChatCompletionRequestMessage;
function_call?:
| string
| Pick<Message, 'function_call' | 'role' | 'content' | 'name'>;
data?: string[];
};

Expand Down Expand Up @@ -221,8 +223,9 @@ const getStreamedResponse = async (
let functionCall = prefixMap['function_call'];
// Ensure it hasn't been parsed
if (functionCall && typeof functionCall === 'string') {
const parsedFunctionCall: CreateChatCompletionRequestMessage.FunctionCall =
JSON.parse(functionCall as string).function_call;
const parsedFunctionCall: FunctionCall = JSON.parse(
functionCall as string,
).function_call;

functionCallMessage = {
id: nanoid(),
Expand Down Expand Up @@ -314,7 +317,7 @@ const getStreamedResponse = async (

if (streamedResponse.startsWith('{"function_call":')) {
// Once the stream is complete, the function call is parsed into an object.
const parsedFunctionCall: CreateChatCompletionRequestMessage.FunctionCall =
const parsedFunctionCall: FunctionCall =
JSON.parse(streamedResponse).function_call;

responseMessage['function_call'] = parsedFunctionCall;
Expand Down
58 changes: 47 additions & 11 deletions packages/core/shared/types.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,44 @@
import {
ChatCompletionMessage,
CreateChatCompletionRequestMessage,
CompletionCreateParams,
} from 'openai/resources/chat';
// https://github.com/openai/openai-node/blob/07b3504e1c40fd929f4aae1651b83afc19e3baf8/src/resources/chat/completions.ts#L146-L159
export interface FunctionCall {
/**
* The arguments to call the function with, as generated by the model in JSON
* format. Note that the model does not always generate valid JSON, and may
* hallucinate parameters not defined by your function schema. Validate the
* arguments in your code before calling your function.
*/
arguments?: string;

/**
* The name of the function to call.
*/
name?: string;
}

//
interface Function {
/**
* The name of the function to be called. Must be a-z, A-Z, 0-9, or contain
* underscores and dashes, with a maximum length of 64.
*/
name: string;

/**
* The parameters the functions accepts, described as a JSON Schema object. See the
* [guide](/docs/guides/gpt/function-calling) for examples, and the
* [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for
* documentation about the format.
*
* To describe a function that accepts no parameters, provide the value
* `{"type": "object", "properties": {}}`.
*/
parameters: Record<string, unknown>;

/**
* A description of what the function does, used by the model to choose when and
* how to call the function.
*/
description?: string;
}

/**
* Shared types between the API and UI packages.
Expand All @@ -22,7 +58,7 @@ export type Message = {
* contains the function call name and arguments. Otherwise, the field should
* not be set.
*/
function_call?: string | ChatCompletionMessage.FunctionCall;
function_call?: string | FunctionCall;
};

export type CreateMessage = Omit<Message, 'id'> & {
Expand All @@ -32,13 +68,13 @@ export type CreateMessage = Omit<Message, 'id'> & {
export type ChatRequest = {
messages: Message[];
options?: RequestOptions;
functions?: Array<CompletionCreateParams.Function>;
function_call?: CreateChatCompletionRequestMessage.FunctionCall;
functions?: Array<Function>;
function_call?: FunctionCall;
};

export type FunctionCallHandler = (
chatMessages: Message[],
functionCall: ChatCompletionMessage.FunctionCall,
functionCall: FunctionCall,
) => Promise<ChatRequest | void>;

export type RequestOptions = {
Expand All @@ -48,8 +84,8 @@ export type RequestOptions = {

export type ChatRequestOptions = {
options?: RequestOptions;
functions?: Array<CompletionCreateParams.Function>;
function_call?: CreateChatCompletionRequestMessage.FunctionCall;
functions?: Array<Function>;
function_call?: FunctionCall;
};

export type UseChatOptions = {
Expand Down
28 changes: 8 additions & 20 deletions packages/core/streams/openai-stream.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
import { CreateMessage, JSONValue } from '../shared/types';
import {
CreateMessage,
FunctionCall,
JSONValue,
Message,
} from '../shared/types';
import { createChunkDecoder, getStreamString } from '../shared/utils';

import {
Expand Down Expand Up @@ -59,11 +64,10 @@ interface ChatCompletionChunk {
}

// https://github.com/openai/openai-node/blob/07b3504e1c40fd929f4aae1651b83afc19e3baf8/src/resources/chat/completions.ts#L43-L49
// Updated for https://github.com/openai/openai-node/commit/f10c757d831d90407ba47b4659d9cd34b1a35b1d
interface ChatCompletionChunkChoice {
delta: ChoiceDelta;

finish_reason: 'stop' | 'length' | 'function_call' | null;

finish_reason: 'stop' | 'length' | 'function_call' | 'content_filter' | null;
index: number;
}

Expand All @@ -86,22 +90,6 @@ interface ChoiceDelta {
role?: 'system' | 'user' | 'assistant' | 'function';
}

// https://github.com/openai/openai-node/blob/07b3504e1c40fd929f4aae1651b83afc19e3baf8/src/resources/chat/completions.ts#L146-L159
interface FunctionCall {
/**
* The arguments to call the function with, as generated by the model in JSON
* format. Note that the model does not always generate valid JSON, and may
* hallucinate parameters not defined by your function schema. Validate the
* arguments in your code before calling your function.
*/
arguments?: string;

/**
* The name of the function to call.
*/
name?: string;
}

/**
* https://github.com/openai/openai-node/blob/3ec43ee790a2eb6a0ccdd5f25faa23251b0f9b8e/src/resources/completions.ts#L28C1-L64C1
* Completions API. Streamed and non-streamed responses are the same.
Expand Down
26 changes: 25 additions & 1 deletion packages/core/streams/replicate-stream.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,31 @@
import { AIStream, type AIStreamCallbacksAndOptions } from './ai-stream';
import type { Prediction } from 'replicate';
import { createStreamDataTransformer } from './stream-data';

// from replicate SDK
interface Prediction {
id: string;
status: 'starting' | 'processing' | 'succeeded' | 'failed' | 'canceled';
version: string;
input: object;
output?: any;
source: 'api' | 'web';
error?: any;
logs?: string;
metrics?: {
predict_time?: number;
};
webhook?: string;
webhook_events_filter?: ('start' | 'output' | 'logs' | 'completed')[];
created_at: string;
updated_at: string;
completed_at?: string;
urls: {
get: string;
cancel: string;
stream?: string;
};
}

/**
* Stream predictions from Replicate.
* Only certain models are supported and you must pass `stream: true` to
Expand Down
4 changes: 2 additions & 2 deletions packages/core/svelte/use-chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ import type {
Message,
UseChatOptions,
ChatRequestOptions,
FunctionCall,
} from '../shared/types';
import { ChatCompletionMessage } from 'openai/resources/chat';
export type { Message, CreateMessage, UseChatOptions };

export type UseChatHelpers = {
Expand Down Expand Up @@ -173,7 +173,7 @@ const getStreamedResponse = async (

if (typeof responseMessage.function_call === 'string') {
// Once the stream is complete, the function call is parsed into an object.
const parsedFunctionCall: ChatCompletionMessage.FunctionCall = JSON.parse(
const parsedFunctionCall: FunctionCall = JSON.parse(
responseMessage.function_call,
).function_call;

Expand Down

0 comments on commit 6229d6b

Please sign in to comment.