Skip to content

Commit

Permalink
Fix function definitions (#229)
Browse files Browse the repository at this point in the history
Previously, ai-jsx did not pass all function params (e.g. `description`)
to the OpenAI API, even when it accepted them. Now it does.

This also adds the beginning of a test runner.
  • Loading branch information
NickHeiner committed Aug 4, 2023
1 parent 18114b8 commit 4ff41e2
Show file tree
Hide file tree
Showing 14 changed files with 278 additions and 31 deletions.
7 changes: 0 additions & 7 deletions packages/ai-jsx/.eslintrc.cjs
Expand Up @@ -56,11 +56,4 @@ module.exports = {
'@typescript-eslint/no-base-to-string': 'error',
'@typescript-eslint/no-unnecessary-condition': ['warn', { allowConstantLoopConditions: true }],
},

overrides: [
{
files: ['*.test.ts', '*.test.tsx'],
plugins: ['jest'],
},
],
};
2 changes: 1 addition & 1 deletion packages/ai-jsx/package.json
Expand Up @@ -4,7 +4,7 @@
"repository": "fixie-ai/ai-jsx",
"bugs": "https://github.com/fixie-ai/ai-jsx/issues",
"homepage": "https://ai-jsx.com",
"version": "0.8.1",
"version": "0.8.2",
"volta": {
"extends": "../../package.json"
},
Expand Down
4 changes: 3 additions & 1 deletion packages/ai-jsx/src/batteries/use-tools.tsx
Expand Up @@ -114,7 +114,9 @@ export interface Tool {
* A function to invoke the tool.
*/
// Can we use Zod to do better than any[]?
func: (...args: any[]) => string | number | boolean | null | undefined | Promise<string | number | boolean | null>;
func: (
...args: any[]
) => string | number | boolean | null | undefined | Promise<string | number | boolean | null | undefined>;
}

/**
Expand Down
17 changes: 13 additions & 4 deletions packages/ai-jsx/src/core/completion.tsx
Expand Up @@ -8,9 +8,11 @@ import { Node, Component, RenderContext } from '../index.js';
import { AIJSXError, ErrorCode } from '../core/errors.js';
import { OpenAIChatModel, OpenAICompletionModel } from '../lib/openai.js';
import { getEnvVar } from '../lib/util.js';
import { ChatCompletionFunctions } from 'openai';
import { AnthropicChatModel } from '../lib/anthropic.js';
import z from 'zod';
import { zodToJsonSchema } from 'zod-to-json-schema';
import _ from 'lodash';
export {
UserMessage,
SystemMessage,
Expand Down Expand Up @@ -54,7 +56,7 @@ export type ModelComponent<T extends ModelPropsWithChildren> = Component<T>;
* Represents a function definition that can be invoked using the {@link FunctionCall} component.
*/
export interface FunctionDefinition {
description?: string;
description?: ChatCompletionFunctions['description'];
parameters: FunctionParameters;
}

Expand All @@ -74,9 +76,7 @@ export function getParametersSchema(parameters: FunctionParameters) {
properties: Object.keys(parameters).reduce(
(map: Record<string, any>, paramName) => ({
...map,
[paramName]: {
type: parameters[paramName].type,
},
[paramName]: _.omit(parameters[paramName], 'required'),
}),
{}
),
Expand All @@ -85,10 +85,19 @@ export function getParametersSchema(parameters: FunctionParameters) {

/**
* Represents parameters to a {@link FunctionDefinition}.
*
* This is a simplified version of the `parameters` field in {@link ChatCompletionFunctions}: https://platform.openai.com/docs/api-reference/chat/create#chat/create-parameters.
*
*
* If you want to pass a field to {@link FunctionParameters} that isn't supported on this type, you can use a {@link z.ZodObject} schema instead.
*/
export interface PlainFunctionParameter {
description?: string;
type?: string;
/**
* The possible values this param can take.
*/
enum?: string[];
required: boolean;
}

Expand Down
27 changes: 15 additions & 12 deletions packages/ai-jsx/src/lib/openai.tsx
Expand Up @@ -115,6 +115,10 @@ export function OpenAI({
return result;
}

export const SSE_PREFIX = 'data: ';
export const SSE_TERMINATOR = '\n\n';
export const SSE_FINAL_EVENT = '[DONE]';

/**
* Parses an OpenAI SSE response stream according to:
* - https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events
Expand All @@ -123,10 +127,6 @@ export function OpenAI({
* @returns An async generator that yields the parsed JSON objects from the stream.
*/
async function* openAiEventsToJson<T>(iterable: AsyncIterable<String>): AsyncGenerator<T> {
const SSE_PREFIX = 'data: ';
const SSE_TERMINATOR = '\n\n';
const SSE_FINAL_EVENT = '[DONE]';

let bufferedContent = '';

for await (const chunk of iterable) {
Expand Down Expand Up @@ -353,6 +353,16 @@ async function tokenCountForConversationMessage(
}
}

export type ChatCompletionDelta = Merge<
CreateChatCompletionResponse,
{
choices: {
delta: Partial<ChatCompletionResponseMessage>;
finish_reason?: string;
}[];
}
>;

/**
* Represents an OpenAI text chat model (e.g., `gpt-4`).
*/
Expand All @@ -362,7 +372,7 @@ export async function* OpenAIChatModel(
logitBias?: Record<string, number>;
} & MergeExclusive<
{
functionDefinitions: Record<string, FunctionDefinition>;
functionDefinitions: Record<ChatCompletionFunctions['name'], FunctionDefinition>;
forcedFunction: string;
},
{
Expand Down Expand Up @@ -482,13 +492,6 @@ export async function* OpenAIChatModel(

await checkOpenAIResponse(chatResponse, logger, 'createChatCompletion');

type ChatCompletionDelta = Merge<
CreateChatCompletionResponse,
{
choices: { delta: Partial<ChatCompletionResponseMessage>; finish_reason: string | undefined }[];
}
>;

const iterator = openAiEventsToJson<ChatCompletionDelta>(asyncIteratorOfFetchStream(chatResponse.body!.getReader()))[
Symbol.asyncIterator
]();
Expand Down
6 changes: 5 additions & 1 deletion packages/docs/docs/changelog.md
@@ -1,6 +1,10 @@
# Changelog

## 0.8.1
## 0.8.2

- Fix issue where the `description` field wasn't passed to function definitions.

## [0.8.1](https://github.com/fixie-ai/ai-jsx/commit/c6dfba422761f23ad4939c746a4a369385dc1f36)

- Add support for token-based conversation shrinking via `<Shrinkable>`.

Expand Down
13 changes: 12 additions & 1 deletion packages/examples/.eslintrc.cjs
Expand Up @@ -4,7 +4,7 @@ module.exports = {
extends: ['eslint:recommended', 'plugin:@typescript-eslint/strict', 'nth'],
parser: '@typescript-eslint/parser',
parserOptions: {
project: [path.join(__dirname, 'tsconfig.json')],
project: [path.join(__dirname, 'tsconfig.json'), path.join(__dirname, 'test', 'tsconfig.json')],
},
plugins: ['@typescript-eslint'],
root: true,
Expand All @@ -17,6 +17,7 @@ module.exports = {
'no-unused-vars': 'off',
'@typescript-eslint/no-unused-vars': ['warn', { ignoreRestSiblings: true, argsIgnorePattern: '^_' }],

'no-undef': 'off',
'no-magic-numbers': 'off',
'@typescript-eslint/no-magic-numbers': 'off',

Expand Down Expand Up @@ -48,4 +49,14 @@ module.exports = {
'@typescript-eslint/no-base-to-string': 'error',
'@typescript-eslint/no-unnecessary-condition': ['warn', { allowConstantLoopConditions: true }],
},

overrides: [
{
files: ['test/**/*.ts', 'test/**/*.tsx'],
plugins: ['jest'],
env: {
'jest/globals': true,
},
},
],
};
18 changes: 18 additions & 0 deletions packages/examples/jest.config.ts
@@ -0,0 +1,18 @@
import type { Config } from 'jest';

/**
 * Jest configuration for the examples package.
 *
 * Test suites live under `test/` and are compiled on the fly by ts-jest
 * against the test-specific tsconfig.
 */
const config: Config = {
  testEnvironment: 'node',
  preset: 'ts-jest',
  // Only files under test/ are treated as test suites.
  testMatch: ['<rootDir>/test/**/*.ts', '<rootDir>/test/**/*.tsx'],
  // Never recurse into dependencies or build output.
  testPathIgnorePatterns: ['/node_modules/', '/dist/'],
  transform: {
    // Compile .ts/.tsx sources with ts-jest using the test tsconfig.
    '^.+\\.tsx?$': ['ts-jest', { tsconfig: '<rootDir>/test/tsconfig.json' }],
  },
};

export default config;
14 changes: 12 additions & 2 deletions packages/examples/package.json
Expand Up @@ -5,7 +5,9 @@
"extends": "../../package.json"
},
"devDependencies": {
"@jest/globals": "^29.5.0",
"@tsconfig/node18": "^2.0.1",
"@types/jest": "^29.5.2",
"@types/lodash": "^4.14.195",
"@types/node": "^20.3.1",
"@types/prompt-sync": "^4.2.0",
Expand All @@ -14,6 +16,12 @@
"@typescript-eslint/parser": "^5.60.0",
"eslint": "^8.40.0",
"eslint-config-nth": "^2.0.1",
"eslint-plugin-jest": "^27.2.2",
"jest": "^29.5.0",
"jest-fetch-mock": "^3.0.3",
"openai": "^3.3.0",
"ts-jest": "^29.1.0",
"ts-node": "^10.9.1",
"typescript": "^5.1.3"
},
"private": true,
Expand Down Expand Up @@ -55,8 +63,10 @@
"view-logs": "cat ai-jsx.log | pino-pretty",
"lint": "eslint . --max-warnings 0",
"lint:fix": "eslint . --fix",
"typecheck": "tsc -p tsconfig.json",
"build": "yarn run typecheck"
"typecheck": "tsc -p tsconfig.json && tsc -p test/tsconfig.json",
"build": "yarn run typecheck",
"unit": "jest",
"test": "yarn run typecheck && yarn run unit"
},
"dependencies": {
"@opentelemetry/api": "^1.4.1",
Expand Down
138 changes: 138 additions & 0 deletions packages/examples/test/core/completion.tsx
@@ -0,0 +1,138 @@
import { Readable } from 'stream';

/**
 * This is a hack to let jest-fetch-mock handle response streams.
 * https://github.com/jefflau/jest-fetch-mock/issues/113#issuecomment-1445010122
 *
 * NOTE(review): the mocked fetch apparently cannot accept a WHATWG
 * `ReadableStream` body directly, so this shim converts it to a Node
 * `Readable` before delegating to the real `Response` constructor —
 * see the linked issue for details.
 */
class TempResponse extends Response {
  constructor(...args: any[]) {
    // args[0] is the response body; only convert when it is a web stream.
    if (args[0] instanceof ReadableStream) {
      // @ts-expect-error -- Node's Readable is not a valid BodyInit type,
      // but works at runtime with the mocked fetch.
      args[0] = Readable.from(args[0]);
    }
    super(...args);
  }
}
// Install the shim as the global Response so the fetch mock uses it.
Object.defineProperty(global, 'Response', {
  value: TempResponse,
});
/**
 * End hack
 */

import jestFetchMock from 'jest-fetch-mock';
/**
 * This _must_ occur before importing ai-jsx code. Otherwise, the mock won't be enabled.
 */
jestFetchMock.enableFetchMocks();

// Dummy credentials: all network calls are intercepted by the fetch mock,
// but the model clients presumably check for these env vars at import
// time — TODO confirm against ai-jsx's client initialization.
process.env.OPENAI_API_KEY = 'fake-openai-key';
process.env.ANTHROPIC_API_KEY = 'fake-anthropic-key';

import * as AI from 'ai-jsx';
import { ChatCompletion, UserMessage } from 'ai-jsx/core/completion';
import { ChatCompletionDelta, SSE_FINAL_EVENT, SSE_PREFIX, SSE_TERMINATOR } from 'ai-jsx/lib/openai';
import { Tool } from 'ai-jsx/batteries/use-tools';

// Smoke test: a basic <ChatCompletion> renders to the mocked model output.
// (Description fixed: was the garbled "passes creates a chat completion".)
it('creates a chat completion', async () => {
  mockOpenAIResponse('response from OpenAI');

  const result = await AI.createRenderContext().render(
    <ChatCompletion>
      <UserMessage>Hello</UserMessage>
    </ChatCompletion>
  );
  expect(result).toEqual('response from OpenAI');
});

it('passes all function fields', async () => {
  // A single tool whose one parameter exercises every PlainFunctionParameter
  // field: description, type, enum, and required.
  const toolDefinitions: Record<string, Tool> = {
    myFunc: {
      description: 'My function',
      parameters: {
        myParam: {
          description: 'My parameter',
          type: 'string',
          enum: ['option1', 'option2'],
          required: true,
        },
      },
      func: () => undefined,
    },
  };

  const requestSpy = jest.fn();
  mockOpenAIResponse('', requestSpy);

  await AI.createRenderContext().render(
    <ChatCompletion functionDefinitions={toolDefinitions}>
      <UserMessage>Hello</UserMessage>
    </ChatCompletion>
  );

  // The OpenAI request body must carry the full JSON-schema translation of
  // the tool definition, including description/enum/required.
  const expectedFunctions = [
    {
      name: 'myFunc',
      description: 'My function',
      parameters: {
        type: 'object',
        required: ['myParam'],
        properties: {
          myParam: {
            type: 'string',
            enum: ['option1', 'option2'],
            description: 'My parameter',
          },
        },
      },
    },
  ];
  expect(requestSpy).toHaveBeenCalledWith(expect.objectContaining({ functions: expectedFunctions }));
});

/**
 * Configures the fetch mock to answer OpenAI chat-completion requests by
 * streaming `message` back as SSE delta events, one character per event.
 *
 * @param message The assistant message content to stream back.
 * @param handleRequest Optional spy invoked with the parsed JSON request body.
 */
function mockOpenAIResponse(message: string, handleRequest?: jest.MockedFn<(req: Request) => Promise<void>>) {
  fetchMock.mockIf(
    /^https:\/\/api.openai.com\/v1\/chat\/completions/,
    // This is a hack to let jest-fetch-mock handle response streams.
    // @ts-expect-error
    async (req) => {
      handleRequest?.(await req.json());

      const sseStream = new ReadableStream({
        start(controller) {
          // Enqueue one SSE event containing a single chat-completion delta.
          const emitDelta = (messagePart: string) => {
            const response: ChatCompletionDelta = {
              id: 'cmpl-3QJ8ZjX1J5Z5X',
              object: 'text_completion',
              created: 1624430979,
              model: 'gpt-3.5-turbo',
              choices: [
                {
                  delta: {
                    role: 'assistant',
                    content: messagePart,
                  },
                },
              ],
            };
            controller.enqueue(`${SSE_PREFIX}${JSON.stringify(response)}${SSE_TERMINATOR}`);
          };

          // Stream character-by-character, then the terminal sentinel event.
          for (const char of message) {
            emitDelta(char);
          }
          controller.enqueue(SSE_FINAL_EVENT);
          controller.close();
        },
      }).pipeThrough(new TextEncoderStream());

      return {
        status: 200,
        body: sseStream,
      };
    }
  );
}
@@ -1,4 +1,4 @@
import { AIJSXError } from '../../dist/cjs/core/errors.cjs';
import { AIJSXError } from 'ai-jsx/core/errors';

test('Message is formatted as expected', () => {
expect(new AIJSXError('message', 1000, 'user', { checkedNames: ['name1', 'name2'] }).toString())
Expand Down

3 comments on commit 4ff41e2

@vercel
Copy link

@vercel vercel bot commented on 4ff41e2 Aug 4, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Successfully deployed to the following URLs:

ai-jsx-docs – ./packages/docs

docs.ai-jsx.com
ai-jsx-docs-git-main-fixie-ai.vercel.app
ai-jsx-docs-fixie-ai.vercel.app
ai-jsx-docs.vercel.app

@vercel
Copy link

@vercel vercel bot commented on 4ff41e2 Aug 4, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Successfully deployed to the following URLs:

ai-jsx-nextjs-demo – ./packages/nextjs-demo

ai-jsx-nextjs-demo-git-main-fixie-ai.vercel.app
ai-jsx-nextjs-demo.vercel.app
ai-jsx-nextjs-demo-fixie-ai.vercel.app

@vercel
Copy link

@vercel vercel bot commented on 4ff41e2 Aug 4, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Successfully deployed to the following URLs:

ai-jsx-tutorial-nextjs – ./packages/tutorial-nextjs

ai-jsx-tutorial-nextjs-git-main-fixie-ai.vercel.app
ai-jsx-tutorial-nextjs.vercel.app
ai-jsx-tutorial-nextjs-fixie-ai.vercel.app

Please sign in to comment.