Skip to content

Commit

Permalink
Anthropic message support (#874)
Browse files Browse the repository at this point in the history
  • Loading branch information
lgrammel committed Dec 21, 2023
1 parent ac503e0 commit ef99062
Show file tree
Hide file tree
Showing 11 changed files with 312 additions and 38 deletions.
5 changes: 5 additions & 0 deletions .changeset/quick-papayas-notice.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'ai': patch
---

Add support for the Anthropic message API
17 changes: 10 additions & 7 deletions docs/pages/docs/guides/providers/anthropic.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -35,9 +35,8 @@ For this example, we'll create a route handler at `app/api/chat/route.ts` that a
```tsx filename="app/api/chat/route.ts" showLineNumbers
import Anthropic from '@anthropic-ai/sdk';
import { AnthropicStream, StreamingTextResponse } from 'ai';
import { experimental_buildAnthropicPrompt } from 'ai/prompts';

// Create an Anthropic API client (that's edge friendly??)
// Create an Anthropic API client (that's edge friendly)
const anthropic = new Anthropic({
apiKey: process.env.ANTHROPIC_API_KEY || '',
});
Expand All @@ -50,14 +49,16 @@ export async function POST(req: Request) {
const { messages } = await req.json();

// Ask Claude for a streaming chat completion given the prompt
const response = await anthropic.completions.create({
prompt: experimental_buildAnthropicPrompt(messages),
model: 'claude-2',
const response = await anthropic.beta.messages.create({
messages,
model: 'claude-2.1',
stream: true,
max_tokens_to_sample: 300,
max_tokens: 300,
});

// Convert the response into a friendly text-stream
const stream = AnthropicStream(response);

// Respond with the stream
return new StreamingTextResponse(stream);
}
Expand Down Expand Up @@ -124,7 +125,7 @@ import Anthropic from '@anthropic-ai/sdk';
import { AnthropicStream, StreamingTextResponse } from 'ai';
import { experimental_buildAnthropicPrompt } from 'ai/prompts';

// Create an Anthropic API client (that's edge friendly??)
// Create an Anthropic API client (that's edge friendly)
const anthropic = new Anthropic({
apiKey: process.env.ANTHROPIC_API_KEY || '',
});
Expand All @@ -143,8 +144,10 @@ export async function POST(req: Request) {
stream: true,
max_tokens_to_sample: 300,
});

// Convert the response into a friendly text-stream
const stream = AnthropicStream(response);

// Respond with the stream
return new StreamingTextResponse(stream);
}
Expand Down
28 changes: 28 additions & 0 deletions examples/next-anthropic/app/api/chat-completion/route.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import Anthropic from '@anthropic-ai/sdk';
import { AnthropicStream, StreamingTextResponse } from 'ai';
import { experimental_buildAnthropicPrompt } from 'ai/prompts';

// Create an Anthropic API client (that's edge friendly)
const anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY || '',
});

// IMPORTANT! Set the runtime to edge
export const runtime = 'edge';

// POST /api/chat-completion — streams a reply from Anthropic's legacy
// Completion API (kept as an example alongside the newer Messages API route).
export async function POST(req: Request) {
  // Extract the `messages` array from the body of the request
  const { messages } = await req.json();

  // Ask Claude for a streaming chat completion, converting the chat
  // messages into the single-string prompt the Completion API expects
  const response = await anthropic.completions.create({
    prompt: experimental_buildAnthropicPrompt(messages),
    model: 'claude-2',
    stream: true,
    max_tokens_to_sample: 300,
  });
  // Convert the response into a friendly text-stream
  const stream = AnthropicStream(response);
  // Respond with the stream
  return new StreamingTextResponse(stream);
}
13 changes: 7 additions & 6 deletions examples/next-anthropic/app/api/chat/route.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
import Anthropic from '@anthropic-ai/sdk';
import { AnthropicStream, StreamingTextResponse } from 'ai';
import { experimental_buildAnthropicPrompt } from 'ai/prompts';

// Create an Anthropic API client (that's edge friendly??)
// Create an Anthropic API client (that's edge friendly)
const anthropic = new Anthropic({
apiKey: process.env.ANTHROPIC_API_KEY || '',
});
Expand All @@ -15,14 +14,16 @@ export async function POST(req: Request) {
const { messages } = await req.json();

// Ask Claude for a streaming chat completion given the prompt
const response = await anthropic.completions.create({
prompt: experimental_buildAnthropicPrompt(messages),
model: 'claude-2',
const response = await anthropic.beta.messages.create({
messages,
model: 'claude-2.1',
stream: true,
max_tokens_to_sample: 300,
max_tokens: 300,
});

// Convert the response into a friendly text-stream
const stream = AnthropicStream(response);

// Respond with the stream
return new StreamingTextResponse(stream);
}
29 changes: 29 additions & 0 deletions examples/next-anthropic/app/chat-completion/page.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
'use client';

import { useChat } from 'ai/react';

// Chat UI wired to the legacy completion endpoint: renders the running
// message history and a fixed input box at the bottom of the viewport.
export default function Chat() {
  // useChat manages the message list, the controlled input value, and form
  // submission/streaming against the given API route.
  const { messages, input, handleInputChange, handleSubmit } = useChat({
    api: '/api/chat-completion',
  });

  return (
    <div className="flex flex-col w-full max-w-md py-24 mx-auto stretch">
      {/* Render each message, prefixed by its sender's role */}
      {messages.map(m => (
        <div key={m.id} className="whitespace-pre-wrap">
          {m.role === 'user' ? 'User: ' : 'AI: '}
          {m.content}
        </div>
      ))}

      {/* Submitting sends the current input to the API route */}
      <form onSubmit={handleSubmit}>
        <input
          className="fixed bottom-0 w-full max-w-md p-2 mb-8 border border-gray-300 rounded shadow-xl"
          value={input}
          placeholder="Say something..."
          onChange={handleInputChange}
        />
      </form>
    </div>
  );
}
2 changes: 1 addition & 1 deletion examples/next-anthropic/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
"lint": "next lint"
},
"dependencies": {
"@anthropic-ai/sdk": "0.10.0",
"@anthropic-ai/sdk": "0.12.0",
"ai": "2.2.29",
"next": "14.0.3",
"react": "18.2.0",
Expand Down
2 changes: 1 addition & 1 deletion packages/core/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@
"swrv": "1.0.4"
},
"devDependencies": {
"@anthropic-ai/sdk": "0.10.0",
"@anthropic-ai/sdk": "0.12.0",
"@aws-sdk/client-bedrock-runtime": "3.451.0",
"@edge-runtime/vm": "^3.1.7",
"@google/generative-ai": "0.1.1",
Expand Down
130 changes: 118 additions & 12 deletions packages/core/streams/anthropic-stream.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,32 +4,42 @@ import {
StreamingTextResponse,
experimental_StreamData,
} from '.';
import { anthropicChunks } from '../tests/snapshots/anthropic';
import {
anthropicMessageChunks,
anthropicCompletionChunks,
} from '../tests/snapshots/anthropic';
import { readAllChunks } from '../tests/utils/mock-client';
import { DEFAULT_TEST_URL, createMockServer } from '../tests/utils/mock-server';

// Dedicated endpoint for the (beta) Messages API so the same mock server
// can serve both the legacy Completion API and the Messages API.
export const MESSAGE_URL = 'http://localhost:3030/messages';

const server = createMockServer([
  {
    // Legacy Completion API: snapshot chunks are JSON objects that must be
    // wrapped in `event: completion` SSE frames before sending.
    url: DEFAULT_TEST_URL,
    chunks: anthropicCompletionChunks,
    formatChunk: chunk =>
      `event: completion\ndata: ${JSON.stringify(chunk)}\n\n`,
  },
  {
    // Messages API: chunks are passed through unchanged — presumably the
    // snapshots are already complete SSE frames; verify against the snapshot
    // file if this changes.
    url: MESSAGE_URL,
    chunks: anthropicMessageChunks,
    formatChunk: chunk => chunk,
  },
]);

describe('AnthropicStream', () => {
beforeAll(() => {
server.listen();
});
beforeAll(() => {
server.listen();
});

afterEach(() => {
server.resetHandlers();
});
afterEach(() => {
server.resetHandlers();
});

afterAll(() => {
server.close();
});
afterAll(() => {
server.close();
});

describe('Anthropic completion', () => {
it('should be able to parse SSE and receive the streamed response', async () => {
const anthropic = new Anthropic({
fetch: () => fetch(DEFAULT_TEST_URL),
Expand Down Expand Up @@ -124,3 +134,99 @@ describe('AnthropicStream', () => {
});
});
});

// Tests for AnthropicStream when fed the (beta) Messages API stream.
describe('Anthropic message', () => {
  it('should be able to parse SSE and receive the streamed response', async () => {
    // Route the SDK's fetch to the mock Messages endpoint; the key is
    // required by the client constructor but never checked by the mock.
    const anthropic = new Anthropic({
      fetch: () => fetch(MESSAGE_URL),
      apiKey: 'sk-doesnt-matter',
    });

    const anthropicResponse = await anthropic.beta.messages.create({
      messages: [{ role: 'user', content: 'Hello' }],
      model: 'claude-2.1',
      stream: true,
      max_tokens: 300,
    });

    const stream = AnthropicStream(anthropicResponse);

    const response = new StreamingTextResponse(stream);

    // Plain text-stream mode: each streamed delta surfaces as raw text.
    expect(await readAllChunks(response)).toEqual([
      'Hello',
      ',',
      ' world',
      '.',
    ]);
  });

  describe('StreamData protocol', () => {
    it('should send text', async () => {
      const anthropic = new Anthropic({
        fetch: () => fetch(MESSAGE_URL),
        apiKey: 'sk-doesnt-matter',
      });

      const data = new experimental_StreamData();

      const anthropicResponse = await anthropic.beta.messages.create({
        messages: [{ role: 'user', content: 'Hello' }],
        model: 'claude-2.1',
        stream: true,
        max_tokens: 300,
      });

      // Close the StreamData side channel once the text stream completes
      // so the combined response can finish.
      const stream = AnthropicStream(anthropicResponse, {
        onFinal() {
          data.close();
        },
        experimental_streamData: true,
      });

      const response = new StreamingTextResponse(stream, {}, data);

      // StreamData protocol: text deltas arrive as `0:`-prefixed,
      // JSON-encoded frames.
      expect(await readAllChunks(response)).toEqual([
        '0:"Hello"\n',
        '0:","\n',
        '0:" world"\n',
        '0:"."\n',
      ]);
    });

    it('should send text and data', async () => {
      const anthropic = new Anthropic({
        fetch: () => fetch(MESSAGE_URL),
        apiKey: 'sk-doesnt-matter',
      });

      const data = new experimental_StreamData();

      // Appended data is flushed before the text frames, as a `2:` record
      // (see the expected chunk order below).
      data.append({ t1: 'v1' });

      const anthropicResponse = await anthropic.beta.messages.create({
        messages: [{ role: 'user', content: 'Hello' }],
        model: 'claude-2.1',
        stream: true,
        max_tokens: 300,
      });

      const stream = AnthropicStream(anthropicResponse, {
        onFinal() {
          data.close();
        },
        experimental_streamData: true,
      });

      const response = new StreamingTextResponse(stream, {}, data);

      expect(await readAllChunks(response)).toEqual([
        '2:[{"t1":"v1"}]\n',
        '0:"Hello"\n',
        '0:","\n',
        '0:" world"\n',
        '0:"."\n',
      ]);
    });
  });
});

0 comments on commit ef99062

Please sign in to comment.