
Commit

Add support for Anthropic SDK and newer Anthropic API Version(s) (#448)
Co-authored-by: Max Leiter <max.leiter@vercel.com>
Co-authored-by: Nilushanan Kulasingham <nkulasingham@gmail.com>
3 people committed Aug 14, 2023
1 parent e9c47b7 commit c2917d3
Showing 2 changed files with 87 additions and 14 deletions.
5 changes: 5 additions & 0 deletions .changeset/odd-coins-matter.md
@@ -0,0 +1,5 @@
+---
+'ai': patch
+---
+
+Add support for the Anthropic SDK, newer Anthropic API versions, and improve Anthropic error handling
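
For context, here is a minimal sketch of how the SDK path described in this changeset might be wired into a streaming route handler. Only the updated `AnthropicStream` signature comes from the diff below; the route shape, model name, prompt, environment variable, and `StreamingTextResponse` usage are illustrative assumptions.

// Hypothetical route handler; model, prompt, and env var are assumptions, not part of this commit.
import Anthropic from '@anthropic-ai/sdk'
import { AnthropicStream, StreamingTextResponse } from 'ai'

const anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY
})

export async function POST(req: Request) {
  const { prompt } = await req.json()

  // With stream: true the SDK returns an AsyncIterable of completion chunks,
  // which the updated AnthropicStream accepts directly (see the diff below).
  const response = await anthropic.completions.create({
    model: 'claude-2',
    prompt: `\n\nHuman: ${prompt}\n\nAssistant:`,
    max_tokens_to_sample: 300,
    stream: true
  })

  const stream = AnthropicStream(response)
  return new StreamingTextResponse(stream)
}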
96 changes: 82 additions & 14 deletions packages/core/streams/anthropic-stream.ts
@@ -1,33 +1,101 @@
-import { AIStream, type AIStreamCallbacks } from './ai-stream'
+import {
+  AIStream,
+  readableFromAsyncIterable,
+  type AIStreamCallbacks,
+  createCallbacksTransformer
+} from './ai-stream'
 
+// https://github.com/anthropics/anthropic-sdk-typescript/blob/0fc31f4f1ae2976afd0af3236e82d9e2c84c43c9/src/resources/completions.ts#L28-L49
+interface CompletionChunk {
+  /**
+   * The resulting completion up to and excluding the stop sequences.
+   */
+  completion: string
+
+  /**
+   * The model that performed the completion.
+   */
+  model: string
+
+  /**
+   * The reason that we stopped sampling.
+   *
+   * This may be one the following values:
+   *
+   * - `"stop_sequence"`: we reached a stop sequence — either provided by you via the
+   *   `stop_sequences` parameter, or a stop sequence built into the model
+   * - `"max_tokens"`: we exceeded `max_tokens_to_sample` or the model's maximum
+   */
+  stop_reason: string
+}
+
+interface StreamError {
+  error: {
+    type: string
+    message: string
+  }
+}
+
+interface StreamPing {}
+
+type StreamData = CompletionChunk | StreamError | StreamPing
+
 function parseAnthropicStream(): (data: string) => string | void {
   let previous = ''
 
   return data => {
-    const json = JSON.parse(data as string) as {
-      completion: string
-      stop: string | null
-      stop_reason: string | null
-      truncated: boolean
-      log_id: string
-      model: string
-      exception: string | null
+    const json = JSON.parse(data as string) as StreamData
+
+    // error event
+    if ('error' in json) {
+      throw new Error(`${json.error.type}: ${json.error.message}`)
     }
+
+    // ping event
+    if (!('completion' in json)) {
+      return
+    }
 
+    // On API versions older than 2023-06-01,
     // Anthropic's `completion` field is cumulative unlike OpenAI's
     // deltas. In order to compute the delta, we must slice out the text
     // we previously received.
     const text = json.completion
-    const delta = text.slice(previous.length)
-    previous = text
+    if (
+      !previous ||
+      (text.length > previous.length && text.startsWith(previous))
+    ) {
+      const delta = text.slice(previous.length)
+      previous = text
+
+      return delta
+    }
 
-    return delta
+    return text
   }
 }
 
+async function* streamable(stream: AsyncIterable<CompletionChunk>) {
+  for await (const chunk of stream) {
+    const text = chunk.completion
+    if (text) yield text
+  }
+}
+
+/**
+ * Accepts either a fetch Response from the Anthropic `POST /v1/complete` endpoint,
+ * or the return value of `await client.completions.create({ stream: true })`
+ * from the `@anthropic-ai/sdk` package.
+ */
 export function AnthropicStream(
-  res: Response,
+  res: Response | AsyncIterable<CompletionChunk>,
   cb?: AIStreamCallbacks
 ): ReadableStream {
-  return AIStream(res, parseAnthropicStream(), cb)
+  if (Symbol.asyncIterator in res) {
+    return readableFromAsyncIterable(streamable(res)).pipeThrough(
+      createCallbacksTransformer(cb)
+    )
+  } else {
+    return AIStream(res, parseAnthropicStream(), cb)
+  }
 }
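
To make the parser change concrete, here is a small sketch of how the guarded delta logic above behaves. `parseAnthropicStream` is module-internal (not exported), and the payloads below are assumed examples of the two wire formats, so this is illustrative only.

// Illustrative only: parseAnthropicStream is not exported, and these payloads are assumed examples.
const parse = parseAnthropicStream()

// Older API versions stream cumulative completions, so only the newly
// appended suffix is emitted:
parse(JSON.stringify({ completion: 'Hello' })) // -> 'Hello'
parse(JSON.stringify({ completion: 'Hello there' })) // -> ' there'

// 2023-06-01+ responses (and the SDK) send per-event deltas; these fail the
// "starts with previous" check and are passed through unchanged:
parse(JSON.stringify({ completion: '!' })) // -> '!'

// Error events are surfaced as thrown errors:
try {
  parse(JSON.stringify({ error: { type: 'overloaded_error', message: 'Overloaded' } }))
} catch (e) {
  // -> Error: 'overloaded_error: Overloaded'
}

// Ping events (no completion field) are ignored:
parse(JSON.stringify({})) // -> undefined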
