Update Anthropic SDK to 0.5.X to support Claude V2 (#1932)
* Update Anthropic SDK

* Refactor, add back in (hacky) batch request abort capability

* Update Next version to add polyfill for Anthropic

---------

Co-authored-by: Christopher Woolum <woolumc@amazon.com>
Co-authored-by: jacoblee93 <jacoblee93@gmail.com>
3 people committed Jul 11, 2023
1 parent b9acc21 commit 8da80b1
Showing 5 changed files with 249 additions and 133 deletions.
2 changes: 1 addition & 1 deletion langchain/package.json
@@ -777,7 +777,7 @@
     }
   },
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.4.3",
+    "@anthropic-ai/sdk": "^0.5.3",
     "ansi-styles": "^5.0.0",
     "binary-extensions": "^2.2.0",
     "camelcase": "6",
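
The version bump above drives every other change in this commit: the 0.5.x SDK replaces the positional new Client(apiKey, options) constructor and the complete/completeStream methods with an options-object constructor and a completions resource. A minimal sketch of the new call surface, assuming an ANTHROPIC_API_KEY environment variable is set (the model name and prompt are illustrative, not part of the commit):

import { AI_PROMPT, Anthropic, HUMAN_PROMPT } from "@anthropic-ai/sdk";

async function main() {
  // 0.5.x: options object instead of positional (apiKey, options)
  const client = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

  // Completions now live on a `completions` resource rather than client.complete()
  const response = await client.completions.create({
    model: "claude-2",
    prompt: `${HUMAN_PROMPT} Hello!${AI_PROMPT}`,
    max_tokens_to_sample: 256,
  });
  console.log(response.completion);
}

main();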
112 changes: 57 additions & 55 deletions langchain/src/chat_models/anthropic.ts
@@ -1,21 +1,21 @@
 import {
   AI_PROMPT,
+  Anthropic as AnthropicApi,
   HUMAN_PROMPT,
-  Client as AnthropicApi,
-  CompletionResponse,
-  SamplingParameters,
 } from "@anthropic-ai/sdk";
-import { BaseChatModel, BaseChatModelParams } from "./base.js";
+import type { CompletionCreateParams } from "@anthropic-ai/sdk/resources/completions";
+
+import { BaseLanguageModelCallOptions } from "../base_language/index.js";
+import { CallbackManagerForLLMRun } from "../callbacks/manager.js";
 import {
   AIMessage,
   BaseMessage,
   ChatGeneration,
   ChatResult,
   MessageType,
 } from "../schema/index.js";
-import { CallbackManagerForLLMRun } from "../callbacks/manager.js";
-import { BaseLanguageModelCallOptions } from "../base_language/index.js";
 import { getEnvironmentVariable } from "../util/env.js";
+import { BaseChatModel, BaseChatModelParams } from "./base.js";
 
 function getAnthropicPromptFromMessage(type: MessageType): string {
   switch (type) {
@@ -176,7 +176,7 @@ export class ChatAnthropic extends BaseChatModel implements AnthropicInput {
    */
   invocationParams(
     options?: this["ParsedCallOptions"]
-  ): Omit<SamplingParameters, "prompt"> & Kwargs {
+  ): Omit<CompletionCreateParams, "prompt"> & Kwargs {
     return {
       model: this.modelName,
       temperature: this.temperature,
@@ -259,71 +259,73 @@ export class ChatAnthropic extends BaseChatModel implements AnthropicInput {
 
   /** @ignore */
   private async completionWithRetry(
-    request: SamplingParameters & Kwargs,
+    request: CompletionCreateParams & Kwargs,
     options: { signal?: AbortSignal },
     runManager?: CallbackManagerForLLMRun
-  ): Promise<CompletionResponse> {
+  ): Promise<AnthropicApi.Completions.Completion> {
     if (!this.anthropicApiKey) {
       throw new Error("Missing Anthropic API key.");
     }
-    let makeCompletionRequest;
+    let makeCompletionRequest: () => Promise<AnthropicApi.Completions.Completion>;
+
+    let asyncCallerOptions = {};
     if (request.stream) {
       if (!this.streamingClient) {
         const options = this.apiUrl ? { apiUrl: this.apiUrl } : undefined;
-        this.streamingClient = new AnthropicApi(this.anthropicApiKey, options);
+        this.streamingClient = new AnthropicApi({
+          ...options,
+          apiKey: this.anthropicApiKey,
+        });
       }
       makeCompletionRequest = async () => {
-        let currentCompletion = "";
-        return (
-          this.streamingClient
-            .completeStream(request, {
-              onUpdate: (data: CompletionResponse) => {
-                if (data.stop_reason) {
-                  return;
-                }
-                const part = data.completion;
-                if (part) {
-                  const delta = part.slice(currentCompletion.length);
-                  currentCompletion += delta ?? "";
-                  // eslint-disable-next-line no-void
-                  void runManager?.handleLLMNewToken(delta ?? "");
-                }
-              },
-              signal: options.signal,
-            })
-            // eslint-disable-next-line @typescript-eslint/no-explicit-any
-            .catch((e: any) => {
-              // Anthropic doesn't actually throw JavaScript error objects at the moment.
-              // We convert the error so the async caller can recognize it correctly.
-              if (e?.name === "AbortError") {
-                throw new Error(`${e.name}: ${e.message}`);
-              }
-              throw e;
-            })
-        );
+        const stream = await this.streamingClient.completions.create({
+          ...request,
+        });
+
+        const completion: AnthropicApi.Completion = {
+          completion: "",
+          model: "",
+          stop_reason: "",
+        };
+
+        for await (const data of stream) {
+          completion.stop_reason = data.stop_reason;
+          completion.model = data.model;
+
+          if (options.signal?.aborted) {
+            stream.controller.abort();
+            throw new Error("AbortError: User aborted the request.");
+          }
+
+          if (data.stop_reason) {
+            break;
+          }
+          const part = data.completion;
+          if (part) {
+            completion.completion += part;
+            // eslint-disable-next-line no-void
+            void runManager?.handleLLMNewToken(part ?? "");
+          }
+        }
+
+        return completion;
       };
     } else {
       if (!this.batchClient) {
         const options = this.apiUrl ? { apiUrl: this.apiUrl } : undefined;
-        this.batchClient = new AnthropicApi(this.anthropicApiKey, options);
+        this.batchClient = new AnthropicApi({
+          ...options,
+          apiKey: this.anthropicApiKey,
+        });
       }
+      asyncCallerOptions = { signal: options.signal };
       makeCompletionRequest = async () =>
-        this.batchClient
-          .complete(request, {
-            signal: options.signal,
-          })
-          // eslint-disable-next-line @typescript-eslint/no-explicit-any
-          .catch((e: any) => {
-            console.log(e);
-            // Anthropic doesn't actually throw JavaScript error objects at the moment.
-            // We convert the error so the async caller can recognize it correctly.
-            if (e?.type === "aborted") {
-              throw new Error(`${e.name}: ${e.message}`);
-            }
-            throw e;
-          });
+        this.batchClient.completions.create({ ...request });
     }
-    return this.caller.call(makeCompletionRequest);
+    return this.caller.callWithOptions(
+      asyncCallerOptions,
+      makeCompletionRequest
+    );
   }
 
   _llmType() {
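
The heart of the hunk above is the streaming rewrite: the old callback-based completeStream(request, { onUpdate }) becomes a request with stream: true that returns an async iterable, where each event carries a text delta in its completion field and cancellation goes through stream.controller.abort(). A standalone sketch of that pattern, reusing the hypothetical client from the earlier example (inside an async function):

const stream = await client.completions.create({
  model: "claude-2",
  prompt: `${HUMAN_PROMPT} Tell me a short story.${AI_PROMPT}`,
  max_tokens_to_sample: 512,
  stream: true,
});

for await (const event of stream) {
  process.stdout.write(event.completion); // each event is a delta, not a running total
  if (event.stop_reason) break; // mirrors the early exit in the wrapper above
}

// To cancel mid-stream, as the wrapper does when its AbortSignal fires:
// stream.controller.abort();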
29 changes: 27 additions & 2 deletions langchain/src/chat_models/tests/chatanthropic.int.test.ts
@@ -39,12 +39,16 @@ test("Test ChatAnthropic Generate with a signal in call options", async () => {
     modelName: "claude-instant-v1",
   });
   const controller = new AbortController();
-  const message = new HumanMessage("Hello!");
+  const message = new HumanMessage(
+    "How is your day going? Be extremely verbose!"
+  );
   await expect(() => {
     const res = chat.generate([[message], [message]], {
       signal: controller.signal,
     });
-    controller.abort();
+    setTimeout(() => {
+      controller.abort();
+    }, 500);
     return res;
   }).rejects.toThrow();
 }, 5000);
@@ -187,3 +191,24 @@ test("ChatAnthropic, Anthropic apiUrl set manually via constructor", async () =>
   const res = await chat.call([message]);
   console.log({ res });
 });
+
+test("ChatAnthropic, Claude V2", async () => {
+  const chat = new ChatAnthropic({
+    modelName: "claude-2",
+    temperature: 0,
+  });
+
+  const chatPrompt = ChatPromptTemplate.fromPromptMessages([
+    HumanMessagePromptTemplate.fromTemplate(`Hi, my name is Joe!`),
+    AIMessagePromptTemplate.fromTemplate(`Nice to meet you, Joe!`),
+    HumanMessagePromptTemplate.fromTemplate("{text}"),
+  ]);
+
+  const responseA = await chat.generatePrompt([
+    await chatPrompt.formatPromptValue({
+      text: "What did I just say my name was?",
+    }),
+  ]);
+
+  console.log(responseA.generations);
+});
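
Outside the test suite, the simplest way to exercise the new model through the wrapper is a direct call. A minimal end-user sketch; the entrypoint paths and message class are assumed from the public langchain API of this era, not taken from the commit:

import { ChatAnthropic } from "langchain/chat_models/anthropic";
import { HumanMessage } from "langchain/schema";

async function main() {
  const model = new ChatAnthropic({ modelName: "claude-2", temperature: 0 });
  // call() resolves to an AIMessage with the model's reply
  const res = await model.call([new HumanMessage("Hello, Claude 2!")]);
  console.log(res.content);
}

main();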
2 changes: 1 addition & 1 deletion test-exports-vercel/package.json
@@ -15,7 +15,7 @@
     "eslint": "8.37.0",
     "eslint-config-next": "13.3.0",
     "langchain": "workspace:*",
-    "next": "13.3.0",
+    "next": "13.4.9",
     "react": "18.2.0",
     "react-dom": "18.2.0",
     "typescript": "^5.0.0"
