Skip to content

Commit

Permalink
Feature: Add handleEvent callback (#2025)
Browse files Browse the repository at this point in the history
* add callback to get function call

* use camel case in function parameters

* introduce handleEvent callback

* change param name in callbacks test function

* Extend handleLLMNewToken to accept function calls events

* Use built-in types

* Fix build

* Backwards compatibility fixes

---------

Co-authored-by: jacoblee93 <jacoblee93@gmail.com>
  • Loading branch information
mgce and jacoblee93 committed Aug 25, 2023
1 parent 4d5b541 commit 1c0bdae
Show file tree
Hide file tree
Showing 4 changed files with 50 additions and 11 deletions.
10 changes: 9 additions & 1 deletion langchain/src/callbacks/base.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ import {
AgentFinish,
BaseMessage,
ChainValues,
ChatGenerationChunk,
GenerationChunk,
LLMResult,
} from "../schema/index.js";
import {
Expand Down Expand Up @@ -39,6 +41,11 @@ export interface NewTokenIndices {
completion: number;
}

// TODO: Add all additional callback fields here
/**
 * Optional extra data passed to `handleLLMNewToken` beyond the raw token
 * string. Introduced as an extensible bag so new fields can be added without
 * another signature change.
 *
 * `chunk` — the full generation chunk the token came from (chat or plain
 * LLM variant); when present, tracers record it in the `new_token` event
 * alongside `token` and `idx`.
 */
export type HandleLLMNewTokenCallbackFields = {
chunk?: GenerationChunk | ChatGenerationChunk;
};

/**
* Abstract class that provides a set of optional methods that can be
* overridden in derived classes to handle various events during the
Expand Down Expand Up @@ -73,7 +80,8 @@ abstract class BaseCallbackHandlerMethodsClass {
idx: NewTokenIndices,
runId: string,
parentRunId?: string,
tags?: string[]
tags?: string[],
fields?: HandleLLMNewTokenCallbackFields
): Promise<void> | void;

/**
Expand Down
8 changes: 6 additions & 2 deletions langchain/src/callbacks/handlers/tracer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import { Serialized } from "../../load/serializable.js";
import {
BaseCallbackHandler,
BaseCallbackHandlerInput,
HandleLLMNewTokenCallbackFields,
NewTokenIndices,
} from "../base.js";
import { Document } from "../../document.js";
Expand Down Expand Up @@ -439,7 +440,10 @@ export abstract class BaseTracer extends BaseCallbackHandler {
async handleLLMNewToken(
token: string,
idx: NewTokenIndices,
runId: string
runId: string,
_parentRunId?: string,
_tags?: string[],
fields?: HandleLLMNewTokenCallbackFields
): Promise<void> {
const run = this.runMap.get(runId);
if (!run || run?.run_type !== "llm") {
Expand All @@ -448,7 +452,7 @@ export abstract class BaseTracer extends BaseCallbackHandler {
run.events.push({
name: "new_token",
time: Date.now(),
kwargs: { token, idx },
kwargs: { token, idx, chunk: fields?.chunk },
});
await this.onLLMNewToken?.(run);
}
Expand Down
12 changes: 9 additions & 3 deletions langchain/src/callbacks/manager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import {
import {
BaseCallbackHandler,
CallbackHandlerMethods,
HandleLLMNewTokenCallbackFields,
NewTokenIndices,
} from "./base.js";
import { ConsoleCallbackHandler } from "./handlers/console.js";
Expand Down Expand Up @@ -197,7 +198,11 @@ export class CallbackManagerForLLMRun
{
async handleLLMNewToken(
token: string,
idx: NewTokenIndices = { prompt: 0, completion: 0 }
idx?: NewTokenIndices,
_runId?: string,
_parentRunId?: string,
_tags?: string[],
fields?: HandleLLMNewTokenCallbackFields
): Promise<void> {
await Promise.all(
this.handlers.map((handler) =>
Expand All @@ -206,10 +211,11 @@ export class CallbackManagerForLLMRun
try {
await handler.handleLLMNewToken?.(
token,
idx,
idx ?? { prompt: 0, completion: 0 },
this.runId,
this._parentRunId,
this.tags
this.tags,
fields
);
} catch (err) {
console.error(
Expand Down
31 changes: 26 additions & 5 deletions langchain/src/chat_models/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -416,7 +416,17 @@ export class ChatOpenAI
});
yield generationChunk;
// eslint-disable-next-line no-void
void runManager?.handleLLMNewToken(generationChunk.text ?? "");
void runManager?.handleLLMNewToken(
generationChunk.text ?? "",
{
prompt: 0,
completion: choice.index,
},
undefined,
undefined,
undefined,
{ chunk: generationChunk }
);
}
}

Expand Down Expand Up @@ -598,16 +608,27 @@ export class ChatOpenAI
choice.message.function_call.arguments +=
part.delta?.function_call?.arguments ?? "";
}
// eslint-disable-next-line no-void

const chunk = _convertDeltaToMessageChunk(
part.delta,
"assistant"
);
const generationChunk = new ChatGenerationChunk({
message: chunk,
text: chunk.content,
});

void runManager?.handleLLMNewToken(
part.delta?.content ?? "",
{
prompt: options.promptIndex ?? 0,
completion: part.index,
}
},
undefined,
undefined,
undefined,
{ chunk: generationChunk }
);
// TODO we don't currently have a callback method for
// sending the function call arguments
}
}
// when all messages are finished, resolve
Expand Down

1 comment on commit 1c0bdae

@vercel
Copy link

@vercel vercel bot commented on 1c0bdae Aug 25, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please sign in to comment.