From 719a1311b66a95fc18ccd3d673459e7bd5eadbea Mon Sep 17 00:00:00 2001 From: Ninja <142054318+ninjamaster1337@users.noreply.github.com> Date: Fri, 10 Nov 2023 19:37:47 -0600 Subject: [PATCH] Fix delta role error when using custom LLM (#3223) * Fix delta role error when using custom LLM This addresses a delta chunk issue that happens when you use a custom baseURL on the openai chat model. Some models like llama 2 on openrouter may have an empty delta and result in an undefined error. Example error: ``` Cannot read properties of undefined (reading 'role')", "error.stack": "TypeError: Cannot read properties of undefined (reading 'role')\n at _convertDeltaToMessageChunk (/home/ubuntu/node_modules/langchain/dist/chat_models/openai.cjs:72:24)\n at ChatOpenAI._streamResponseChunks (/home/ubuntu/node_modules/langchain/dist/chat_models/openai.cjs:409:27)\n at process.processTicksAndRejections (node:internal/process/task_queues:95:5)\n at async ChatOpenAI._streamIterator (/home/ubuntu/node_modules/langchain/dist/chat_models/base.cjs:77:34)\n at async RunnableSequence._streamIterator (/home/ubuntu/node_modules/langchain/dist/schema/runnable/base.cjs:780:30)\n at async Object.pull (/home/ubuntu/node_modules/langchain/dist/util/stream.cjs:73:41) ``` on _convertDeltaToMessageChunk, openai works fine but when you start using models from openrouter or other baseURL llms, it doesn't take empty deltas into account and assumes there's always a value. I was able to test with a custom baseURL and normal openai that this works with no errors on streaming after this tweak. * add default role * add ? 
on chunk text for streaming delta * Simpler fix * Revert --------- Co-authored-by: jacoblee93 --- langchain/src/chat_models/openai.ts | 3 +++ 1 file changed, 3 insertions(+) diff --git a/langchain/src/chat_models/openai.ts b/langchain/src/chat_models/openai.ts index cbda733aea9..7e3f72c3e8f 100644 --- a/langchain/src/chat_models/openai.ts +++ b/langchain/src/chat_models/openai.ts @@ -449,6 +449,9 @@ export class ChatOpenAI< } const { delta } = choice; + if (!delta) { + continue; + } const chunk = _convertDeltaToMessageChunk(delta, defaultRole); defaultRole = delta.role ?? defaultRole; const newTokenIndices = {