Skip to content

Commit

Permalink
fix(editor): Fix retrieving of messages from memory in chat modal (#8807)
Browse files Browse the repository at this point in the history

Signed-off-by: Oleg Ivaniv <me@olegivaniv.com>
  • Loading branch information
OlegIvaniv authored and netroy committed Mar 6, 2024
1 parent 5266bcb commit af69dd9
Show file tree
Hide file tree
Showing 2 changed files with 41 additions and 51 deletions.
54 changes: 28 additions & 26 deletions packages/@n8n/nodes-langchain/utils/logWrapper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ import type { BaseDocumentLoader } from 'langchain/document_loaders/base';
import type { BaseCallbackConfig, Callbacks } from 'langchain/dist/callbacks/manager';
import { BaseLLM } from 'langchain/llms/base';
import { BaseChatMemory } from 'langchain/memory';
import type { MemoryVariables } from 'langchain/dist/memory/base';
import type { MemoryVariables, OutputValues } from 'langchain/dist/memory/base';
import { BaseRetriever } from 'langchain/schema/retriever';
import type { FormatInstructionsOptions } from 'langchain/schema/output_parser';
import { BaseOutputParser, OutputParserException } from 'langchain/schema/output_parser';
Expand Down Expand Up @@ -148,35 +148,37 @@ export function logWrapper(
arguments: [values],
})) as MemoryVariables;

const chatHistory = (response?.chat_history as BaseMessage[]) ?? response;

executeFunctions.addOutputData(connectionType, index, [
[{ json: { action: 'loadMemoryVariables', response } }],
[{ json: { action: 'loadMemoryVariables', chatHistory } }],
]);
return response;
};
} else if (
prop === 'outputKey' &&
'outputKey' in target &&
target.constructor.name === 'BufferWindowMemory'
) {
connectionType = NodeConnectionType.AiMemory;
const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { action: 'chatHistory' } }],
]);
const response = target[prop];

target.chatHistory
.getMessages()
.then((messages) => {
executeFunctions.addOutputData(NodeConnectionType.AiMemory, index, [
[{ json: { action: 'chatHistory', chatHistory: messages } }],
]);
})
.catch((error: Error) => {
executeFunctions.addOutputData(NodeConnectionType.AiMemory, index, [
[{ json: { action: 'chatHistory', error } }],
]);
});
return response;
} else if (prop === 'saveContext' && 'saveContext' in target) {
return async (input: InputValues, output: OutputValues): Promise<MemoryVariables> => {
connectionType = NodeConnectionType.AiMemory;

const { index } = executeFunctions.addInputData(connectionType, [
[{ json: { action: 'saveContext', input, output } }],
]);

const response = (await callMethodAsync.call(target, {
executeFunctions,
connectionType,
currentNodeRunIndex: index,
method: target[prop],
arguments: [input, output],
})) as MemoryVariables;

const chatHistory = await target.chatHistory.getMessages();

executeFunctions.addOutputData(connectionType, index, [
[{ json: { action: 'saveContext', chatHistory } }],
]);

return response;
};
}
}

Expand Down
38 changes: 13 additions & 25 deletions packages/editor-ui/src/components/WorkflowLMChat.vue
Original file line number Diff line number Diff line change
Expand Up @@ -171,6 +171,10 @@ interface LangChainMessage {
};
}
interface MemoryOutput {
action: string;
chatHistory?: LangChainMessage[];
}
// TODO:
// - display additional information like execution time, tokens used, ...
// - display errors better
Expand Down Expand Up @@ -217,7 +221,10 @@ export default defineComponent({
this.messages = this.getChatMessages();
this.setNode();
setTimeout(() => this.$refs.inputField?.focus(), 0);
setTimeout(() => {
this.scrollToLatestMessage();
this.$refs.inputField?.focus();
}, 0);
},
methods: {
displayExecution(executionId: string) {
Expand Down Expand Up @@ -353,32 +360,13 @@ export default defineComponent({
memoryConnection.node,
);
const memoryOutputData = nodeResultData
?.map(
(
data,
): {
action: string;
chatHistory?: unknown[];
response?: {
sessionId?: unknown[];
};
} => get(data, ['data', NodeConnectionType.AiMemory, 0, 0, 'json'])!,
const memoryOutputData = (nodeResultData ?? [])
.map(
(data) => get(data, ['data', NodeConnectionType.AiMemory, 0, 0, 'json']) as MemoryOutput,
)
?.find((data) =>
['chatHistory', 'loadMemoryVariables'].includes(data?.action) ? data : undefined,
);
let chatHistory: LangChainMessage[];
if (memoryOutputData?.chatHistory) {
chatHistory = memoryOutputData?.chatHistory as LangChainMessage[];
} else if (memoryOutputData?.response) {
chatHistory = memoryOutputData?.response.sessionId as LangChainMessage[];
} else {
return [];
}
.find((data) => data.action === 'saveContext');
return (chatHistory || []).map((message) => {
return (memoryOutputData?.chatHistory ?? []).map((message) => {
return {
text: message.kwargs.content,
sender: last(message.id) === 'HumanMessage' ? 'user' : 'bot',
Expand Down

0 comments on commit af69dd9

Please sign in to comment.