Skip to content

Commit 13efc85

Browse files
committed
feat(memory): auto-wire memoryProvider on direct agent.stream() + drop dead MEMORY_TIMEOUT_MS
1 parent ab3a2d9 commit 13efc85

2 files changed

Lines changed: 44 additions & 10 deletions

File tree

src/api/agent.ts

Lines changed: 12 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -290,9 +290,6 @@ async function loadRecordedAgentOSUsage(
290290
return getRecordedAgentOSUsage(options);
291291
}
292292

293-
/** Timeout for memory operations to prevent blocking generation. */
294-
const MEMORY_TIMEOUT_MS = 5000;
295-
296293
/**
297294
* Convert HEXACO trait values (0-1) into behavioral descriptions the LLM can act on.
298295
*
@@ -473,13 +470,18 @@ export function agent(opts: AgentOptions): Agent {
473470
},
474471

475472
stream(prompt: MessageContent, extra?: Partial<GenerateTextOptions>): StreamTextResult {
476-
const streamOpts: Partial<GenerateTextOptions> = {
477-
...baseOpts,
478-
...extra,
479-
usageLedger: mergeUsageLedgerOptions(baseOpts.usageLedger, extra?.usageLedger, {
480-
source: extra?.usageLedger?.source ?? 'agent.stream',
481-
}),
482-
};
473+
const userText = typeof prompt === 'string' ? prompt : extractTextFromContent(prompt);
474+
const streamOpts: Partial<GenerateTextOptions> = applyMemoryProvider(
475+
{
476+
...baseOpts,
477+
...extra,
478+
usageLedger: mergeUsageLedgerOptions(baseOpts.usageLedger, extra?.usageLedger, {
479+
source: extra?.usageLedger?.source ?? 'agent.stream',
480+
}),
481+
},
482+
opts.memoryProvider,
483+
userText,
484+
);
483485
if (typeof prompt === 'string') {
484486
streamOpts.prompt = prompt;
485487
} else {

src/api/runtime/__tests__/agentPromptEngine.test.ts

Lines changed: 32 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -219,6 +219,38 @@ describe('agent() PromptEngine/Memory/Skills integration', () => {
219219
const combined = systemMsgs.map((m: any) => m.content).join('\n');
220220
expect(combined).toContain('Memory: user likes hiking');
221221
});
222+
223+
it('calls getContext before direct agent.stream() (new in 0.2.0)', async () => {
224+
const memory = createMockMemory();
225+
const a = agent({ instructions: 'test', memoryProvider: memory });
226+
227+
const streamResult = a.stream('hello from stream');
228+
// Drain the stream to ensure generation completes
229+
for await (const _chunk of streamResult.textStream) {
230+
// consume
231+
}
232+
await streamResult.text;
233+
234+
expect(memory.getContext).toHaveBeenCalledWith(
235+
'hello from stream',
236+
expect.objectContaining({ tokenBudget: expect.any(Number) }),
237+
);
238+
});
239+
240+
it('calls observe after direct agent.stream() completes (new in 0.2.0)', async () => {
241+
const memory = createMockMemory();
242+
const a = agent({ instructions: 'test', memoryProvider: memory });
243+
244+
const streamResult = a.stream('hello from stream');
245+
for await (const _chunk of streamResult.textStream) {
246+
// consume
247+
}
248+
await streamResult.text;
249+
await new Promise((resolve) => setImmediate(resolve));
250+
251+
expect(memory.observe).toHaveBeenCalledWith('user', 'hello from stream');
252+
expect(memory.observe).toHaveBeenCalledWith('assistant', 'streamed');
253+
});
222254
});
223255

224256
describe('all three compose together', () => {

0 commit comments

Comments (0)