Commit a6ee1f9

chore(logging): enhance rawLoggingMiddleware to log stream and generate results
1 parent 2f89f04 commit a6ee1f9

File tree

1 file changed: +24 -12 lines changed

utils/llm/middlewares.ts

Lines changed: 24 additions & 12 deletions
@@ -5,7 +5,6 @@ import { extractReasoningMiddleware } from 'ai'
 import { z } from 'zod'

 import { nonNullable } from '../array'
-import { debounce } from '../debounce'
 import { ParseFunctionCallError } from '../error'
 import { generateRandomId } from '../id'
 import Logger from '../logger'
@@ -190,37 +189,50 @@ export const normalizeToolCallsMiddleware: LanguageModelV1Middleware = {
 export const rawLoggingMiddleware: LanguageModelV1Middleware = {
   wrapStream: async ({ doStream, params }) => {
     const log = logger.child('rawLoggingMiddleware')
+    const text: string[] = []
+    const reasoning: string[] = []

     const { stream, ...rest } = await doStream()

-    log.debug('Stream started', { params })
-    let text = ''
-    let reasoning = ''
-    const printLog = debounce(() => {
-      log.debug('Stream progress', { text, reasoning })
-    }, 2000)
-
     const transformStream = new TransformStream<
       LanguageModelV1StreamPart,
       LanguageModelV1StreamPart
     >({
       transform(chunk, controller) {
         if (chunk.type === 'text-delta') {
-          text += chunk.textDelta
+          text.push(chunk.textDelta)
         }
         else if (chunk.type === 'reasoning') {
-          reasoning += chunk.textDelta
+          reasoning.push(chunk.textDelta)
         }
-        printLog()
         controller.enqueue(chunk)
       },
+      flush() {
+        log.info('LLM Stream Result', {
+          params,
+          text: text.join(''),
+          reasoning: reasoning.join(''),
+        })
+      },
     })

     return {
       stream: stream.pipeThrough(transformStream),
       ...rest,
     }
   },
+  wrapGenerate: async ({ doGenerate, params }) => {
+    const log = logger.child('rawLoggingMiddleware')
+
+    const result = await doGenerate()
+
+    log.info('LLM Generate Result', {
+      params,
+      result,
+    })
+
+    return result
+  },
 }

 const errorResponse = /<\|channel\|>(?!\s*commentary\s+to=[a-z_.]+\s*>)[^<]+>(<\/assistant)?/gs
@@ -437,9 +449,9 @@ export const lmStudioHarmonyEncodingMiddleware: LanguageModelV1Middleware = {
 }

 export const middlewares = [
-  // rawLoggingMiddleware,
   normalizeToolCallsMiddleware,
   extractPromptBasedToolCallsMiddleware,
   lmStudioHarmonyEncodingMiddleware,
   reasoningMiddleware,
+  rawLoggingMiddleware,
 ]
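
For reference, below is a minimal sketch of how a middleware stack like this is typically attached to a model with the AI SDK's wrapLanguageModel. It is not part of this commit: the provider package, model id, and import path are assumptions for illustration, and it presumes an AI SDK version whose wrapLanguageModel accepts an array of middlewares.

import { streamText, wrapLanguageModel } from 'ai'
import { openai } from '@ai-sdk/openai' // assumed provider, not part of this repo

import { middlewares } from './utils/llm/middlewares'

// Wrap the base model so every doStream/doGenerate call flows through the
// middleware chain, which now ends with rawLoggingMiddleware.
const model = wrapLanguageModel({
  model: openai('gpt-4o-mini'), // assumed model id for illustration
  middleware: middlewares,
})

async function main() {
  // A streaming call on the wrapped model emits one 'LLM Stream Result'
  // log entry when the stream finishes (via the middleware's flush()).
  const { textStream } = streamText({
    model,
    prompt: 'Say hello in one sentence.',
  })

  for await (const delta of textStream) {
    process.stdout.write(delta)
  }
}

main()

Compared with the previous debounce-based progress logging, accumulating the deltas in arrays and joining them once in flush() produces a single complete log entry per stream instead of periodic partial snapshots, and the new wrapGenerate hook gives the same visibility for non-streaming calls.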
