From 5017e6fd4aa71eb7b1fd267f0829039c6021dba4 Mon Sep 17 00:00:00 2001 From: Nick Heiner Date: Tue, 25 Jul 2023 18:45:02 -0400 Subject: [PATCH] Add the logger (#213) This was a painful oversight that made observability impossible for `toTextStream`. --- packages/ai-jsx/package.json | 2 +- packages/ai-jsx/src/stream/index.ts | 5 +++-- packages/docs/docs/changelog.md | 6 +++++- packages/examples/src/fastify.tsx | 15 ++++++++++++++- 4 files changed, 23 insertions(+), 5 deletions(-) diff --git a/packages/ai-jsx/package.json b/packages/ai-jsx/package.json index f40d9126d..e889c65d3 100644 --- a/packages/ai-jsx/package.json +++ b/packages/ai-jsx/package.json @@ -4,7 +4,7 @@ "repository": "fixie-ai/ai-jsx", "bugs": "https://github.com/fixie-ai/ai-jsx/issues", "homepage": "https://ai-jsx.com", - "version": "0.5.15", + "version": "0.5.16", "volta": { "extends": "../../package.json" }, diff --git a/packages/ai-jsx/src/stream/index.ts b/packages/ai-jsx/src/stream/index.ts index f1b9ec510..7228f52d8 100644 --- a/packages/ai-jsx/src/stream/index.ts +++ b/packages/ai-jsx/src/stream/index.ts @@ -1,3 +1,4 @@ +import { LogImplementation } from '../core/log.js'; import { isElement as isAIElement, Element, @@ -178,9 +179,9 @@ export function toSerializedStreamResponse( * this allows the response to be easily consumed by other frameworks (such as https://sdk.vercel.ai/) * but does not support UI components or concurrently streaming multiple parts of the tree. 
*/ -export function toTextStream(renderable: Renderable): ReadableStream { +export function toTextStream(renderable: Renderable, logger?: LogImplementation): ReadableStream { let previousValue = ''; - const generator = createRenderContext().render(renderable, { appendOnly: true })[Symbol.asyncIterator](); + const generator = createRenderContext({ logger }).render(renderable, { appendOnly: true })[Symbol.asyncIterator](); return new ReadableStream({ async pull(controller) { const next = await generator.next(); diff --git a/packages/docs/docs/changelog.md b/packages/docs/docs/changelog.md index 99c910cab..4df854a2d 100644 --- a/packages/docs/docs/changelog.md +++ b/packages/docs/docs/changelog.md @@ -1,6 +1,10 @@ # Changelog -## 0.5.15 +## 0.5.16 + +- Update `toTextStream` to accept a `logger`, so you can now see log output when you're running AI.JSX on the server and outputting to a stream. See [AI + UI](./guides/ai-ui.md) and [Observability](./guides/observability.md). + +## [0.5.15](https://github.com/fixie-ai/ai-jsx/commit/68adddd) - Add [`MdxChatCompletion`](./guides/mdx.md), so your model calls can now output [MDX](https://mdxjs.com/) using your components. diff --git a/packages/examples/src/fastify.tsx b/packages/examples/src/fastify.tsx index bf288766c..692444446 100644 --- a/packages/examples/src/fastify.tsx +++ b/packages/examples/src/fastify.tsx @@ -4,6 +4,8 @@ import * as AI from 'ai-jsx'; import Fastify from 'fastify'; import { toTextStream } from 'ai-jsx/stream'; import { ReadableStream } from 'stream/web'; +import { pino } from 'pino'; +import { PinoLogger } from 'ai-jsx/core/log'; /** * To run this demo: @@ -20,6 +22,17 @@ const fastify = Fastify({ logger: true, }); +const pinoStdoutLogger = pino({ + name: 'ai-jsx', + level: process.env.loglevel ?? 
'trace', transport: { target: 'pino-pretty', options: { colorize: true, }, }, }); function FantasyCharacter() { return ( @@ -44,7 +57,7 @@ fastify.get('/stream-sample', async (request, reply) => { ); } - const responseStream = toTextStream(<FantasyCharacter />); + const responseStream = toTextStream(<FantasyCharacter />, new PinoLogger(pinoStdoutLogger)); await sendReadableStreamToFastifyReply(reply, responseStream); });