Skip to content

Commit

Permalink
Improve prompt/completion logging (#506)
Browse files Browse the repository at this point in the history
Currently the prompt/completion log events include debug-serialized JSX.
But this is a) difficult to extract data from and b) unnecessarily
generic because we're already working with a very finite set of
conversation components.

This changes the structure to include the final text and props
(sans-`children`) instead. Additionally, we include a `getElement()`
function so that consumers have the option to access the element itself,
without it being JSON-serialized by default.
  • Loading branch information
petersalas committed Nov 20, 2023
1 parent 16869b1 commit 7325135
Show file tree
Hide file tree
Showing 4 changed files with 74 additions and 13 deletions.
2 changes: 1 addition & 1 deletion packages/ai-jsx/package.json
Expand Up @@ -4,7 +4,7 @@
"repository": "fixie-ai/ai-jsx",
"bugs": "https://github.com/fixie-ai/ai-jsx/issues",
"homepage": "https://ai-jsx.com",
"version": "0.27.1",
"version": "0.28.0",
"volta": {
"extends": "../../package.json"
},
Expand Down
49 changes: 42 additions & 7 deletions packages/ai-jsx/src/core/conversation.tsx
Expand Up @@ -238,6 +238,47 @@ function toConversationMessages(partialRendering: AI.PartiallyRendered[]): Conve
});
}

/**
 * Builds a JSON-friendly representation of a {@link ConversationMessage} for
 * logging: the message type, its rendered text (where applicable), its props
 * without `children`, and an optional cost.
 *
 * The underlying element is exposed via `getElement()` rather than as a plain
 * property so that it is skipped by `JSON.stringify` by default but remains
 * reachable for consumers that want it.
 */
async function loggableMessage(
  message: ConversationMessage,
  render: AI.RenderContext['render'],
  cost?: (message: ConversationMessage, render: AI.ComponentContext['render']) => Promise<number>
) {
  let renderedText: PromiseLike<string> | undefined;
  if (message.type === 'user' || message.type === 'assistant' || message.type === 'system') {
    // Conversational text messages render the element itself.
    renderedText = render(message.element);
  } else if (message.type === 'functionResponse') {
    // Function responses render only their children (the response payload).
    renderedText = render(message.element.props.children);
  } else if (message.type !== 'functionCall') {
    // Exhaustiveness guard: `message` must be `never` here.
    const neverMessage: never = message;
    throw new Error(`Unexpected message type ${(neverMessage as any).type}`);
  }

  const pendingCost = cost?.(message, render);

  // Strip `children` from the props so the logged value stays JSON-serializable.
  // The `children: undefined` default ensures the rest-destructure is valid even
  // when the element has no `children` prop.
  const { children: omittedChildren, ...restProps } = {
    children: undefined,
    ...message.element.props,
  };
  const loggableProps: Record<string, Jsonifiable> = restProps;

  return {
    // Use a function so that it doesn't serialize to JSON, but can be accessed if needed.
    getElement: () => message.element,
    type: message.type,
    props: loggableProps,
    text: await renderedText,
    cost: await pendingCost,
  };
}

/** @hidden */
export async function renderToConversation(
conversation: AI.Node,
Expand All @@ -258,13 +299,7 @@ export async function renderToConversation(
const messages = toConversationMessages(await render(conversationToUse, { stop: isConversationalComponent }));

if (logger && logType) {
const loggableMessages = await Promise.all(
messages.map(async (m) => ({
element: debug(m.element, true),
...(cost && { cost: await cost(m, render) }),
}))
);

const loggableMessages = await Promise.all(messages.map((m) => loggableMessage(m, render, cost)));
logger.setAttribute(`ai.jsx.${logType}`, JSON.stringify(loggableMessages));
logger.info({ [logType]: { messages: loggableMessages } }, `Got ${logType} conversation`);
}
Expand Down
6 changes: 5 additions & 1 deletion packages/docs/docs/changelog.md
@@ -1,6 +1,10 @@
# Changelog

## 0.27.1
## 0.28.0

- Improved completion/prompt logging to include explicit message text

## [0.27.1](https://github.com/fixie-ai/ai-jsx/commit/b5e436615df37c7b68986059892d6043b684df18)

- Fix bug where memoized components could duplicate content
- Refactor `<Converse>` to allow rounds to progress in parallel when content allows
Expand Down
30 changes: 26 additions & 4 deletions packages/examples/test/core/completion.tsx
Expand Up @@ -104,8 +104,14 @@ describe('OpenTelemetry', () => {
"ai.jsx.tree": ""opentel response from OpenAI"",
},
{
"ai.jsx.completion": "[{"element":"<AssistantMessage @memoizedId=3>\\n {\\"opentel response from OpenAI\\"}\\n</AssistantMessage>","cost":10}]",
"ai.jsx.prompt": "[{"element":"<UserMessage @memoizedId=1>\\n {\\"hello\\"}\\n</UserMessage>","cost":4}]",
"ai.jsx.memoized": true,
"ai.jsx.result": "opentel response from OpenAI",
"ai.jsx.tag": "Stream",
"ai.jsx.tree": ""opentel response from OpenAI"",
},
{
"ai.jsx.completion": "[{"type":"assistant","props":{},"text":"opentel response from OpenAI","cost":10}]",
"ai.jsx.prompt": "[{"type":"user","props":{},"text":"hello","cost":4}]",
"ai.jsx.result": "opentel response from OpenAI",
"ai.jsx.tag": "OpenAIChatModel",
"ai.jsx.tree": "<OpenAIChatModel model="gpt-3.5-turbo">
Expand Down Expand Up @@ -171,6 +177,14 @@ describe('OpenTelemetry', () => {
"ai.jsx.tag": "UserMessage",
"ai.jsx.tree": "<UserMessage @memoizedId=1>
{"hello"}
</UserMessage>",
},
{
"ai.jsx.memoized": true,
"ai.jsx.result": "hello",
"ai.jsx.tag": "UserMessage",
"ai.jsx.tree": "<UserMessage @memoizedId=1>
{"hello"}
</UserMessage>",
},
{
Expand Down Expand Up @@ -218,6 +232,14 @@ describe('OpenTelemetry', () => {
"ai.jsx.tag": "Stream",
"ai.jsx.tree": ""opentel response from OpenAI"",
},
{
"ai.jsx.memoized": true,
"ai.jsx.result": "opentel response from OpenAI",
"ai.jsx.tag": "AssistantMessage",
"ai.jsx.tree": "<AssistantMessage @memoizedId=3>
{"opentel response from OpenAI"}
</AssistantMessage>",
},
{
"ai.jsx.result": "opentel response from OpenAI",
"ai.jsx.tag": "AssistantMessage",
Expand All @@ -226,8 +248,8 @@ describe('OpenTelemetry', () => {
</AssistantMessage>",
},
{
"ai.jsx.completion": "[{"element":"<AssistantMessage @memoizedId=3>\\n {\\"opentel response from OpenAI\\"}\\n</AssistantMessage>","cost":10}]",
"ai.jsx.prompt": "[{"element":"<UserMessage @memoizedId=1>\\n {\\"hello\\"}\\n</UserMessage>","cost":4}]",
"ai.jsx.completion": "[{"type":"assistant","props":{},"text":"opentel response from OpenAI","cost":10}]",
"ai.jsx.prompt": "[{"type":"user","props":{},"text":"hello","cost":4}]",
"ai.jsx.result": "opentel response from OpenAI",
"ai.jsx.tag": "OpenAIChatModel",
"ai.jsx.tree": "<OpenAIChatModel model="gpt-3.5-turbo">
Expand Down

4 comments on commit 7325135

@vercel
Copy link

@vercel vercel bot commented on 7325135 Nov 20, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Successfully deployed to the following URLs:

ai-jsx-voice – ./packages/voice

ai-jsx-voice-fixie-ai.vercel.app
ai-jsx-voice-git-main-fixie-ai.vercel.app
ai-jsx-voice.vercel.app
voice.fixie.ai

@vercel
Copy link

@vercel vercel bot commented on 7325135 Nov 20, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Successfully deployed to the following URLs:

ai-jsx-docs – ./packages/docs

ai-jsx-docs-git-main-fixie-ai.vercel.app
ai-jsx-docs.vercel.app
ai-jsx-docs-fixie-ai.vercel.app
docs.ai-jsx.com

@vercel
Copy link

@vercel vercel bot commented on 7325135 Nov 20, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Successfully deployed to the following URLs:

ai-jsx-nextjs-demo – ./packages/nextjs-demo

ai-jsx-nextjs-demo-git-main-fixie-ai.vercel.app
ai-jsx-nextjs-demo.vercel.app
ai-jsx-nextjs-demo-fixie-ai.vercel.app

@vercel
Copy link

@vercel vercel bot commented on 7325135 Nov 20, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Successfully deployed to the following URLs:

ai-jsx-tutorial-nextjs – ./packages/tutorial-nextjs

ai-jsx-tutorial-nextjs.vercel.app
ai-jsx-tutorial-nextjs-fixie-ai.vercel.app
ai-jsx-tutorial-nextjs-git-main-fixie-ai.vercel.app

Please sign in to comment.