
Commit

Improve error handling (#149)
Add error codes and more legibility / CTAs to error output.
NickHeiner committed Jun 27, 2023
1 parent e3fd97c commit 92c34d9
Showing 22 changed files with 267 additions and 55 deletions.
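
For context (an illustrative sketch, not part of this diff): downstream code can now branch on the error codes and blame labels this commit introduces. The import path 'ai-jsx/core/errors' is an assumption about the published package layout; the class, enum, and property names come from the diff below.

import { AIJSXError, ErrorCode } from 'ai-jsx/core/errors'; // assumed public export path

try {
  // Simulate a failure that the library now reports with a coded error.
  throw new AIJSXError('No chat model was specified.', ErrorCode.MissingChatModel, 'user');
} catch (e) {
  if (e instanceof AIJSXError) {
    // Each AIJSXError carries a stable numeric code, a blame label
    // ('user' | 'runtime' | 'internal' | 'ambiguous'), and optional JSON-serializable metadata.
    console.error(e.toString()); // "AI.JSX(1001): No chat model was specified. ..." plus help links
    if (e.blame === 'user') {
      console.info('Check your model configuration (e.g. OPENAI_API_KEY).');
    }
  } else {
    throw e;
  }
}
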
2 changes: 1 addition & 1 deletion packages/ai-jsx/package.json
@@ -4,7 +4,7 @@
"repository": "fixie-ai/ai-jsx",
"bugs": "https://github.com/fixie-ai/ai-jsx/issues",
"homepage": "https://ai-jsx.com",
"version": "0.5.5",
"version": "0.5.6",
"volta": {
"extends": "../../package.json"
},
12 changes: 11 additions & 1 deletion packages/ai-jsx/src/batteries/constrained-output.tsx
@@ -7,6 +7,7 @@
import * as AI from '../index.js';
import { ChatCompletion, SystemMessage, AssistantMessage, UserMessage } from '../core/completion.js';
import yaml from 'js-yaml';
import { AIJSXError, ErrorCode } from '../core/errors.js';

interface ValidationResult {
success: boolean;
@@ -141,7 +142,16 @@ async function ObjectFormatChatCompletion(
);
}

throw new Error(`The model did not produce a valid ${typeName} object, even after ${retries} attempts.`);
throw new AIJSXError(
`The model did not produce a valid ${typeName} object, even after ${retries} attempts.`,
ErrorCode.ModelOutputDidNotMatchConstraint,
'runtime',
{
typeName,
retries,
output,
}
);
}

function isJsonString(str: string): ValidationResult {
29 changes: 23 additions & 6 deletions packages/ai-jsx/src/batteries/docs.tsx
@@ -14,6 +14,7 @@ import { Jsonifiable } from 'type-fest';
import { ChatCompletion, SystemMessage, UserMessage } from '../core/completion.js';
import { Node } from '../index.js';
import { getEnvVar } from '../lib/util.js';
import { AIJSXError, ErrorCode } from '../core/errors.js';

/**
* A raw document loaded from an arbitrary source that has not yet been parsed.
@@ -70,7 +71,12 @@ function defaultParser<DocumentMetadata extends Jsonifiable = Jsonifiable>(
return Promise.resolve({ pageContent: [content], name: raw.name });
// TODO: Add support for other mime types.
}
throw new Error(`Unsupported mime type: ${raw.mimeType}`);
throw new AIJSXError(
`Unsupported mime type: ${raw.mimeType}`,
ErrorCode.UnsupportedMimeType,
'user',
_.pick(raw, ['mimeType'])
);
}

/** A non-overlapping subdivision of a corpus' documents used for loading. */
@@ -419,9 +425,14 @@ export interface CorpusStats {
loadingError?: Error;
}

class CorpusNotReadyError extends Error {
class CorpusNotReadyError extends AIJSXError {
constructor(state: CorpusLoadingState) {
super(`Corpus is not ready. It's in state ${state}. Call load() to load documents.`);
super(
`Corpus is not ready. It's in state ${state}. Call load() to load documents.`,
ErrorCode.CorpusNotReady,
'user',
{ state }
);
}
}

@@ -610,8 +621,10 @@ export class FixieCorpus<ChunkMetadata extends Jsonifiable = Jsonifiable> implem
if (!fixieApiKey) {
this.fixieApiKey = getEnvVar('FIXIE_API_KEY', false);
if (!this.fixieApiKey) {
throw new Error(
'You must provide a Fixie API key to access Fixie corpora. Find yours at https://app.fixie.ai/profile.'
throw new AIJSXError(
'You must provide a Fixie API key to access Fixie corpora. Find yours at https://app.fixie.ai/profile.',
ErrorCode.MissingFixieAPIKey,
'user'
);
}
}
@@ -628,7 +641,11 @@ export class FixieCorpus<ChunkMetadata extends Jsonifiable = Jsonifiable> implem
body: JSON.stringify({ query_string: query, chunk_limit: params?.limit }),
});
if (response.status !== 200) {
throw new Error(`Fixie API returned status ${response.status}: ${await response.text()}`);
throw new AIJSXError(
`Fixie API returned status ${response.status}: ${await response.text()}`,
ErrorCode.FixieStatusNotOk,
'runtime'
);
}
const apiResults = await response.json();
return apiResults.chunks.map((result: any) => ({
15 changes: 12 additions & 3 deletions packages/ai-jsx/src/batteries/use-tools.tsx
@@ -8,6 +8,7 @@ import { ChatCompletion, SystemMessage, UserMessage } from '../core/completion.j
import { Node, RenderContext } from '../index.js';
import z, { ZodTypeAny } from 'zod';
import { zodToJsonSchema } from 'zod-to-json-schema';
import { AIJSXError, ErrorCode } from '../core/errors.js';

const toolChoiceSchema = z.object({
nameOfTool: z.string(),
@@ -54,13 +55,21 @@ async function InvokeTool(
}
toolChoiceResult = toolChoiceSchema.parse(parsedJson);
} catch (e: any) {
const error = new Error(
`Failed to parse LLM output into a tool choice: ${e.message}. Output: ${toolChoiceLLMOutput}`
const error = new AIJSXError(
`Failed to parse LLM output into a tool choice: ${e.message}. Output: ${toolChoiceLLMOutput}`,
ErrorCode.ModelOutputCouldNotBeParsedForTool,
'runtime',
{ toolChoiceLLMOutput }
);
throw error;
}
if (!(toolChoiceResult.nameOfTool in props.tools)) {
throw new Error(`LLM hallucinated a tool that does not exist: ${toolChoiceResult.nameOfTool}.`);
throw new AIJSXError(
`LLM hallucinated a tool that does not exist: ${toolChoiceResult.nameOfTool}.`,
ErrorCode.ModelHallucinatedTool,
'runtime',
{ toolChoiceResult }
);
}
const tool = props.tools[toolChoiceResult.nameOfTool];
const toolResult = await tool.func(...toolChoiceResult.parameters);
17 changes: 13 additions & 4 deletions packages/ai-jsx/src/core/completion.tsx
@@ -5,6 +5,7 @@

import * as AI from '../index.js';
import { Node, Component, RenderContext } from '../index.js';
import { AIJSXError, ErrorCode } from '../core/errors.js';
import { OpenAIChatModel, OpenAICompletionModel } from '../lib/openai.js';
import { getEnvVar } from '../lib/util.js';

@@ -64,11 +65,15 @@ function AutomaticCompletionModel({ children, ...props }: ModelPropsWithChildren
);
}

throw new Error(`No completion model was specified. To fix this, do one of the following:
throw new AIJSXError(
`No completion model was specified. To fix this, do one of the following:
1. Set the OPENAI_API_KEY or REACT_APP_OPENAI_API_KEY environment variable.
2. Set the OPENAI_API_BASE or REACT_APP_OPENAI_API_BASE environment variable.
3. use an explicit CompletionProvider component.`);
3. use an explicit CompletionProvider component.`,
ErrorCode.MissingCompletionModel,
'user'
);
}

/**
@@ -84,11 +89,15 @@ function AutomaticChatModel({ children, ...props }: ModelPropsWithChildren) {
</OpenAIChatModel>
);
}
throw new Error(`No chat model was specified. To fix this, do one of the following:
throw new AIJSXError(
`No chat model was specified. To fix this, do one of the following:
1. Set the OPENAI_API_KEY or REACT_APP_OPENAI_API_KEY environment variable.
2. Set the OPENAI_API_BASE or REACT_APP_OPENAI_API_BASE environment variable.
3. use an explicit ChatProvider component.`);
3. use an explicit ChatProvider component.`,
ErrorCode.MissingChatModel,
'user'
);
}

/** The default context used by {@link CompletionProvider}. */
25 changes: 20 additions & 5 deletions packages/ai-jsx/src/core/core.ts
@@ -8,6 +8,7 @@

import { v4 as uuidv4 } from 'uuid';
import { BoundLogger, NoOpLogImplementation, LogImplementation, Logger, PinoLogger } from './log.js';
import { AIJSXError, ErrorCode } from '../core/errors.js';

/** A context that is used to render an AI.JSX component. */
export interface ComponentContext extends RenderContext {
@@ -361,7 +362,11 @@ async function* renderStream(
}

if (!('then' in renderable)) {
throw new Error(`AI.JSX bug: unexpected renderable type: ${JSON.stringify(renderable)}`);
throw new AIJSXError(
`Unexpected renderable type: ${JSON.stringify(renderable)}`,
ErrorCode.UnrenderableType,
'ambiguous'
);
}
// N.B. Because RenderResults are both AsyncIterable _and_ PromiseLikes, this means that an async component that returns the result
// of a render call will not stream; it will effectively be `await`ed by default.
@@ -422,8 +427,10 @@ function createRenderContextInternal(render: StreamRenderer, userContext: Record
then: (onFulfilled?, onRejected?) => {
if (promiseResult === null) {
if (hasReturnedGenerator) {
throw new Error(
"The RenderResult's generator must be fully exhausted before you can await the final result."
throw new AIJSXError(
"The RenderResult's generator must be fully exhausted before you can await the final result.",
ErrorCode.GeneratorMustBeExhausted,
'ambiguous'
);
}

@@ -444,9 +451,17 @@

[Symbol.asyncIterator]: () => {
if (hasReturnedGenerator) {
throw new Error("The RenderResult's generator was already returned and cannot be returned again.");
throw new AIJSXError(
"The RenderResult's generator was already returned and cannot be returned again.",
ErrorCode.GeneratorCannotBeUsedTwice,
'ambiguous'
);
} else if (promiseResult !== null) {
throw new Error('The RenderResult was already awaited and can no longer be used as an iterable.');
throw new AIJSXError(
'The RenderResult was already awaited and can no longer be used as an iterable.',
ErrorCode.GeneratorCannotBeUsedAsIterableAfterAwaiting,
'ambiguous'
);
}

hasReturnedGenerator = true;
88 changes: 86 additions & 2 deletions packages/ai-jsx/src/core/errors.ts
@@ -1,13 +1,97 @@
import _ from 'lodash';
import { Jsonifiable } from 'type-fest';

export enum ErrorCode {
MissingCompletionModel = 1000,
MissingChatModel = 1001,
MissingImageModel = 1002,
UnrenderableType = 1003,
GeneratorMustBeExhausted = 1004,
GeneratorCannotBeUsedTwice = 1005,
GeneratorCannotBeUsedAsIterableAfterAwaiting = 1006,
UnexpectedRenderType = 1007,
LogitBiasBadInput = 1008,
ChatCompletionMissingChildren = 1009,
ChatCompletionUnexpectedChild = 1010,
ImageBadDimensions = 1011,
ModelOutputDidNotMatchUIShape = 1012,
AIJSXEndpointFailed = 1013,
AIJSXEndpointHadEmptyResponse = 1014,

ModelOutputDidNotMatchConstraint = 2000,

UnsupportedMimeType = 2001,
MissingFixieAPIKey = 2002,
CorpusNotReady = 2003,
FixieStatusNotOk = 2004,

ModelOutputCouldNotBeParsedForTool = 2005,
ModelHallucinatedTool = 2006,
}

export type ErrorBlame =
/** An error that is expected to occur, like a network failure. */
| 'runtime'
/** An error that's most likely caused by the user. */
| 'user'
/** An error that's most likely the fault of AI.JSX itself. */
| 'internal'
/** An error where it's not clear who caused it. */
| 'ambiguous';

/**
* A generic error thrown by AI.JSX. It could be a user error, runtime error, or internal error.
*/
export class AIJSXError extends Error {
constructor(
message: string,
public readonly code: ErrorCode,
public readonly blame: ErrorBlame,
public readonly metadata: Jsonifiable = {}
) {
super(message);
}

private messageOfErrorKind() {
switch (this.blame) {
case 'runtime':
return "This is a runtime error that's expected to occur with some frequency. It may go away on retry. It may be made more likely by errors in your code, or in AI.JSX.";
case 'user':
return 'This may be due to a mistake in your code.';
case 'internal':
return 'This is most likely a bug in AI.JSX. Bug reports appreciated. :)';
case 'ambiguous':
return "It's unclear whether this was caused by a bug in AI.JSX, in your code, or is an expected runtime error.";
}
}

private formattedMessage() {
return _.last(this.message) === '.' ? this.message : `${this.message}.`;
}

toString() {
return `AI.JSX(${this.code}): ${this.formattedMessage()}
${this.messageOfErrorKind()}
Need help?
* Discord: https://discord.com/channels/1065011484125569147/1121125525142904862
* Docs: https://docs.ai-jsx.com/
* GH: https://github.com/fixie-ai/ai-jsx/issues`;
}
}

/**
* Represents an error that occurs while invoking an HTTP request to a Large Language Model.
*/
export class HttpError extends Error {
export class HttpError extends AIJSXError {
constructor(
message: string,
readonly statusCode: number,
readonly errorCode: number,
readonly responseBody?: string,
readonly responseHeaders?: Record<string, string>
) {
super(message || `HTTP request failed with status code ${statusCode}`);
super(message || `HTTP request failed with status code ${statusCode}`, errorCode, 'runtime');
}
}
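
As a quick illustration of the class added above (a sketch, not part of the diff): constructing an AIJSXError and printing its formatted message. The error code value and the blame wording come from this file; the metadata object is an arbitrary example.

import { AIJSXError, ErrorCode } from './errors.js';

const err = new AIJSXError(
  'Fixie API returned status 500: internal server error',
  ErrorCode.FixieStatusNotOk,
  'runtime',
  { status: 500 } // optional Jsonifiable metadata attached to the error
);

// toString() prefixes the numeric code, appends a trailing period if the message lacks one,
// explains the blame category, and ends with Discord/docs/GitHub links as calls to action:
console.log(err.toString());
// AI.JSX(2004): Fixie API returned status 500: internal server error.
// This is a runtime error that's expected to occur with some frequency. ...
// Need help?
// * Discord: https://discord.com/channels/1065011484125569147/1121125525142904862
// * Docs: https://docs.ai-jsx.com/
// * GH: https://github.com/fixie-ai/ai-jsx/issues
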
7 changes: 5 additions & 2 deletions packages/ai-jsx/src/core/image-gen.tsx
@@ -5,6 +5,7 @@

import * as AI from '../index.js';
import { Node, Component, RenderContext } from '../index.js';
import { AIJSXError, ErrorCode } from '../core/errors.js';
import { DalleImageGen } from '../lib/openai.js';
import { getEnvVar } from '../lib/util.js';

@@ -34,8 +35,10 @@ function AutomaticImageGenModel({ children, ...props }: ImageGenPropsWithChildre
return <DalleImageGen {...props}>{children}</DalleImageGen>;
}

throw new Error(
'No image generation model was specified. Set the OPENAI_API_KEY environment variable to use OpenAI or use an explicit ImageGenProvider.'
throw new AIJSXError(
'No image generation model was specified. Set the OPENAI_API_KEY environment variable to use OpenAI or use an explicit ImageGenProvider.',
ErrorCode.MissingImageModel,
'user'
);
}

7 changes: 6 additions & 1 deletion packages/ai-jsx/src/experimental/next/index.ts
@@ -3,13 +3,18 @@ import 'server-only';
import { LogImplementation } from '../../core/log.js';
import * as AI from '../../react/core.js';
import { asJsxBoundary } from '../../react/jsx-boundary.js';
import { AIJSXError, ErrorCode } from '../../core/errors.js';
export * from '../../react/core.js';

function unwrapReact(partiallyRendered: AI.PartiallyRendered): ReactModule.ReactNode {
if (AI.isElement(partiallyRendered)) {
// This should be an AI.React element.
if (partiallyRendered.tag !== AI.React) {
throw new Error('AI.jsx internal error: unwrapReact only expects to see AI.React elements or strings.');
throw new AIJSXError(
'unwrapReact only expects to see AI.React elements or strings.',
ErrorCode.UnexpectedRenderType,
'internal'
);
}

return partiallyRendered.props.children;

3 comments on commit 92c34d9

@vercel vercel bot commented on 92c34d9 Jun 27, 2023

Successfully deployed to the following URLs:

ai-jsx-docs – ./packages/docs

ai-jsx-docs-fixie-ai.vercel.app
ai-jsx-docs.vercel.app
docs.ai-jsx.com
ai-jsx-docs-git-main-fixie-ai.vercel.app

@vercel vercel bot commented on 92c34d9 Jun 27, 2023

Successfully deployed to the following URLs:

ai-jsx-tutorial-nextjs – ./packages/tutorial-nextjs

ai-jsx-tutorial-nextjs.vercel.app
ai-jsx-tutorial-nextjs-fixie-ai.vercel.app
ai-jsx-tutorial-nextjs-git-main-fixie-ai.vercel.app

@vercel vercel bot commented on 92c34d9 Jun 27, 2023

Successfully deployed to the following URLs:

ai-jsx-nextjs-demo – ./packages/nextjs-demo

ai-jsx-nextjs-demo-fixie-ai.vercel.app
ai-jsx-nextjs-demo.vercel.app
ai-jsx-nextjs-demo-git-main-fixie-ai.vercel.app
