23 changes: 22 additions & 1 deletion examples/openui-chat/README.md
@@ -2,7 +2,14 @@ This is an [OpenUI](https://openui.com) Agent Chat project bootstrapped with [`o

## Getting Started

First, run the development server:
First, create a `.env` file:

```env
OPENAI_API_KEY=sk-your-key-here
OPENAI_MODEL=gpt-5.4
```

Then run the development server:

```bash
npm run dev
@@ -19,6 +26,20 @@ Open [http://localhost:3000](http://localhost:3000) with your browser to see the
You can start editing the page by modifying `src/app/api/chat/route.ts` and improving your agent
by adding system prompts or tools.
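
For example, a system prompt and an extra tool can be wired into the route. This is a minimal, hypothetical sketch assuming the route drives the agent loop with the OpenAI SDK's `runTools` helper (exposed as `client.chat.completions.runTools` or `client.beta.chat.completions.runTools` depending on your `openai` package version); the `getTime` tool and its schema are illustrative, not part of the project:

```ts
// Hypothetical sketch: add a system prompt plus one extra runnable tool.
async function getTime(): Promise<string> {
  // Illustrative tool body; replace with your own logic.
  return new Date().toISOString();
}

const runner = client.chat.completions.runTools({
  model: MODEL,
  messages: [
    { role: "system", content: "You are a concise assistant. Prefer tools for math and time." },
    ...chatMessages,
  ],
  tools: [
    {
      type: "function",
      function: {
        function: getTime,
        description: "Returns the current time as an ISO-8601 string.",
        parameters: { type: "object", properties: {} },
      },
    },
  ],
  stream: true,
});
```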

## OpenAI-compatible APIs

The example chat app supports OpenAI-compatible APIs through the optional `OPENAI_BASE_URL` environment variable.

Example using OpenRouter:

```env
OPENAI_API_KEY=sk-or-v1-...
OPENAI_BASE_URL=https://openrouter.ai/api/v1
OPENAI_MODEL=openai/gpt-4.1-mini
```

This also works with other OpenAI-compatible providers.
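
Under the hood, these variables feed the OpenAI client constructed in `src/app/api/chat/route.ts`, roughly as in the excerpt below (a minimal sketch mirroring this change; the rest of the route is omitted):

```ts
import OpenAI from "openai";

// When OPENAI_BASE_URL is unset, the SDK falls back to the default OpenAI endpoint.
const client = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  baseURL: process.env.OPENAI_BASE_URL || undefined,
});

// The model is configurable via OPENAI_MODEL, with a hard-coded fallback.
const MODEL = process.env.OPENAI_MODEL || "gpt-5.4";
```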

## Learn More

To learn more about OpenUI, take a look at the following resources:
100 changes: 87 additions & 13 deletions examples/openui-chat/src/app/api/chat/route.ts
@@ -60,7 +60,7 @@ function calculate({ expression }: { expression: string }): Promise<string> {
setTimeout(() => {
try {
const sanitized = expression.replace(/[^0-9+\-*/().%\s,Math.sqrtpowabsceilfloorround]/g, "");

const result = new Function(`return (${sanitized})`)();
resolve(JSON.stringify({ expression, result: Number(result) }));
} catch {
@@ -201,7 +201,9 @@ export async function POST(req: NextRequest) {

const client = new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
baseURL: process.env.OPENAI_BASE_URL || undefined,
});

const MODEL = process.env.OPENAI_MODEL || "gpt-5.4";

// eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -211,9 +213,11 @@ export async function POST(req: NextRequest) {
if (m.role === "assistant" && m.tool_calls?.length) {
// Strip tool_calls (runTools re-runs the agentic loop server-side)
// but preserve content so prior replies remain in context.
const { tool_calls: _tc, ...rest } = m; // eslint-disable-line @typescript-eslint/no-unused-vars
const { tool_calls: _tc, ...rest } = m;

return rest;
}

return m;
});

@@ -229,15 +233,32 @@ export async function POST(req: NextRequest) {
start(controller) {
const enqueue = (data: Uint8Array) => {
if (controllerClosed) return;
try { controller.enqueue(data); } catch { /* already closed */ }

try {
controller.enqueue(data);
} catch {
// already closed
}
};

const close = () => {
if (controllerClosed) return;

controllerClosed = true;
try { controller.close(); } catch { /* already closed */ }

try {
controller.close();
} catch {
// already closed
}
};

const pendingCalls: Array<{ id: string; name: string; arguments: string }> = [];
const pendingCalls: Array<{
id: string;
name: string;
arguments: string;
}> = [];

let callIdx = 0;
let resultIdx = 0;

@@ -246,35 +267,78 @@ export async function POST(req: NextRequest) {
model: MODEL,
messages: chatMessages,
tools,
stream: true
stream: true,
});

// eslint-disable-next-line @typescript-eslint/no-explicit-any
runner.on("functionToolCall", (fc: any) => {
const id = `tc-${callIdx}`;
pendingCalls.push({ id, name: fc.name, arguments: fc.arguments });
enqueue(sseToolCallStart(encoder, { id, function: { name: fc.name } }, callIdx));

pendingCalls.push({
id,
name: fc.name,
arguments: fc.arguments,
});

enqueue(
sseToolCallStart(
encoder,
{
id,
function: {
name: fc.name,
},
},
callIdx,
),
);

callIdx++;
});

runner.on("functionToolCallResult", (result: string) => {
const tc = pendingCalls[resultIdx];

if (tc) {
enqueue(sseToolCallArgs(encoder, { id: tc.id, function: { arguments: tc.arguments } }, result, resultIdx));
enqueue(
sseToolCallArgs(
encoder,
{
id: tc.id,
function: {
arguments: tc.arguments,
},
},
result,
resultIdx,
),
);
}

resultIdx++;
});

// eslint-disable-next-line @typescript-eslint/no-explicit-any
runner.on("chunk", (chunk: any) => {
const choice = chunk.choices?.[0];
const delta = choice?.delta;

if (!delta) return;

if (delta.content) {
enqueue(encoder.encode(`data: ${JSON.stringify(chunk)}\n\n`));
enqueue(
encoder.encode(
`data: ${JSON.stringify(chunk)}\n\n`,
),
);
}

if (choice?.finish_reason === "stop") {
enqueue(encoder.encode(`data: ${JSON.stringify(chunk)}\n\n`));
enqueue(
encoder.encode(
`data: ${JSON.stringify(chunk)}\n\n`,
),
);
}
});

@@ -285,9 +349,19 @@ export async function POST(req: NextRequest) {

// eslint-disable-next-line @typescript-eslint/no-explicit-any
runner.on("error", (err: any) => {
const msg = err instanceof Error ? err.message : "Stream error";
const msg =
err instanceof Error
? err.message
: "Stream error";

console.error("Chat route error:", msg);
enqueue(encoder.encode(`data: ${JSON.stringify({ error: msg })}\n\n`));

enqueue(
encoder.encode(
`data: ${JSON.stringify({ error: msg })}\n\n`,
),
);

close();
});
},