This repository was archived by the owner on May 13, 2025. It is now read-only.

Commit e72aa8e

Run oai-compatible api via separate koa app on a separate port
1 parent: 1b0e49f

3 files changed: +79 −55 lines

server.ts

Lines changed: 14 additions & 23 deletions
@@ -27,39 +27,30 @@ const DIR_FRONTEND = path.join(__dirname, "dist");
 const DIR_DATA = path.join(__dirname, "data");
 const DIR_CUSTOM_NODES = path.join(__dirname, "custom_nodes");
 
-// API setup
-
-const app = new Koa();
-const router = new Router();
+ensureDirExists(DIR_DATA);
+ensureDirExists(DIR_CUSTOM_NODES);
 
-setupResourcesApi(router, DIR_DATA, DIR_CUSTOM_NODES);
-setupExecutorApi(router, DIR_DATA, DIR_CUSTOM_NODES);
-// setupExecutorWs(router);
+// Server setup
 
-// Setup folders
+const appMain = new Koa();
+const routerMain = new Router();
 
-ensureDirExists(DIR_DATA);
-ensureDirExists(DIR_CUSTOM_NODES);
+const portOpenAi: number = argv.port_openai || 5002;
 
-// Config: middleware
+setupResourcesApi(routerMain, DIR_DATA, DIR_CUSTOM_NODES);
+setupExecutorApi(routerMain, DIR_DATA, DIR_CUSTOM_NODES, portOpenAi);
 
-app
+appMain
   // body parsing
-  .use(
-    koaBody({
-      jsonLimit: "1gb",
-    })
-  )
+  .use(koaBody({ jsonLimit: "10240gb" }))
   // routing
-  .use(router.routes())
-  .use(router.allowedMethods())
+  .use(routerMain.routes())
+  .use(routerMain.allowedMethods())
   // frontend
   .use(serve(DIR_FRONTEND));
 
-// Config: server
-
 const port: number = argv.port || 12538;
 
-app.listen(port, () => {
-  console.log(`Server started on http://localhost:${port.toString()}`);
+appMain.listen(port, () => {
+  console.log(`App started on http://localhost:${port}`);
 });
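
The two ports come from command-line flags (`argv.port` and `argv.port_openai`); the argv setup itself is not part of this commit. A minimal sketch of what that wiring might look like, assuming a minimist-style parser (purely illustrative):

// Hypothetical argv setup, assuming minimist; the parser actually used by
// the project is not shown in this commit.
import minimist from "minimist";

// e.g. `node server.js --port 12538 --port_openai 5002`
const argv = minimist(process.argv.slice(2));

const port: number = argv.port || 12538; // main app: frontend + /api routes
const portOpenAi: number = argv.port_openai || 5002; // OpenAI-compatible app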

server/executor.ts

Lines changed: 33 additions & 28 deletions
@@ -344,7 +344,8 @@ const runGraph = async (
 export const setupExecutorApi = (
   router: Router,
   dirData: string,
-  dirCustomNodes: string
+  dirCustomNodes: string,
+  portOpenAi: number
 ) => {
   // Executor connection for ping api
   executorStorage.subscribe((instance) => {
@@ -401,36 +402,40 @@ export const setupExecutorApi = (
   });
 
   // Endpoints to receive messages from an OpenAI-compatible client
-  setupOpenAiCompatibleAPI(router, async (message, checkRequestActive) => {
-    messageQueue.push(message);
-    addMessageToSession(message);
-    console.log("Received message from OAI API:", message.content);
-
-    // Ensure the correct chain is running
-    await runGraph(message.chainId, dirData, dirCustomNodes);
-
-    // If the graph did not run, something went wrong. Return null.
-    if (!executorStorage.get()) return null;
-
-    // Wait for message with the assistant role to appear in the executor session
-    let result: ChatMessage | null = null;
-    while (checkRequestActive()) {
-      const session = executorStorage.get();
-      if (!session) break;
-
-      const messages = session.sessionMessages;
-      if (messages.length) {
-        const lastMessage = messages[messages.length - 1];
-        if (lastMessage.role === "assistant") {
-          result = lastMessage;
-          break;
+  // Runs as a separate app on a separate port
+  setupOpenAiCompatibleAPI(
+    portOpenAi,
+    async (message, checkRequestActive) => {
+      messageQueue.push(message);
+      addMessageToSession(message);
+      console.log("Received message from OAI API:", message.content);
+
+      // Ensure the correct chain is running
+      await runGraph(message.chainId, dirData, dirCustomNodes);
+
+      // If the graph did not run, something went wrong. Return null.
+      if (!executorStorage.get()) return null;
+
+      // Wait for message with the assistant role to appear in the executor session
+      let result: ChatMessage | null = null;
+      while (checkRequestActive()) {
+        const session = executorStorage.get();
+        if (!session) break;
+
+        const messages = session.sessionMessages;
+        if (messages.length) {
+          const lastMessage = messages[messages.length - 1];
+          if (lastMessage.role === "assistant") {
+            result = lastMessage;
+            break;
+          }
         }
-      }
 
-      await new Promise((resolve) => setTimeout(resolve, 100));
+        await new Promise((resolve) => setTimeout(resolve, 100));
+      }
+      return result;
     }
-    return result;
-  });
+  );
 
   // Endpoint to run graph
   router.post("/api/executor/run/:id", async (ctx) => {
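
The callback above long-polls executorStorage every 100 ms until an assistant message appears or `checkRequestActive()` reports the request has gone away. The same pattern, pulled out as a generic helper (illustrative only, not part of the commit):

// Illustrative helper, not part of the commit: poll for a value until it
// appears or the request is no longer active.
const pollUntil = async <T>(
  getValue: () => T | null,
  isActive: () => boolean,
  intervalMs = 100
): Promise<T | null> => {
  while (isActive()) {
    const value = getValue();
    if (value !== null) return value;
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
  return null;
};

With such a helper, the wait above would reduce to something like `await pollUntil(() => lastAssistantMessage(), checkRequestActive)`, where `lastAssistantMessage` is a hypothetical accessor over the session messages.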

server/openai.ts

Lines changed: 32 additions & 4 deletions
@@ -1,18 +1,32 @@
-import type Router from "koa-router";
+import Koa from "koa";
+import Router from "koa-router";
+import { koaBody } from "koa-body";
 import { v4 as uuid } from "uuid";
 import mime from "mime-types";
 
 import { ChatMessage } from "../src/data/types.ts";
 import { MsgUtils } from "../src/util/MsgUtils.ts";
 
+const appOpenAi = new Koa();
+const routerOpenAi = new Router();
+
+/**
+ * Set up the OpenAI-compatible API.
+ *
+ * Runs on a separate port, to allow easy forwarding
+ * separately from the main app for special use-cases.
+ *
+ * @param port
+ * @param onMessage
+ */
 export const setupOpenAiCompatibleAPI = (
-  router: Router,
+  port: number,
   onMessage: (
     message: ChatMessage,
     checkRequestActive: () => boolean
   ) => Promise<ChatMessage | null>
 ) => {
-  router.post("/v1/completions", async (ctx) => {
+  routerOpenAi.post("/v1/completions", async (ctx) => {
     try {
       const { model, prompt } = ctx.request.body;
 
@@ -50,7 +64,7 @@ export const setupOpenAiCompatibleAPI = (
     }
   });
 
-  router.post("/v1/chat/completions", async (ctx) => {
+  routerOpenAi.post("/v1/chat/completions", async (ctx) => {
     try {
       const { model, messages } = ctx.request.body;
 
@@ -127,4 +141,18 @@ export const setupOpenAiCompatibleAPI = (
       ctx.status = 400;
     }
   });
+
+  // Set up the server itself
+  appOpenAi
+    // body parsing
+    .use(koaBody({ jsonLimit: "10240gb" }))
+    // routing
+    .use(routerOpenAi.routes())
+    .use(routerOpenAi.allowedMethods());
+
+  appOpenAi.listen(port, () => {
+    console.log(
+      `OpenAI-compatible API started on http://localhost:${port}`
+    );
+  });
 };
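
With the standalone app listening (port 5002 by default, per server.ts), an OpenAI-compatible client can be pointed at it directly. A hypothetical request, assuming the usual OpenAI chat payload; how `model` is mapped to a chain is not confirmed by this diff:

// Hypothetical client call against the standalone API; port 5002 is the
// default from server.ts, and the response shape is assumed to be OpenAI-like.
const callChat = async () => {
  const res = await fetch("http://localhost:5002/v1/chat/completions", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      model: "default", // placeholder; the handler reads `model` from the body
      messages: [{ role: "user", content: "Hello" }],
    }),
  });
  return res.json();
};

callChat().then((data) => console.log(data));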
