diff --git a/.github/workflows/cortex-build.yml b/.github/workflows/cortex-build.yml
index fdaed2f5f..0b736c22e 100644
--- a/.github/workflows/cortex-build.yml
+++ b/.github/workflows/cortex-build.yml
@@ -1,4 +1,4 @@
-name: CI Cortex CPP
+name: CI Cortex Release
 on:
   push:
diff --git a/cortex-js/package.json b/cortex-js/package.json
index c658a60c8..2d8080df5 100644
--- a/cortex-js/package.json
+++ b/cortex-js/package.json
@@ -12,9 +12,9 @@
     "dev": "nest dev",
     "build": "nest build && cpx \"cpuinfo/bin/**\" dist/bin",
     "build:binary": "run-script-os && cpx \"cpuinfo/bin/**\" dist/bin",
-    "build:binary:windows": "bun build --compile --target=bun-windows-x64 ./src/command.js --outfile cortex.exe --external @nestjs/microservices --external @nestjs/websockets/socket-module --external class-transformer/storage",
-    "build:binary:linux": "bun build --compile --target=bun-linux-x64 ./src/command.js --outfile cortex --external @nestjs/microservices --external @nestjs/websockets/socket-module --external class-transformer/storage",
-    "build:binary:macos": "bun build --compile --target=bun-darwin-arm64 ./src/command.js --outfile cortex --external @nestjs/microservices --external @nestjs/websockets/socket-module --external class-transformer/storage",
+    "build:binary:windows": "bun build --compile --target=bun-windows-x64 ./src/command.ts --outfile cortex.exe --external @nestjs/microservices --external @nestjs/websockets/socket-module --external class-transformer/storage",
+    "build:binary:linux": "bun build --compile --target=bun-linux-x64 ./src/command.ts --outfile cortex --external @nestjs/microservices --external @nestjs/websockets/socket-module --external class-transformer/storage",
+    "build:binary:macos": "bun build --compile --target=bun-darwin-arm64 ./src/command.ts --outfile cortex --external @nestjs/microservices --external @nestjs/websockets/socket-module --external class-transformer/storage",
     "format": "prettier --write \"src/**/*.ts\" \"test/**/*.ts\"",
     "build:extensions": "run-script-os",
     "build:extensions:windows": "powershell -command \"$jobs = Get-ChildItem -Path './src/extensions' -Directory | ForEach-Object { Start-Job -Name ($_.Name) -ScriptBlock { param($_dir); try { Set-Location $_dir; yarn; yarn build; Write-Output 'Build successful in ' + $_dir } catch { Write-Error 'Error in ' + $_dir; throw } } -ArgumentList $_.FullName }; $jobs | Wait-Job; $jobs | ForEach-Object { Receive-Job -Job $_ -Keep } | ForEach-Object { Write-Host $_ }; $failed = $jobs | Where-Object { $_.State -ne 'Completed' -or $_.ChildJobs[0].JobStateInfo.State -ne 'Completed' }; if ($failed) { Exit 1 }\"",
diff --git a/cortex-js/src/app.ts b/cortex-js/src/app.ts
new file mode 100644
index 000000000..128f574de
--- /dev/null
+++ b/cortex-js/src/app.ts
@@ -0,0 +1,72 @@
+import { NestFactory } from '@nestjs/core';
+import { DocumentBuilder, SwaggerModule } from '@nestjs/swagger';
+import { AppModule } from './app.module';
+import { FileManagerService } from './infrastructure/services/file-manager/file-manager.service';
+import { ValidationPipe } from '@nestjs/common';
+export const getApp = async () => {
+  const app = await NestFactory.create(AppModule, {
+    snapshot: true,
+    cors: true,
+    logger: console
+  });
+
+  const fileService = app.get(FileManagerService);
+  await fileService.getConfig();
+
+  app.useGlobalPipes(
+    new ValidationPipe({
+      transform: true,
+      enableDebugMessages: true,
+    }),
+  );
+
+  const config = new DocumentBuilder()
+    .setTitle('Cortex API')
+    .setDescription(
+      'Cortex API provides a command-line interface (CLI) for seamless interaction with large language models (LLMs). Fully compatible with the [OpenAI API](https://platform.openai.com/docs/api-reference), it enables straightforward command execution and management of LLM interactions.',
+    )
+    .setVersion('1.0')
+    .addTag(
+      'Inference',
+      'This endpoint initiates interaction with a Language Learning Model (LLM).',
+    )
+    .addTag(
+      'Assistants',
+      'These endpoints manage the lifecycle of an Assistant within a conversation thread.',
+    )
+    .addTag(
+      'Models',
+      'These endpoints provide a list and descriptions of all available models within the Cortex framework.',
+    )
+    .addTag(
+      'Messages',
+      'These endpoints manage the retrieval and storage of conversation content, including responses from LLMs and other metadata related to chat interactions.',
+    )
+    .addTag(
+      'Threads',
+      'These endpoints handle the creation, retrieval, updating, and deletion of conversation threads.',
+    )
+    .addTag(
+      'Embeddings',
+      'Endpoint for creating and retrieving embedding vectors from text inputs using specified models.',
+    )
+    .addTag(
+      'Status',
+      "Endpoint for actively querying the health status of the Cortex's API server.",
+    )
+    .addTag(
+      'Processes',
+      'Endpoint for terminating the Cortex API server processes.',
+    )
+    .addTag(
+      'Events',
+      'Endpoints for observing Cortex statuses through event notifications.',
+    )
+    .addServer('http://localhost:1337')
+    .addServer('http://localhost:1337/v1')
+    .build();
+  const document = SwaggerModule.createDocument(app, config);
+
+  SwaggerModule.setup('api', app, document);
+  return app;
+};
diff --git a/cortex-js/src/infrastructure/commanders/serve.command.ts b/cortex-js/src/infrastructure/commanders/serve.command.ts
index db142cccb..a8ade2238 100644
--- a/cortex-js/src/infrastructure/commanders/serve.command.ts
+++ b/cortex-js/src/infrastructure/commanders/serve.command.ts
@@ -1,13 +1,13 @@
-import { spawn } from 'child_process';
 import {
   defaultCortexJsHost,
   defaultCortexJsPort,
 } from '@/infrastructure/constants/cortex';
 import { CommandRunner, SubCommand, Option } from 'nest-commander';
-import { join } from 'path';
 import { SetCommandContext } from './decorators/CommandContext';
 import { ServeStopCommand } from './sub-commands/serve-stop.command';
 import { ContextService } from '../services/context/context.service';
+import { getApp } from '@/app';
+import { Logger } from '@nestjs/common';
 
 type ServeOptions = {
   address?: string;
@@ -30,32 +30,14 @@
     const host = options?.address || defaultCortexJsHost;
     const port = options?.port || defaultCortexJsPort;
 
-    return this.startServer(host, port, options);
+    return this.startServer(host, port);
   }
 
-  private async startServer(
-    host: string,
-    port: number,
-    options: ServeOptions = { detach: false },
-  ) {
-    const serveProcess = spawn(
-      'node',
-      [join(__dirname, '../../../dist/src/main.js')],
-      {
-        env: {
-          ...process.env,
-          CORTEX_JS_HOST: host,
-          CORTEX_JS_PORT: port.toString(),
-          NODE_ENV: 'production',
-        },
-        stdio: options?.detach ? 'ignore' : 'inherit',
-        detached: options?.detach,
-      },
-    );
-    if (options?.detach) {
-      serveProcess.unref();
-      console.log('Started server at http://%s:%d', host, port);
-    }
+  private async startServer(host: string, port: number) {
+    const app = await getApp();
+
+    await app.listen(port, host);
+    console.log(`Started server at http://${host}:${port}`);
   }
 
   @Option({
@@ -73,14 +55,4 @@
   parsePort(value: string) {
     return parseInt(value, 10);
   }
-
-  @Option({
-    flags: '-d, --detach',
-    description: 'Run the server in detached mode',
-    defaultValue: false,
-    name: 'detach',
-  })
-  parseDetach() {
-    return true;
-  }
 }
diff --git a/cortex-js/src/main.ts b/cortex-js/src/main.ts
index c745de9b8..cb6a6f079 100644
--- a/cortex-js/src/main.ts
+++ b/cortex-js/src/main.ts
@@ -1,78 +1,11 @@
-import { NestFactory } from '@nestjs/core';
-import { AppModule } from './app.module';
-import { DocumentBuilder, SwaggerModule } from '@nestjs/swagger';
-import { ValidationPipe } from '@nestjs/common';
 import {
   defaultCortexJsHost,
   defaultCortexJsPort,
 } from '@/infrastructure/constants/cortex';
-import { FileManagerService } from './infrastructure/services/file-manager/file-manager.service';
+import { getApp } from './app';
 
 async function bootstrap() {
-  const app = await NestFactory.create(AppModule, {
-    snapshot: true,
-    cors: true,
-  });
-
-  const fileService = app.get(FileManagerService);
-  await fileService.getConfig();
-
-  app.useGlobalPipes(
-    new ValidationPipe({
-      transform: true,
-      enableDebugMessages: true,
-    }),
-  );
-
-  const config = new DocumentBuilder()
-    .setTitle('Cortex API')
-    .setDescription(
-      'Cortex API provides a command-line interface (CLI) for seamless interaction with large language models (LLMs). Fully compatible with the [OpenAI API](https://platform.openai.com/docs/api-reference), it enables straightforward command execution and management of LLM interactions.',
-    )
-    .setVersion('1.0')
-    .addTag(
-      'Inference',
-      'This endpoint initiates interaction with a Language Learning Model (LLM).',
-    )
-    .addTag(
-      'Assistants',
-      'These endpoints manage the lifecycle of an Assistant within a conversation thread.',
-    )
-    .addTag(
-      'Models',
-      'These endpoints provide a list and descriptions of all available models within the Cortex framework.',
-    )
-    .addTag(
-      'Messages',
-      'These endpoints manage the retrieval and storage of conversation content, including responses from LLMs and other metadata related to chat interactions.',
-    )
-    .addTag(
-      'Threads',
-      'These endpoints handle the creation, retrieval, updating, and deletion of conversation threads.',
-    )
-    .addTag(
-      'Embeddings',
-      'Endpoint for creating and retrieving embedding vectors from text inputs using specified models.',
-    )
-    .addTag(
-      'Status',
-      "Endpoint for actively querying the health status of the Cortex's API server.",
-    )
-    .addTag(
-      'Processes',
-      'Endpoint for terminating the Cortex API server processes.',
-    )
-    .addTag(
-      'Events',
-      'Endpoints for observing Cortex statuses through event notifications.',
-    )
-    .addServer('http://localhost:1337')
-    .addServer('http://localhost:1337/v1')
-    .build();
-  const document = SwaggerModule.createDocument(app, config);
-
-  SwaggerModule.setup('api', app, document);
-
+  const app = await getApp();
   // getting port from env
   const host = process.env.CORTEX_JS_HOST || defaultCortexJsHost;
   const port = process.env.CORTEX_JS_PORT || defaultCortexJsPort;
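
Note on the reworked flow: `cortex serve` no longer spawns `dist/src/main.js` in a detached child process; both the CLI command and `main.ts` now build the Nest application through the shared `getApp()` factory and call `app.listen()` in-process. A minimal usage sketch follows, assuming the cortex-js NestJS dependencies and its `@/` path alias are available; the `start` wrapper itself is illustrative and not part of the patch.

import { getApp } from '@/app';
import {
  defaultCortexJsHost,
  defaultCortexJsPort,
} from '@/infrastructure/constants/cortex';

// Build the shared Nest app (config load, validation pipe, Swagger docs)
// and bind it to the configured host/port, mirroring ServeCommand.startServer
// and the trimmed-down bootstrap() in main.ts.
const start = async () => {
  const app = await getApp();
  const host = process.env.CORTEX_JS_HOST || defaultCortexJsHost;
  const port = process.env.CORTEX_JS_PORT || defaultCortexJsPort;
  await app.listen(port, host);
  console.log(`Started server at http://${host}:${port}`);
};

start();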