Skip to content
This repository was archived by the owner on Jul 4, 2025. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/cortex-build.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: CI Cortex CPP
name: CI Cortex Release

on:
push:
Expand Down
6 changes: 3 additions & 3 deletions cortex-js/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@
"dev": "nest dev",
"build": "nest build && cpx \"cpuinfo/bin/**\" dist/bin",
"build:binary": "run-script-os && cpx \"cpuinfo/bin/**\" dist/bin",
"build:binary:windows": "bun build --compile --target=bun-windows-x64 ./src/command.js --outfile cortex.exe --external @nestjs/microservices --external @nestjs/websockets/socket-module --external class-transformer/storage",
"build:binary:linux": "bun build --compile --target=bun-linux-x64 ./src/command.js --outfile cortex --external @nestjs/microservices --external @nestjs/websockets/socket-module --external class-transformer/storage",
"build:binary:macos": "bun build --compile --target=bun-darwin-arm64 ./src/command.js --outfile cortex --external @nestjs/microservices --external @nestjs/websockets/socket-module --external class-transformer/storage",
"build:binary:windows": "bun build --compile --target=bun-windows-x64 ./src/command.ts --outfile cortex.exe --external @nestjs/microservices --external @nestjs/websockets/socket-module --external class-transformer/storage",
"build:binary:linux": "bun build --compile --target=bun-linux-x64 ./src/command.ts --outfile cortex --external @nestjs/microservices --external @nestjs/websockets/socket-module --external class-transformer/storage",
"build:binary:macos": "bun build --compile --target=bun-darwin-arm64 ./src/command.ts --outfile cortex --external @nestjs/microservices --external @nestjs/websockets/socket-module --external class-transformer/storage",
"format": "prettier --write \"src/**/*.ts\" \"test/**/*.ts\"",
"build:extensions": "run-script-os",
"build:extensions:windows": "powershell -command \"$jobs = Get-ChildItem -Path './src/extensions' -Directory | ForEach-Object { Start-Job -Name ($_.Name) -ScriptBlock { param($_dir); try { Set-Location $_dir; yarn; yarn build; Write-Output 'Build successful in ' + $_dir } catch { Write-Error 'Error in ' + $_dir; throw } } -ArgumentList $_.FullName }; $jobs | Wait-Job; $jobs | ForEach-Object { Receive-Job -Job $_ -Keep } | ForEach-Object { Write-Host $_ }; $failed = $jobs | Where-Object { $_.State -ne 'Completed' -or $_.ChildJobs[0].JobStateInfo.State -ne 'Completed' }; if ($failed) { Exit 1 }\"",
Expand Down
72 changes: 72 additions & 0 deletions cortex-js/src/app.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
import { NestFactory } from '@nestjs/core';
import { DocumentBuilder, SwaggerModule } from '@nestjs/swagger';
import { AppModule } from './app.module';
import { FileManagerService } from './infrastructure/services/file-manager/file-manager.service';
import { ValidationPipe } from '@nestjs/common';
/**
 * Builds and configures the Cortex NestJS application.
 *
 * Creates the Nest app with CORS enabled, loads the persisted file-based
 * configuration, installs a global validation pipe, and mounts the Swagger
 * UI at `/api`. The caller is responsible for calling `app.listen(...)`.
 *
 * @returns the fully configured (but not yet listening) Nest application.
 */
export const getApp = async () => {
  const app = await NestFactory.create(AppModule, {
    snapshot: true,
    cors: true,
    // Route framework logs through the console so CLI users see them.
    logger: console,
  });

  // Ensure the on-disk configuration is loaded before serving any request.
  const fileService = app.get(FileManagerService);
  await fileService.getConfig();

  // Transform incoming payloads into DTO instances and emit detailed
  // validation errors (helpful while the API surface is still evolving).
  app.useGlobalPipes(
    new ValidationPipe({
      transform: true,
      enableDebugMessages: true,
    }),
  );

  const config = new DocumentBuilder()
    .setTitle('Cortex API')
    .setDescription(
      'Cortex API provides a command-line interface (CLI) for seamless interaction with large language models (LLMs). Fully compatible with the [OpenAI API](https://platform.openai.com/docs/api-reference), it enables straightforward command execution and management of LLM interactions.',
    )
    .setVersion('1.0')
    .addTag(
      'Inference',
      // Fixed: "Language Learning Model" -> "Large Language Model", for
      // consistency with the API description above.
      'This endpoint initiates interaction with a Large Language Model (LLM).',
    )
    .addTag(
      'Assistants',
      'These endpoints manage the lifecycle of an Assistant within a conversation thread.',
    )
    .addTag(
      'Models',
      'These endpoints provide a list and descriptions of all available models within the Cortex framework.',
    )
    .addTag(
      'Messages',
      'These endpoints manage the retrieval and storage of conversation content, including responses from LLMs and other metadata related to chat interactions.',
    )
    .addTag(
      'Threads',
      'These endpoints handle the creation, retrieval, updating, and deletion of conversation threads.',
    )
    .addTag(
      'Embeddings',
      'Endpoint for creating and retrieving embedding vectors from text inputs using specified models.',
    )
    .addTag(
      'Status',
      "Endpoint for actively querying the health status of the Cortex's API server.",
    )
    .addTag(
      'Processes',
      'Endpoint for terminating the Cortex API server processes.',
    )
    .addTag(
      'Events',
      'Endpoints for observing Cortex statuses through event notifications.',
    )
    // NOTE(review): port 1337 is hard-coded here but configurable elsewhere
    // (CORTEX_JS_PORT) — confirm these should stay in sync.
    .addServer('http://localhost:1337')
    .addServer('http://localhost:1337/v1')
    .build();
  const document = SwaggerModule.createDocument(app, config);

  SwaggerModule.setup('api', app, document);
  return app;
};
44 changes: 8 additions & 36 deletions cortex-js/src/infrastructure/commanders/serve.command.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
import { spawn } from 'child_process';
import {
defaultCortexJsHost,
defaultCortexJsPort,
} from '@/infrastructure/constants/cortex';
import { CommandRunner, SubCommand, Option } from 'nest-commander';
import { join } from 'path';
import { SetCommandContext } from './decorators/CommandContext';
import { ServeStopCommand } from './sub-commands/serve-stop.command';
import { ContextService } from '../services/context/context.service';
import { getApp } from '@/app';
import { Logger } from '@nestjs/common';

type ServeOptions = {
address?: string;
Expand All @@ -30,32 +30,14 @@ export class ServeCommand extends CommandRunner {
const host = options?.address || defaultCortexJsHost;
const port = options?.port || defaultCortexJsPort;

return this.startServer(host, port, options);
return this.startServer(host, port);
}

private async startServer(
host: string,
port: number,
options: ServeOptions = { detach: false },
) {
const serveProcess = spawn(
'node',
[join(__dirname, '../../../dist/src/main.js')],
{
env: {
...process.env,
CORTEX_JS_HOST: host,
CORTEX_JS_PORT: port.toString(),
NODE_ENV: 'production',
},
stdio: options?.detach ? 'ignore' : 'inherit',
detached: options?.detach,
},
);
if (options?.detach) {
serveProcess.unref();
console.log('Started server at http://%s:%d', host, port);
}
// Boots the Nest application in-process (replacing the previous approach of
// spawning `node dist/src/main.js` as a child process) and binds it to the
// given host/port. Resolves once the HTTP server is listening.
private async startServer(host: string, port: number) {
  const app = await getApp();

  await app.listen(port, host);
  console.log(`Started server at http://${host}:${port}`);
}

@Option({
Expand All @@ -73,14 +55,4 @@ export class ServeCommand extends CommandRunner {
// Convert the raw `--port` CLI flag string into a base-10 integer.
parsePort(value: string) {
  const parsed = Number.parseInt(value, 10);
  return parsed;
}

@Option({
flags: '-d, --detach',
description: 'Run the server in detached mode',
defaultValue: false,
name: 'detach',
})
parseDetach() {
return true;
}
}
71 changes: 2 additions & 69 deletions cortex-js/src/main.ts
Original file line number Diff line number Diff line change
@@ -1,78 +1,11 @@
import { NestFactory } from '@nestjs/core';
import { AppModule } from './app.module';
import { DocumentBuilder, SwaggerModule } from '@nestjs/swagger';
import { ValidationPipe } from '@nestjs/common';
import {
defaultCortexJsHost,
defaultCortexJsPort,
} from '@/infrastructure/constants/cortex';
import { FileManagerService } from './infrastructure/services/file-manager/file-manager.service';
import { getApp } from './app';

async function bootstrap() {
const app = await NestFactory.create(AppModule, {
snapshot: true,
cors: true,
});

const fileService = app.get(FileManagerService);
await fileService.getConfig();

app.useGlobalPipes(
new ValidationPipe({
transform: true,
enableDebugMessages: true,
}),
);

const config = new DocumentBuilder()
.setTitle('Cortex API')
.setDescription(
'Cortex API provides a command-line interface (CLI) for seamless interaction with large language models (LLMs). Fully compatible with the [OpenAI API](https://platform.openai.com/docs/api-reference), it enables straightforward command execution and management of LLM interactions.',
)
.setVersion('1.0')
.addTag(
'Inference',
'This endpoint initiates interaction with a Language Learning Model (LLM).',
)
.addTag(
'Assistants',
'These endpoints manage the lifecycle of an Assistant within a conversation thread.',
)
.addTag(
'Models',
'These endpoints provide a list and descriptions of all available models within the Cortex framework.',
)
.addTag(
'Messages',
'These endpoints manage the retrieval and storage of conversation content, including responses from LLMs and other metadata related to chat interactions.',
)
.addTag(
'Threads',
'These endpoints handle the creation, retrieval, updating, and deletion of conversation threads.',
)
.addTag(
'Embeddings',
'Endpoint for creating and retrieving embedding vectors from text inputs using specified models.',
)
.addTag(
'Status',
"Endpoint for actively querying the health status of the Cortex's API server.",
)
.addTag(
'Processes',
'Endpoint for terminating the Cortex API server processes.',
)
.addTag(
'Events',
'Endpoints for observing Cortex statuses through event notifications.',
)
.addServer('http://localhost:1337')
.addServer('http://localhost:1337/v1')
.build();
const document = SwaggerModule.createDocument(app, config);

SwaggerModule.setup('api', app, document);

const app = await getApp();
// getting port from env
const host = process.env.CORTEX_JS_HOST || defaultCortexJsHost;
const port = process.env.CORTEX_JS_PORT || defaultCortexJsPort;
Expand Down