This repository was archived by the owner on Jul 4, 2025. It is now read-only.
Merged
@@ -3,6 +3,8 @@ import { SetCommandContext } from '../decorators/CommandContext';
import { ContextService } from '@/infrastructure/services/context/context.service';
import { InitCliUsecases } from '../usecases/init.cli.usecases';
import { Engines } from '../types/engine.interface';
import { CortexUsecases } from '@/usecases/cortex/cortex.usecases';
import { FileManagerService } from '@/infrastructure/services/file-manager/file-manager.service';

@SubCommand({
name: 'init',
@@ -16,6 +18,8 @@ import { Engines } from '../types/engine.interface';
export class EnginesInitCommand extends CommandRunner {
constructor(
private readonly initUsecases: InitCliUsecases,
private readonly cortexUsecases: CortexUsecases,
private readonly fileManagerService: FileManagerService,
readonly contextService: ContextService,
) {
super();
@@ -26,14 +30,24 @@ export class EnginesInitCommand extends CommandRunner {
const options = passedParams.includes(Engines.llamaCPP)
? await this.initUsecases.defaultInstallationOptions()
: {};

const configs = await this.fileManagerService.getConfig();
const host = configs.cortexCppHost;
const port = configs.cortexCppPort;
// Should stop cortex before installing engine
if (await this.cortexUsecases.healthCheck(host, port)) {
await this.cortexUsecases.stopCortex();
}
return this.initUsecases
.installEngine(
options,
engine.includes('@') ? engine.split('@')[1] : 'latest',
engine,
true
true,
)
.then(() => console.log('Engine installed successfully!'))
.catch(() => console.error('Engine not found or installation failed!'));
.catch((e) =>
console.error('Install engine failed with reason: %s', e.message ?? e),
);
}
}
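
The hunk above reads the cortex-cpp host and port from the config and stops a running server before installing an engine. A minimal sketch of what such a health probe boils down to is shown below; this is not the PR's implementation (which builds the URL via CORTEX_CPP_HEALTH_Z_URL(host, port)), and the '/healthz' path and the isCortexCppRunning name are assumptions for illustration only.

// Sketch of a cortex-cpp health probe; '/healthz' and the function name are assumed.
async function isCortexCppRunning(host: string, port: number): Promise<boolean> {
  try {
    const res = await fetch(`http://${host}:${port}/healthz`);
    return res.ok; // any 2xx response means the server is reachable
  } catch {
    return false; // connection refused or timed out: treat as not running
  }
}

// Usage mirroring the command above: stop the server before replacing engine binaries.
// if (await isCortexCppRunning(host, port)) await this.cortexUsecases.stopCortex();
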
2 changes: 2 additions & 0 deletions cortex-js/src/infrastructure/commanders/models.command.ts
@@ -6,6 +6,7 @@ import { ModelStopCommand } from './models/model-stop.command';
import { ModelPullCommand } from './models/model-pull.command';
import { ModelRemoveCommand } from './models/model-remove.command';
import { ModelUpdateCommand } from './models/model-update.command';
import { RunCommand } from './shortcuts/run.command';

@SubCommand({
name: 'models',
@@ -17,6 +18,7 @@ import { ModelUpdateCommand } from './models/model-update.command';
ModelGetCommand,
ModelRemoveCommand,
ModelUpdateCommand,
RunCommand,
],
description: 'Subcommands for managing models',
})
@@ -60,7 +60,7 @@ export class ModelStartCommand extends CommandRunner {
/^(http|https):\/\/[^/]+\/.*/.test(existingModel.files[0])
) {
console.error(
`${modelId} not found on filesystem. Please try 'cortex pull ${modelId}' first.`,
`${modelId} not found on filesystem.\nPlease try 'cortex pull ${modelId}' first.`,
);
process.exit(1);
}
@@ -52,13 +52,9 @@ export class RunCommand extends CommandRunner {
exit(1);
}
}

// If not exist
// Try Pull
if (!(await this.modelsCliUsecases.getModel(modelId))) {
console.log(
`${modelId} not found on filesystem. Downloading from remote: https://huggingface.co/cortexso if possible.`,
);
await this.modelsCliUsecases.pullModel(modelId).catch((e: Error) => {
if (e instanceof ModelNotFoundException)
console.error('Model does not exist.');
@@ -78,6 +74,7 @@ process.exit(1);
process.exit(1);
}

// Check model compatibility on this machine
checkModelCompatibility(modelId);

const engine = existingModel.engine || Engines.llamaCPP;
@@ -37,7 +37,7 @@ export class ChatCliUsecases {
attach: boolean = true,
stopModel: boolean = true,
): Promise<void> {
if (attach) console.log(`Inorder to exit, type '${this.exitClause}'.`);
if (attach) console.log(`In order to exit, type '${this.exitClause}'.`);
const thread = await this.getOrCreateNewThread(modelId, threadId);
const messages: ChatCompletionMessage[] = (
await this.messagesUsecases.getLastMessagesByThread(thread.id, 10)
@@ -3,9 +3,7 @@ import { ModelsUsecases } from '@/usecases/models/models.usecases';
import { Model } from '@/domain/models/model.interface';
import { InquirerService } from 'nest-commander';
import { Inject, Injectable } from '@nestjs/common';
import { Presets, SingleBar } from 'cli-progress';

import { HttpService } from '@nestjs/axios';
import { StartModelSuccessDto } from '@/infrastructure/dtos/models/start-model-success.dto';
import { UpdateModelDto } from '@/infrastructure/dtos/models/update-model.dto';
import { FileManagerService } from '@/infrastructure/services/file-manager/file-manager.service';
@@ -14,14 +12,14 @@ import { load } from 'js-yaml';
import { existsSync, readdirSync, readFileSync } from 'fs';
import { isLocalModel } from '@/utils/normalize-model-id';
import { HuggingFaceRepoSibling } from '@/domain/models/huggingface.interface';
import { printLastErrorLines } from '@/utils/logs';

@Injectable()
export class ModelsCliUsecases {
constructor(
private readonly modelsUsecases: ModelsUsecases,
@Inject(InquirerService)
private readonly inquirerService: InquirerService,
private readonly httpService: HttpService,
private readonly fileService: FileManagerService,
) {}

@@ -40,11 +38,16 @@ export class ModelsCliUsecases {
...parsedPreset,
}))
.then((settings) => this.modelsUsecases.startModel(modelId, settings))
.catch(() => {
return {
modelId: modelId,
message: 'Model not found',
};
.catch(async (e) => {
console.error('Model start failed with reason:', e.message);

printLastErrorLines(await this.fileService.getDataFolderPath(), 5);

console.log(
'For more information, please check the logs at: %s',
join(await this.fileService.getDataFolderPath(), 'cortex.log'),
);
process.exit(1);
});
}

@@ -114,6 +117,11 @@
console.error('Model already exists');
process.exit(1);
}
// Checking dependencies

console.log(
`${modelId} not found on filesystem.\nDownloading from remote: https://huggingface.co/${modelId.includes('/') ? modelId : 'cortexso'} ...`,
);
await this.modelsUsecases.pullModel(modelId, true, (files) => {
return new Promise<HuggingFaceRepoSibling>(async (resolve) => {
const listChoices = files
20 changes: 16 additions & 4 deletions cortex-js/src/usecases/cortex/cortex.usecases.ts
@@ -12,6 +12,7 @@ import {
CORTEX_CPP_PROCESS_DESTROY_URL,
CORTEX_JS_STOP_API_SERVER_URL,
} from '@/infrastructure/constants/cortex';
import { createWriteStream, openSync } from 'fs';

@Injectable()
export class CortexUsecases {
@@ -25,8 +26,8 @@ export class CortexUsecases {

/**
* Start the Cortex CPP process
* @param attach
* @returns
* @param attach
* @returns
*/
async startCortex(
attach: boolean = false,
@@ -55,11 +56,16 @@
'cortex-cpp',
);

const writer = openSync(
join(await this.fileManagerService.getDataFolderPath(), 'cortex.log'),
'a+',
);

// go up one level to get the binary folder, have to also work on windows
this.cortexProcess = spawn(cortexCppPath, args, {
detached: !attach,
cwd: cortexCppFolderPath,
stdio: attach ? 'inherit' : undefined,
stdio: [0, writer, writer],
env: {
...process.env,
CUDA_VISIBLE_DEVICES: '0',
@@ -135,7 +141,13 @@
.catch(() => {});
}

private healthCheck(host: string, port: number): Promise<boolean> {
/**
* Check whether the Cortex CPP is healthy
* @param host
* @param port
* @returns
*/
healthCheck(host: string, port: number): Promise<boolean> {
return fetch(CORTEX_CPP_HEALTH_Z_URL(host, port))
.then((res) => {
if (res.ok) {
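
The stdio change above redirects the spawned cortex-cpp process's stdout and stderr into cortex.log instead of inheriting the CLI terminal. Taken in isolation, the pattern looks roughly like the sketch below; the data folder path and the binary invocation are placeholders, not values from the PR.

import { spawn } from 'child_process';
import { openSync } from 'fs';
import { join } from 'path';

// Placeholder data folder; the PR resolves it via fileManagerService.getDataFolderPath().
const dataFolderPath = '/tmp/cortex-data';

// openSync returns a numeric file descriptor; 'a+' appends, which is why
// cortex.log accumulates output across sessions.
const logFd = openSync(join(dataFolderPath, 'cortex.log'), 'a+');

// spawn() accepts raw file descriptors in the stdio array: stdin stays on the
// parent's fd 0, while the child's stdout and stderr both go to the log file.
const child = spawn('cortex-cpp', [], {
  detached: true,
  stdio: [0, logFd, logFd],
});
child.unref(); // allow the detached process to outlive the CLI
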
22 changes: 18 additions & 4 deletions cortex-js/src/usecases/models/models.usecases.ts
@@ -163,7 +163,7 @@ export class ModelsUsecases {
modelId,
};
}

console.log('Loading model...');
// update states and emitting event
this.activeModelStatuses[modelId] = {
model: modelId,
@@ -233,7 +233,7 @@
e,
TelemetrySource.CORTEX_CPP,
);
return {
throw {
message: e.message,
modelId,
};
@@ -365,14 +365,25 @@
const model: CreateModelDto = load(
readFileSync(join(modelFolder, 'model.yml'), 'utf-8'),
) as CreateModelDto;
if (model.engine === Engines.llamaCPP) {
if (model.engine === Engines.llamaCPP && model.files) {
const fileUrl = join(
await this.fileManagerService.getModelsPath(),
normalizeModelId(modelId),
llamaModelFile(model.files),
);
model.files = [fileUrl];
model.name = modelId.replace(':default', '');
} else if (model.engine === Engines.llamaCPP) {
model.files = [
join(
await this.fileManagerService.getModelsPath(),
normalizeModelId(modelId),
basename(
files.find((e) => e.rfilename.endsWith('.gguf'))?.rfilename ??
files[0].rfilename,
),
),
];
} else {
model.files = [modelFolder];
}
@@ -387,7 +398,10 @@
const fileUrl = join(
await this.fileManagerService.getModelsPath(),
normalizeModelId(modelId),
basename(files[0].rfilename),
basename(
files.find((e) => e.rfilename.endsWith('.gguf'))?.rfilename ??
files[0].rfilename,
),
);
await this.update(modelId, {
files: [fileUrl],
32 changes: 32 additions & 0 deletions cortex-js/src/utils/logs.ts
@@ -0,0 +1,32 @@
import { createReadStream } from 'fs';
import { join } from 'path';
import { createInterface } from 'readline';

/**
* Print the last N lines of the cortex.log file in the given data folder
* @param dataFolderPath
* @param numLines
*/
export async function printLastErrorLines(
dataFolderPath: string,
numLines: number = 5,
): Promise<void> {
const errorLines: string[] = [];

const fileStream = createReadStream(join(dataFolderPath, 'cortex.log'));
const rl = createInterface({
input: fileStream,
crlfDelay: Infinity,
});

for await (const line of rl) {
errorLines.push(line);
if (errorLines.length > numLines) {
errorLines.shift();
}
}

console.log(`Last errors:`);
errorLines.forEach((line) => console.log(line));
console.log('...');
}

Review thread (on the `numLines: number = 5` line):
Contributor: stupid qn from me - any reason why only 5 lines (will they see a truncated error message?)
Contributor: non blocking curious qn, otherwise LGTM!
Author: Yeah, that's a rough estimate for common cases. Since everything is written to the log file across sessions, we should only print a few lines (not just errors), similar to other applications; otherwise hundreds of lines of cortex-cpp output would be printed. The application is not run in attached mode here; in attached mode, users would see the full session logs.
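
As the review thread notes, printLastErrorLines prints the last numLines lines of cortex.log regardless of content. If filtering were ever wanted, a variant could look like the sketch below; the printOnlyErrorLines name and the 'ERROR' substring match are assumptions, not part of the PR.

import { createReadStream } from 'fs';
import { join } from 'path';
import { createInterface } from 'readline';

// Hypothetical variant: keep only the last `numLines` lines that contain 'ERROR'.
export async function printOnlyErrorLines(
  dataFolderPath: string,
  numLines: number = 5,
): Promise<void> {
  const errorLines: string[] = [];
  const rl = createInterface({
    input: createReadStream(join(dataFolderPath, 'cortex.log')),
    crlfDelay: Infinity,
  });

  for await (const line of rl) {
    if (!line.includes('ERROR')) continue; // skip non-error lines
    errorLines.push(line);
    if (errorLines.length > numLines) errorLines.shift(); // ring buffer of size numLines
  }

  console.log('Last errors:');
  errorLines.forEach((line) => console.log(line));
}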