diff --git a/cortex-js/src/infrastructure/commanders/chat.command.ts b/cortex-js/src/infrastructure/commanders/chat.command.ts
index 74d23c7f4..a9084d2a7
--- a/cortex-js/src/infrastructure/commanders/chat.command.ts
+++ b/cortex-js/src/infrastructure/commanders/chat.command.ts
@@ -6,7 +6,7 @@ import {
 } from 'nest-commander';
 import { ChatCliUsecases } from './usecases/chat.cli.usecases';
 import { exit } from 'node:process';
-import { PSCliUsecases } from './usecases/ps.cli.usecases';
+import { ModelStat, PSCliUsecases } from './usecases/ps.cli.usecases';
 import { ModelsUsecases } from '@/usecases/models/models.usecases';
 
 type ChatOptions = {
@@ -41,16 +41,7 @@ export class ChatCommand extends CommandRunner {
     if (models.length === 1) {
       modelId = models[0].modelId;
     } else if (models.length > 0) {
-      const { model } = await this.inquirerService.inquirer.prompt({
-        type: 'list',
-        name: 'model',
-        message: 'Select running model to chat with:',
-        choices: models.map((e) => ({
-          name: e.modelId,
-          value: e.modelId,
-        })),
-      });
-      modelId = model;
+      modelId = await this.modelInquiry(models);
     } else {
       console.error('Model ID is required');
       exit(1);
@@ -66,6 +57,19 @@
     );
   }
 
+  modelInquiry = async (models: ModelStat[]) => {
+    const { model } = await this.inquirerService.inquirer.prompt({
+      type: 'list',
+      name: 'model',
+      message: 'Select running model to chat with:',
+      choices: models.map((e) => ({
+        name: e.modelId,
+        value: e.modelId,
+      })),
+    });
+    return model;
+  };
+
   @Option({
     flags: '-t, --thread <thread_id>',
     description: 'Thread Id. If not provided, will create new thread',
diff --git a/cortex-js/src/infrastructure/commanders/models/model-start.command.ts b/cortex-js/src/infrastructure/commanders/models/model-start.command.ts
index 5be62c19b..d479b4ddf
--- a/cortex-js/src/infrastructure/commanders/models/model-start.command.ts
+++ b/cortex-js/src/infrastructure/commanders/models/model-start.command.ts
@@ -1,4 +1,9 @@
-import { CommandRunner, SubCommand, Option } from 'nest-commander';
+import {
+  CommandRunner,
+  SubCommand,
+  Option,
+  InquirerService,
+} from 'nest-commander';
 import { exit } from 'node:process';
 import { ModelsCliUsecases } from '../usecases/models.cli.usecases';
 import { CortexUsecases } from '@/usecases/cortex/cortex.usecases';
@@ -9,6 +14,7 @@ type ModelStartOptions = {
 @SubCommand({ name: 'start', description: 'Start a model by ID.' })
 export class ModelStartCommand extends CommandRunner {
   constructor(
+    private readonly inquirerService: InquirerService,
     private readonly cortexUsecases: CortexUsecases,
     private readonly modelsCliUsecases: ModelsCliUsecases,
   ) {
@@ -16,9 +22,14 @@
     super();
   }
   async run(input: string[], options: ModelStartOptions): Promise<void> {
-    if (input.length === 0) {
-      console.error('Model ID is required');
-      exit(1);
+    let modelId = input[0];
+    if (!modelId) {
+      try {
+        modelId = await this.modelInquiry();
+      } catch {
+        console.error('Model ID is required');
+        exit(1);
+      }
     }
 
     await this.cortexUsecases
@@ -28,6 +39,21 @@
       .then(() => !options.attach && process.exit(0));
   }
 
+  modelInquiry = async () => {
+    const models = await this.modelsCliUsecases.listAllModels();
+    if (!models.length) throw 'No models found';
+    const { model } = await this.inquirerService.inquirer.prompt({
+      type: 'list',
+      name: 'model',
+      message: 'Select a model to start:',
+      choices: models.map((e) => ({
+        name: e.name,
+        value: e.id,
+      })),
+    });
+    return model;
+  };
+
   @Option({
     flags: '-a, --attach',
     description: 'Attach to interactive chat session',
diff --git a/cortex-js/src/infrastructure/commanders/usecases/ps.cli.usecases.ts b/cortex-js/src/infrastructure/commanders/usecases/ps.cli.usecases.ts
index 1fdf5cd0a..81f2fe8b4
--- a/cortex-js/src/infrastructure/commanders/usecases/ps.cli.usecases.ts
+++ b/cortex-js/src/infrastructure/commanders/usecases/ps.cli.usecases.ts
@@ -1,7 +1,7 @@
 import { Injectable } from '@nestjs/common';
 import { defaultCortexCppHost, defaultCortexCppPort } from 'constant';
 
-interface ModelStat {
+export interface ModelStat {
   modelId: string;
   engine?: string;
   duration?: string;