@@ -4,7 +4,7 @@ import { exit } from 'node:process';
 import { CortexUsecases } from '@/usecases/cortex/cortex.usecases';
 import { SetCommandContext } from '../decorators/CommandContext';
 import { ContextService } from '@/infrastructure/services/context/context.service';
-import { createReadStream, existsSync, statSync, watchFile } from 'node:fs';
+import { existsSync } from 'node:fs';
 import { FileManagerService } from '@/infrastructure/services/file-manager/file-manager.service';
 import { join } from 'node:path';
 import { Engines } from '../types/engine.interface';
@@ -14,11 +14,12 @@ import { isRemoteEngine } from '@/utils/normalize-model-id';
 import { downloadProgress } from '@/utils/download-progress';
 import { CortexClient } from '../services/cortex.client';
 import { DownloadType } from '@/domain/models/download.interface';
+import { printLastErrorLines } from '@/utils/logs';
 
 type ModelStartOptions = {
-  attach: boolean;
   preset?: string;
 };
+
 @SubCommand({
   name: 'start',
   description: 'Start a model by ID.',
@@ -77,16 +78,17 @@ export class ModelStartCommand extends BaseCommand {
       await downloadProgress(this.cortex, undefined, DownloadType.Engine);
     }
 
-    // Attached - stdout logs
-    if (options.attach) {
-      this.attachLogWatch();
-    }
-
     const parsedPreset = await this.fileService.getPreset(options.preset);
 
+    const startingSpinner = ora('Loading model...').start();
+
     await this.cortex.models
       .start(modelId, parsedPreset)
-      .then(() => options.attach && ora('Model is running...').start());
+      .then(() => startingSpinner.succeed('Model loaded'))
+      .catch(async (error) => {
+        startingSpinner.fail(error.message ?? error);
+        printLastErrorLines(await this.fileService.getLogPath());
+      });
   }
 
   modelInquiry = async () => {
@@ -104,55 +106,11 @@ export class ModelStartCommand extends BaseCommand {
     return model;
   };
 
-  @Option({
-    flags: '-a, --attach',
-    description: 'Attach to interactive chat session',
-    defaultValue: false,
-    name: 'attach',
-  })
-  parseAttach() {
-    return true;
-  }
-
   @Option({
     flags: '-p, --preset <preset>',
     description: 'Apply a chat preset to the chat session',
   })
   parseTemplate(value: string) {
     return value;
   }
-
-  /**
-   * Attach to the log file and watch for changes
-   */
-  private async attachLogWatch() {
-    const logPath = await this.fileService.getLogPath();
-    const initialSize = statSync(logPath).size;
-    const logStream = createReadStream(logPath, {
-      start: initialSize,
-      encoding: 'utf-8',
-      autoClose: false,
-    });
-    logStream.on('data', (chunk) => {
-      console.log(chunk);
-    });
-    watchFile(logPath, (curr, prev) => {
-      // Check if the file size has increased
-      if (curr.size > prev.size) {
-        // Calculate the position to start reading from
-        const position = prev.size;
-
-        // Create a new read stream from the updated position
-        const updateStream = createReadStream(logPath, {
-          encoding: 'utf8',
-          start: position,
-        });
-
-        // Read the newly written content
-        updateStream.on('data', (chunk) => {
-          console.log(chunk);
-        });
-      }
-    });
-  }
 }
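
Note on the new helper: printLastErrorLines is imported from '@/utils/logs', but its body is not part of this diff. A minimal sketch of such a helper, assuming it only prints the tail of the CLI log file when startup fails (the signature, default line count, and synchronous read are assumptions, not the project's actual implementation), could look like:

import { existsSync, readFileSync } from 'node:fs';

// Hypothetical sketch: print the last few lines of the log file after a failed model start.
// The name matches the import above, but the real code in '@/utils/logs' may differ.
export function printLastErrorLines(logPath: string, lineCount = 10): void {
  if (!existsSync(logPath)) return;
  const lines = readFileSync(logPath, 'utf-8').trimEnd().split('\n');
  for (const line of lines.slice(-lineCount)) {
    console.log(line);
  }
}

Compared with the removed attachLogWatch() watcher, this reads the log once, and only on failure, instead of streaming it continuously, which is why createReadStream, statSync, and watchFile could be dropped from the node:fs import.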