
Commit eb6ade3

fix: pull corresponding engine based on model
1 parent 03c948b commit eb6ade3


3 files changed: +35, -13 lines changed

cortex-js/src/infrastructure/commanders/models/model-pull.command.ts

Lines changed: 10 additions & 7 deletions
@@ -37,6 +37,14 @@ export class ModelPullCommand extends CommandRunner {
 
     const existingModel = await this.modelsCliUsecases.getModel(modelId);
     const engine = existingModel?.engine || 'cortex.llamacpp';
+
+    await this.modelsCliUsecases.pullModel(modelId).catch((e: Error) => {
+      if (e instanceof ModelNotFoundException)
+        console.error('Model does not exist.');
+      else console.error(e);
+      exit(1);
+    });
+
     // Pull engine if not exist
     if (
       !existsSync(
@@ -45,16 +53,11 @@ export class ModelPullCommand extends CommandRunner {
     ) {
       await this.initUsecases.installEngine(
         await this.initUsecases.defaultInstallationOptions(),
+        'latest',
+        engine,
       );
     }
 
-    await this.modelsCliUsecases.pullModel(modelId).catch((e: Error) => {
-      if (e instanceof ModelNotFoundException)
-        console.error('Model does not exist.');
-      else console.error(e);
-      exit(1);
-    });
-
     console.log('\nDownload complete!');
     exit(0);
   }
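
To make the reordering easier to follow, here is a minimal, hedged sketch of the flow this hunk produces: the model is pulled first, and the engine recorded on the model drives the engine install. The declare stubs and the checkEngineInstalled helper are hypothetical stand-ins for the injected use cases and for the existsSync(...) check that is only partly visible in the diff.

// Sketch only: the reordered pull flow, not the actual ModelPullCommand class.
class ModelNotFoundException extends Error {}

// Hypothetical stand-ins for the injected use cases and the existsSync(...) check.
declare const modelsCliUsecases: {
  getModel(id: string): Promise<{ engine?: string } | undefined>;
  pullModel(id: string): Promise<void>;
};
declare const initUsecases: {
  defaultInstallationOptions(): Promise<object>;
  installEngine(options: object, version: string, engine: string): Promise<void>;
};
declare function checkEngineInstalled(engine: string): boolean;

async function pullModelThenEngine(modelId: string): Promise<void> {
  // The engine comes from the model's metadata, falling back to llama.cpp.
  const existingModel = await modelsCliUsecases.getModel(modelId);
  const engine = existingModel?.engine || 'cortex.llamacpp';

  // Pull the model before touching engines, so a missing model exits early.
  await modelsCliUsecases.pullModel(modelId).catch((e: Error) => {
    if (e instanceof ModelNotFoundException) console.error('Model does not exist.');
    else console.error(e);
    process.exit(1);
  });

  // Only then install the engine this particular model needs, if it is absent.
  if (!checkEngineInstalled(engine)) {
    await initUsecases.installEngine(
      await initUsecases.defaultInstallationOptions(),
      'latest',
      engine,
    );
  }

  console.log('\nDownload complete!');
}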

cortex-js/src/infrastructure/commanders/models/model-start.command.ts

Lines changed: 2 additions & 0 deletions
@@ -69,6 +69,8 @@ export class ModelStartCommand extends CommandRunner {
     ) {
       await this.initUsecases.installEngine(
         await this.initUsecases.defaultInstallationOptions(),
+        'latest',
+        engine,
       );
     }
     await this.cortexUsecases

cortex-js/src/infrastructure/commanders/usecases/init.cli.usecases.ts

Lines changed: 23 additions & 6 deletions
@@ -52,12 +52,35 @@ export class InitCliUsecases {
   installEngine = async (
     options: InitOptions,
     version: string = 'latest',
+    engine: string = 'default',
     force: boolean = true,
   ): Promise<any> => {
     const configs = await this.fileManagerService.getConfig();
 
     if (configs.initialized && !force) return;
 
+    if (engine === 'default' || engine === 'cortex.llamacpp')
+      // Ship Llama.cpp engine by default
+      await this.installLlamaCppEngine(options, version);
+
+    if (engine === 'cortex.onnx')
+      if (process.platform === 'win32')
+        // Ship ONNX Runtime on Windows by default
+        await this.installONNXEngine();
+
+    configs.initialized = true;
+    await this.fileManagerService.writeConfigFile(configs);
+  };
+
+  /**
+   * Install Llama.cpp engine
+   * @param options
+   * @param version
+   */
+  private installLlamaCppEngine = async (
+    options: InitOptions,
+    version: string = 'latest',
+  ) => {
     const engineFileName = this.parseEngineFileName(options);
 
     const res = await firstValueFrom(
@@ -148,12 +171,6 @@
     if (options.runMode === 'GPU' && !(await cudaVersion())) {
       await this.installCudaToolkitDependency(options.cudaVersion);
     }
-
-    // Ship ONNX Runtime on Windows by default
-    if (process.platform === 'win32') await this.installONNXEngine();
-
-    configs.initialized = true;
-    await this.fileManagerService.writeConfigFile(configs);
   };
 
   /**
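
For reference, a hedged sketch of how the reworked installEngine signature is intended to be called from the commands above; the initUsecases handle is a hypothetical stand-in for the injected InitCliUsecases instance, and the options shape is elided.

// Sketch only: the new (options, version, engine, force) call shape.
declare const initUsecases: {
  defaultInstallationOptions(): Promise<object>;
  installEngine(
    options: object,
    version?: string,
    engine?: string,
    force?: boolean,
  ): Promise<any>;
};

async function installEngineForModel(modelEngine?: string): Promise<void> {
  // Engine resolved from the model's metadata, as the commands above now do.
  const engine = modelEngine ?? 'cortex.llamacpp';

  await initUsecases.installEngine(
    await initUsecases.defaultInstallationOptions(),
    'latest',
    engine,
  );
  // With engine omitted (or 'default'), installEngine still ships llama.cpp;
  // 'cortex.onnx' is only installed on Windows, per the branch above.
}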
