
Commit 192a891

fix: pull corresponding engine base on model

1 parent 03c948b

4 files changed: +45, -18 lines

cortex-js/src/infrastructure/commanders/models/model-pull.command.ts

Lines changed: 10 additions & 7 deletions

@@ -35,8 +35,16 @@ export class ModelPullCommand extends CommandRunner {
     }
     const modelId = passedParams[0];
 
+    await this.modelsCliUsecases.pullModel(modelId).catch((e: Error) => {
+      if (e instanceof ModelNotFoundException)
+        console.error('Model does not exist.');
+      else console.error(e);
+      exit(1);
+    });
+
     const existingModel = await this.modelsCliUsecases.getModel(modelId);
     const engine = existingModel?.engine || 'cortex.llamacpp';
+
     // Pull engine if not exist
     if (
       !existsSync(
@@ -45,16 +53,11 @@ export class ModelPullCommand extends CommandRunner {
     ) {
       await this.initUsecases.installEngine(
         await this.initUsecases.defaultInstallationOptions(),
+        'latest',
+        engine,
       );
     }
 
-    await this.modelsCliUsecases.pullModel(modelId).catch((e: Error) => {
-      if (e instanceof ModelNotFoundException)
-        console.error('Model does not exist.');
-      else console.error(e);
-      exit(1);
-    });
-
     console.log('\nDownload complete!');
     exit(0);
   }
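In effect, the pull command now downloads the model first and uses its metadata to decide which engine to install. A minimal sketch of the resulting flow in TypeScript; the interface shapes and the function name pullModelAndEngine are illustrative stand-ins for the real use-case classes, while the statements inside mirror the diff above:

import { exit } from 'node:process';

// Simplified stand-ins for the real use-case classes (illustrative only).
interface ModelsCliUsecases {
  pullModel(modelId: string): Promise<void>;
  getModel(modelId: string): Promise<{ engine?: string } | undefined>;
}
interface InitCliUsecases {
  defaultInstallationOptions(): Promise<object>;
  installEngine(options: object, version: string, engine: string): Promise<void>;
}
class ModelNotFoundException extends Error {}

// Order introduced by this commit: pull the model first, then read the
// engine from its metadata, then install that engine.
async function pullModelAndEngine(
  modelId: string,
  models: ModelsCliUsecases,
  init: InitCliUsecases,
): Promise<void> {
  await models.pullModel(modelId).catch((e: Error) => {
    if (e instanceof ModelNotFoundException)
      console.error('Model does not exist.');
    else console.error(e);
    exit(1);
  });

  // Fall back to llama.cpp when the model records no engine.
  const existingModel = await models.getModel(modelId);
  const engine = existingModel?.engine || 'cortex.llamacpp';

  await init.installEngine(
    await init.defaultInstallationOptions(),
    'latest',
    engine,
  );
}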

cortex-js/src/infrastructure/commanders/models/model-start.command.ts

Lines changed: 2 additions & 0 deletions

@@ -69,6 +69,8 @@ export class ModelStartCommand extends CommandRunner {
     ) {
       await this.initUsecases.installEngine(
         await this.initUsecases.defaultInstallationOptions(),
+        'latest',
+        engine,
       );
     }
     await this.cortexUsecases

cortex-js/src/infrastructure/commanders/usecases/init.cli.usecases.ts

Lines changed: 29 additions & 6 deletions

@@ -52,12 +52,41 @@ export class InitCliUsecases {
   installEngine = async (
     options: InitOptions,
     version: string = 'latest',
+    engine: string = 'default',
     force: boolean = true,
   ): Promise<any> => {
     const configs = await this.fileManagerService.getConfig();
 
     if (configs.initialized && !force) return;
 
+    // Ship Llama.cpp engine by default
+    if (
+      !existsSync(
+        join(
+          await this.fileManagerService.getDataFolderPath(),
+          'engines',
+          engine,
+        ),
+      )
+    )
+      await this.installLlamaCppEngine(options, version);
+
+    if (engine === 'cortex.onnx')
+      if (process.platform === 'win32') await this.installONNXEngine();
+
+    configs.initialized = true;
+    await this.fileManagerService.writeConfigFile(configs);
+  };
+
+  /**
+   * Install Llama.cpp engine
+   * @param options
+   * @param version
+   */
+  private installLlamaCppEngine = async (
+    options: InitOptions,
+    version: string = 'latest',
+  ) => {
     const engineFileName = this.parseEngineFileName(options);
 
     const res = await firstValueFrom(
@@ -148,12 +177,6 @@ export class InitCliUsecases {
     if (options.runMode === 'GPU' && !(await cudaVersion())) {
       await this.installCudaToolkitDependency(options.cudaVersion);
     }
-
-    // Ship ONNX Runtime on Windows by default
-    if (process.platform === 'win32') await this.installONNXEngine();
-
-    configs.initialized = true;
-    await this.fileManagerService.writeConfigFile(configs);
   };
 
   /**
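With this refactor, installEngine installs the llama.cpp binaries whenever the requested engine's folder is missing under the data directory, and additionally installs the ONNX runtime on Windows when 'cortex.onnx' is requested, instead of always shipping ONNX on Windows. A rough usage sketch, assuming an initUsecases instance of InitCliUsecases as in the commands above:

// Default engine ('default'): installs the llama.cpp binaries only.
await initUsecases.installEngine(
  await initUsecases.defaultInstallationOptions(),
);

// ONNX model on Windows: llama.cpp binaries plus the ONNX runtime.
await initUsecases.installEngine(
  await initUsecases.defaultInstallationOptions(),
  'latest',
  'cortex.onnx',
);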

cortex-js/src/infrastructure/commanders/usecases/models.cli.usecases.ts

Lines changed: 4 additions & 5 deletions

@@ -152,7 +152,7 @@ export class ModelsCliUsecases {
 
   /**
    * It's to pull ONNX model from HuggingFace repository
-   * @param modelId 
+   * @param modelId
    */
   private async pullOnnxModel(modelId: string) {
     const modelsContainerDir = await this.fileService.getModelsPath();
@@ -179,7 +179,6 @@ export class ModelsCliUsecases {
       console.log(`Downloading ${file}`);
       const bar = new SingleBar({}, Presets.shades_classic);
       bar.start(100, 0);
-
       const response = await firstValueFrom(
         this.httpService.get(
           `https://huggingface.co/cortexhub/${repo}/resolve/${branch}/${file}?download=true`,
@@ -219,7 +218,7 @@ export class ModelsCliUsecases {
       readFileSync(join(modelFolder, 'model.yml'), 'utf-8'),
     ) as CreateModelDto;
     model.files = [join(modelFolder)];
-    model.model = modelId
+    model.model = modelId;
 
     if (!(await this.modelsUsecases.findOne(modelId)))
       await this.modelsUsecases.create(model);
@@ -290,8 +289,8 @@ export class ModelsCliUsecases {
 
   /**
    * Parse preset file
-   * @param preset 
-   * @returns 
+   * @param preset
+   * @returns
    */
   private async parsePreset(preset?: string): Promise<object> {
     const presetsFolder = await this.fileService.getPresetsPath();
