Skip to content
This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit 1ee23c6

Browse files
authored
feat: cortex post-install script - init silently on install (#642)
1 parent 8034171 commit 1ee23c6

File tree

6 files changed

+123
-27
lines changed

6 files changed

+123
-27
lines changed

README.md

Lines changed: 3 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -68,12 +68,7 @@ To install Cortex CLI, follow the steps below:
6868
npm i -g @janhq/cortex
6969
```
7070

71-
2. Initialize a compatible engine:
72-
``` bash
73-
cortex init
74-
```
75-
76-
3. Download a GGUF model from Hugging Face:
71+
2. Download a GGUF model from Hugging Face:
7772
``` bash
7873
# Pull a model most compatible with your hardware
7974
cortex pull llama3
@@ -84,12 +79,12 @@ cortex pull llama3:7b
8479
# Pull a model with the HuggingFace `model_id`
8580
cortex pull microsoft/Phi-3-mini-4k-instruct-gguf
8681
```
87-
4. Load the model:
82+
3. Load the model:
8883
``` bash
8984
cortex models start llama3:7b
9085
```
9186

92-
5. Start chatting with the model:
87+
4. Start chatting with the model:
9388
``` bash
9489
cortex chat tell me a joke
9590
```

cortex-js/README.md

Lines changed: 3 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -68,12 +68,7 @@ To install Cortex CLI, follow the steps below:
6868
npm i -g @janhq/cortex
6969
```
7070

71-
2. Initialize a compatible engine:
72-
``` bash
73-
cortex init
74-
```
75-
76-
3. Download a GGUF model from Hugging Face:
71+
2. Download a GGUF model from Hugging Face:
7772
``` bash
7873
# Pull a model most compatible with your hardware
7974
cortex pull llama3
@@ -84,12 +79,12 @@ cortex pull llama3:7b
8479
# Pull a model with the HuggingFace `model_id`
8580
cortex pull microsoft/Phi-3-mini-4k-instruct-gguf
8681
```
87-
4. Load the model:
82+
3. Load the model:
8883
``` bash
8984
cortex models start llama3:7b
9085
```
9186

92-
5. Start chatting with the model:
87+
4. Start chatting with the model:
9388
``` bash
9489
cortex chat tell me a joke
9590
```

cortex-js/package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,8 @@
2626
"test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand",
2727
"test:e2e": "jest --config ./test/jest-e2e.json",
2828
"typeorm": "typeorm-ts-node-esm",
29-
"build:dev": "npx nest build && chmod +x ./dist/src/command.js && npm link"
29+
"build:dev": "npx nest build && chmod +x ./dist/src/command.js && npm link",
30+
"postinstall": "cortex init -s"
3031
},
3132
"dependencies": {
3233
"@huggingface/gguf": "^0.1.5",

cortex-js/src/infrastructure/commanders/init.command.ts

Lines changed: 48 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,9 @@
1-
import { CommandRunner, InquirerService, SubCommand } from 'nest-commander';
1+
import {
2+
CommandRunner,
3+
InquirerService,
4+
SubCommand,
5+
Option,
6+
} from 'nest-commander';
27
import { InitCliUsecases } from './usecases/init.cli.usecases';
38
import { InitOptions } from './types/init-options.interface';
49

@@ -16,6 +21,39 @@ export class InitCommand extends CommandRunner {
1621
}
1722

1823
/**
 * Entry point for `cortex init`.
 * Dispatches to the non-interactive flow when the `-s/--silent` flag
 * was passed, otherwise runs the interactive prompt flow.
 *
 * @param input positional CLI arguments (first entry: engine version)
 * @param options parsed CLI options, including the `silent` flag
 */
async run(input: string[], options?: InitOptions): Promise<void> {
  return options?.silent
    ? this.initSilently(input)
    : this.initPrompts(input, options);
}
30+
31+
/**
 * Non-interactive initialization (`cortex init -s`), used by the npm
 * post-install hook. Auto-detects the host configuration instead of
 * prompting the user:
 *  - macOS: installs the default engine build for the platform.
 *  - Linux/Windows: selects GPU mode when `nvidia-smi` responds,
 *    CPU mode otherwise; the CPU instruction set (AVX/AVX2/AVX512) is
 *    probed via the bundled cpuinfo binary.
 *
 * @param input first positional argument is the engine version
 *              (defaults to 'latest')
 * @param options init options, mutated in place with detected values
 */
private initSilently = async (input: string[], options: InitOptions = {}) => {
  const version = input[0] ?? 'latest';
  if (process.platform === 'darwin') {
    const engineFileName = this.initUsecases.parseEngineFileName(options);
    return this.initUsecases.installEngine(engineFileName, version);
  }
  // If Nvidia Driver is installed -> GPU
  options.runMode = (await this.initUsecases.checkNvidiaGPUExist())
    ? 'GPU'
    : 'CPU';
  options.gpuType = 'Nvidia';
  // CPU Instructions detection
  options.instructions = await this.initUsecases.detectInstructions();
  const engineFileName = this.initUsecases.parseEngineFileName(options);
  await this.initUsecases.installEngine(engineFileName, version);
  // Bug fix: the original chained installCudaToolkitDependency()
  // unconditionally, downloading the CUDA toolkit even on CPU-only
  // machines. Mirror the interactive flow and install it only when an
  // NVIDIA GPU was actually detected.
  if (options.runMode === 'GPU') {
    await this.initUsecases.installCudaToolkitDependency(options);
  }
};
49+
50+
/**
51+
* Manual initialization
52+
* To setup cortex's dependencies
53+
* @param input
54+
* @param options GPU | CPU / Nvidia | Others (Vulkan) / AVX | AVX2 | AVX512
55+
*/
56+
private initPrompts = async (input: string[], options?: InitOptions) => {
1957
options = await this.inquirerService.ask(
2058
'init-run-mode-questions',
2159
options,
@@ -33,5 +71,14 @@ export class InitCommand extends CommandRunner {
3371
if (options.installCuda === 'Yes') {
3472
await this.initUsecases.installCudaToolkitDependency(options);
3573
}
74+
};
75+
76+
/**
 * `-s, --silent` flag: run init non-interactively with auto-detected
 * settings. Presence of the flag yields `true`; absent it defaults to
 * `false`.
 */
@Option({
  defaultValue: false,
  description: 'Init without asking questions',
  flags: '-s, --silent',
})
parseSilent() {
  return true;
}
3784
}

cortex-js/src/infrastructure/commanders/types/init-options.interface.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,4 +4,5 @@ export interface InitOptions {
44
instructions?: 'AVX' | 'AVX2' | 'AVX512' | undefined;
55
cudaVersion?: '11' | '12';
66
installCuda?: 'Yes' | string;
7+
silent?: boolean;
78
}

cortex-js/src/infrastructure/commanders/usecases/init.cli.usecases.ts

Lines changed: 66 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,13 @@ import { Injectable } from '@nestjs/common';
99
import { firstValueFrom } from 'rxjs';
1010
import { FileManagerService } from '@/file-manager/file-manager.service';
1111
import { rm } from 'fs/promises';
12+
import { exec } from 'child_process';
1213

1314
@Injectable()
1415
export class InitCliUsecases {
15-
CORTEX_RELEASES_URL = 'https://api.github.com/repos/janhq/cortex/releases';
16-
CUDA_DOWNLOAD_URL =
16+
private readonly CORTEX_RELEASES_URL =
17+
'https://api.github.com/repos/janhq/cortex/releases';
18+
private readonly CUDA_DOWNLOAD_URL =
1719
'https://catalog.jan.ai/dist/cuda-dependencies/<version>/<platform>/cuda.tar.gz';
1820

1921
constructor(
@@ -109,7 +111,7 @@ export class InitCliUsecases {
109111
await rm(destination, { force: true });
110112
};
111113

112-
parseEngineFileName = (options: InitOptions) => {
114+
parseEngineFileName = (options?: InitOptions) => {
113115
const platform =
114116
process.platform === 'win32'
115117
? 'windows'
@@ -118,12 +120,14 @@ export class InitCliUsecases {
118120
: process.platform;
119121
const arch = process.arch === 'arm64' ? process.arch : 'amd64';
120122
const cudaVersion =
121-
options.runMode === 'GPU'
123+
options?.runMode === 'GPU'
122124
? options.gpuType === 'Nvidia'
123125
? '-cuda-' + (options.cudaVersion === '11' ? '11-7' : '12-0')
124126
: '-vulkan'
125127
: '';
126-
const instructions = options.instructions ? `-${options.instructions}` : '';
128+
const instructions = options?.instructions
129+
? `-${options.instructions}`
130+
: '';
127131
const engineName = `${platform}-${arch}${instructions.toLowerCase()}${cudaVersion}`;
128132
return `${engineName}.tar.gz`;
129133
};
@@ -173,10 +177,6 @@ export class InitCliUsecases {
173177
return undefined; // No CUDA Toolkit found
174178
};
175179

176-
checkFileExistenceInPaths = (file: string, paths: string[]): boolean => {
177-
return paths.some((p) => existsSync(join(p, file)));
178-
};
179-
180180
installCudaToolkitDependency = async (options: InitOptions) => {
181181
const platform = process.platform === 'win32' ? 'windows' : 'linux';
182182

@@ -232,4 +232,61 @@ export class InitCliUsecases {
232232
}
233233
await rm(destination, { force: true });
234234
};
235+
236+
// Function to check for NVIDIA GPU
237+
checkNvidiaGPUExist = (): Promise<boolean> => {
238+
return new Promise<boolean>((resolve) => {
239+
// Execute the nvidia-smi command
240+
exec('nvidia-smi', (error) => {
241+
if (error) {
242+
// If there's an error, it means nvidia-smi is not installed or there's no NVIDIA GPU
243+
console.log('NVIDIA GPU not detected or nvidia-smi not installed.');
244+
resolve(false);
245+
} else {
246+
// If the command executes successfully, NVIDIA GPU is present
247+
console.log('NVIDIA GPU detected.');
248+
resolve(true);
249+
}
250+
});
251+
});
252+
};
253+
254+
detectInstructions = (): Promise<'AVX' | 'AVX2' | 'AVX512' | undefined> => {
255+
return new Promise<'AVX' | 'AVX2' | 'AVX512' | undefined>((res) => {
256+
// Execute the cpuinfo command
257+
258+
exec(
259+
join(
260+
__dirname,
261+
`../../../../bin/cpuinfo${process.platform !== 'linux' ? '.exe' : ''}`,
262+
),
263+
(error, stdout) => {
264+
if (error) {
265+
// If there's an error, it means lscpu is not installed
266+
console.log('CPUInfo is not installed.');
267+
res('AVX');
268+
} else {
269+
// If the command executes successfully, parse the output to detect CPU instructions
270+
if (stdout.includes('"AVX512": "true"')) {
271+
console.log('AVX-512 instructions detected.');
272+
res('AVX512');
273+
} else if ('"AVX2": "true"') {
274+
console.log('AVX2 instructions detected.');
275+
res('AVX2');
276+
} else {
277+
console.log('AVXs instructions detected.');
278+
res('AVX');
279+
}
280+
}
281+
},
282+
);
283+
});
284+
};
285+
286+
/**
 * True when `file` exists inside at least one of the given directories.
 */
private checkFileExistenceInPaths = (
  file: string,
  paths: string[],
): boolean => {
  for (const dir of paths) {
    // Stop at the first directory that contains the file.
    if (existsSync(join(dir, file))) {
      return true;
    }
  }
  return false;
};
235292
}

0 commit comments

Comments
 (0)