Skip to content
This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit 32b716e

Browse files
committed
chore: remove default inference params
1 parent dfb2254 commit 32b716e

File tree

2 files changed

+2
-6
lines changed

2 files changed

+2
-6
lines changed

cortex-js/src/infrastructure/commanders/usecases/chat.cli.usecases.ts

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,8 @@ export class ChatCliUsecases {
6363
rl.on('line', sendCompletionMessage.bind(this));
6464

6565
async function sendCompletionMessage(userInput: string) {
66+
if (!userInput || userInput.trim() === '') return;
67+
6668
if (userInput.trim() === this.exitClause) {
6769
rl.close();
6870
return;
@@ -98,12 +100,7 @@ export class ChatCliUsecases {
98100
model: modelId,
99101
stream: true,
100102
max_tokens: 4098,
101-
stop: [],
102-
frequency_penalty: 0.7,
103-
presence_penalty: 0.7,
104103
temperature: 0.7,
105-
top_p: 0.7,
106-
107104
// Override with model settings
108105
...parser.parseModelInferenceParams(model),
109106
};

cortex-js/src/infrastructure/constants/benchmark.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,6 @@ export const defaultBenchmarkConfiguration: BenchmarkConfig = {
1818
model: 'tinyllama',
1919
stream: true,
2020
max_tokens: 2048,
21-
stop: [],
2221
frequency_penalty: 0,
2322
presence_penalty: 0,
2423
temperature: 0.7,

0 commit comments

Comments (0)