99 changes: 99 additions & 0 deletions apps/desktop/src/components/settings/views/ai.tsx
@@ -60,6 +60,33 @@ const initialSttModels = [
downloaded: true,
fileName: "ggml-tiny-q8_0.bin",
},
{
key: "QuantizedTinyEn",
name: "Tiny - English",
accuracy: 1,
speed: 3,
size: "44 MB",
downloaded: false,
fileName: "ggml-tiny.en-q8_0.bin",
},
{
key: "QuantizedBase",
name: "Base",
accuracy: 2,
speed: 2,
size: "82 MB",
downloaded: false,
fileName: "ggml-base-q8_0.bin",
},
{
key: "QuantizedBaseEn",
name: "Base - English",
accuracy: 2,
speed: 2,
size: "82 MB",
downloaded: false,
fileName: "ggml-base.en-q8_0.bin",
},
{
key: "QuantizedSmall",
name: "Small",
@@ -69,6 +96,15 @@ const initialSttModels = [
downloaded: false,
fileName: "ggml-small-q8_0.bin",
},
{
key: "QuantizedSmallEn",
name: "Small - English",
accuracy: 2,
speed: 2,
size: "264 MB",
downloaded: false,
fileName: "ggml-small.en-q8_0.bin",
},
{
key: "QuantizedLargeTurbo",
name: "Large",
@@ -354,6 +390,67 @@ export default function LocalAI() {
return apiBase && (apiBase.includes("localhost") || apiBase.includes("127.0.0.1"));
};

  // query the backend for the currently selected LLM model and apply it
const currentLLMModel = useQuery({
queryKey: ["current-llm-model"],
queryFn: () => localLlmCommands.getCurrentModel(),
});

useEffect(() => {
if (currentLLMModel.data && !customLLMEnabled.data) {
setSelectedLLMModel(currentLLMModel.data);
}
}, [currentLLMModel.data, customLLMEnabled.data]);

  // query the backend for the currently selected STT model and apply it
const currentSTTModel = useQuery({
queryKey: ["current-stt-model"],
queryFn: () => localSttCommands.getCurrentModel(),
});

useEffect(() => {
if (currentSTTModel.data) {
setSelectedSTTModel(currentSTTModel.data);
}
}, [currentSTTModel.data]);

  // query the backend for the download status of each STT model and apply it
const sttModelDownloadStatus = useQuery({
queryKey: ["stt-model-download-status"],
queryFn: async () => {
const statusChecks = await Promise.all([
localSttCommands.isModelDownloaded("QuantizedTiny"),
localSttCommands.isModelDownloaded("QuantizedTinyEn"),
localSttCommands.isModelDownloaded("QuantizedBase"),
localSttCommands.isModelDownloaded("QuantizedBaseEn"),
localSttCommands.isModelDownloaded("QuantizedSmall"),
localSttCommands.isModelDownloaded("QuantizedSmallEn"),
localSttCommands.isModelDownloaded("QuantizedLargeTurbo"),
]);
return {
"QuantizedTiny": statusChecks[0],
"QuantizedTinyEn": statusChecks[1],
"QuantizedBase": statusChecks[2],
"QuantizedBaseEn": statusChecks[3],
"QuantizedSmall": statusChecks[4],
"QuantizedSmallEn": statusChecks[5],
"QuantizedLargeTurbo": statusChecks[6],
} as Record<string, boolean>;
},
refetchInterval: 3000,
});

useEffect(() => {
if (sttModelDownloadStatus.data) {
setSttModels(prev =>
prev.map(model => ({
...model,
downloaded: sttModelDownloadStatus.data[model.key] || false,
}))
);
}
}, [sttModelDownloadStatus.data]);

return (
<div className="space-y-8">
<div>
@@ -389,6 +486,7 @@ export default function LocalAI() {
onClick={() => {
if (model.downloaded) {
setSelectedSTTModel(model.key);
localSttCommands.setCurrentModel(model.key as any);
🛠️ Refactor suggestion

Improve type safety by avoiding type assertions

The use of `as any` and `as SupportedModel` type assertions could hide type mismatches at compile time.

Consider updating the model type definitions to properly type the model keys, eliminating the need for type assertions. This would provide better compile-time type safety.

Also applies to: line 627

🤖 Prompt for AI Agents
In apps/desktop/src/components/settings/views/ai.tsx at lines 489 and 627, the
code uses 'as any' and 'as SupportedModel' type assertions on model keys, which
reduces type safety. To fix this, update the type definitions for the model keys
so they correctly reflect the expected types without needing assertions. Adjust
the model and localSttCommands types to align, ensuring the model.key is typed
properly and can be passed directly to setCurrentModel without casting.
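As an illustration of the suggested fix, here is a minimal sketch (not the PR's actual code) that narrows the STT model keys to a string-literal union so `model.key` can be passed to `setCurrentModel` without a cast. The names `SttModelKey` and `SttModelInfo` are hypothetical, and the real signature of `localSttCommands.setCurrentModel` in the generated bindings may differ:

```ts
// Hypothetical sketch — SttModelKey / SttModelInfo are illustrative names,
// not the actual exports of the generated tauri bindings.
type SttModelKey =
  | "QuantizedTiny"
  | "QuantizedTinyEn"
  | "QuantizedBase"
  | "QuantizedBaseEn"
  | "QuantizedSmall"
  | "QuantizedSmallEn"
  | "QuantizedLargeTurbo";

interface SttModelInfo {
  key: SttModelKey; // narrowed from string, so no `as any` is needed below
  name: string;
  accuracy: number;
  speed: number;
  size: string;
  downloaded: boolean;
  fileName: string;
}

const initialSttModels: SttModelInfo[] = [
  { key: "QuantizedTiny", name: "Tiny", accuracy: 1, speed: 3, size: "44 MB", downloaded: true, fileName: "ggml-tiny-q8_0.bin" },
  // ...remaining entries as in the diff above...
];

// If setCurrentModel is declared to accept SttModelKey (or a compatible union),
// the click handler can pass the key directly:
//   localSttCommands.setCurrentModel(model.key);
```

The same approach would cover the LLM list at line 627: typing that list's `key` field as `SupportedModel` would let the `as SupportedModel` cast be dropped as well.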

}
}}
>
@@ -526,6 +624,7 @@ export default function LocalAI() {
onClick={() => {
if (model.available && model.downloaded) {
setSelectedLLMModel(model.key);
localLlmCommands.setCurrentModel(model.key as SupportedModel);
setCustomLLMEnabledMutation.mutate(false);
}
}}
28 changes: 14 additions & 14 deletions apps/desktop/src/locales/en/messages.po
@@ -346,12 +346,12 @@ msgstr "Annual"
msgid "Anyone with the link can view this page"
msgstr "Anyone with the link can view this page"

#: src/components/settings/views/ai.tsx:658
#: src/components/settings/views/ai.tsx:757
msgid "API Base URL"
msgstr "API Base URL"

#: src/components/settings/views/integrations.tsx:197
#: src/components/settings/views/ai.tsx:684
#: src/components/settings/views/ai.tsx:783
msgid "API Key"
msgstr "API Key"

@@ -460,7 +460,7 @@ msgstr "Company name"
#~ msgid "Connect"
#~ msgstr "Connect"

#: src/components/settings/views/ai.tsx:638
#: src/components/settings/views/ai.tsx:737
msgid "Connect to a self-hosted or third-party LLM endpoint (OpenAI API compatible)."
msgstr "Connect to a self-hosted or third-party LLM endpoint (OpenAI API compatible)."

@@ -492,7 +492,7 @@ msgstr "Contacts Access"
msgid "Continue"
msgstr "Continue"

#: src/components/settings/views/ai.tsx:770
#: src/components/settings/views/ai.tsx:869
msgid "Control how creative the AI enhancement should be"
msgstr "Control how creative the AI enhancement should be"

@@ -517,15 +517,15 @@ msgstr "Create Note"
msgid "Create your first template to get started"
msgstr "Create your first template to get started"

#: src/components/settings/views/ai.tsx:767
#: src/components/settings/views/ai.tsx:866
msgid "Creativity Level"
msgstr "Creativity Level"

#: src/components/settings/views/billing.tsx:66
msgid "Current Plan"
msgstr "Current Plan"

#: src/components/settings/views/ai.tsx:635
#: src/components/settings/views/ai.tsx:734
msgid "Custom Endpoint"
msgstr "Custom Endpoint"

@@ -588,7 +588,7 @@ msgstr "Enable"
msgid "Enable Integration"
msgstr "Enable Integration"

#: src/components/settings/views/ai.tsx:509
#: src/components/settings/views/ai.tsx:607
msgid "Enhancing"
msgstr "Enhancing"

@@ -600,11 +600,11 @@ msgstr "Enter a section title"
#~ msgid "Enter model name (e.g., gpt-4, llama3.2:3b)"
#~ msgstr "Enter model name (e.g., gpt-4, llama3.2:3b)"

#: src/components/settings/views/ai.tsx:687
#: src/components/settings/views/ai.tsx:786
msgid "Enter the API key for your custom LLM endpoint"
msgstr "Enter the API key for your custom LLM endpoint"

#: src/components/settings/views/ai.tsx:661
#: src/components/settings/views/ai.tsx:760
msgid "Enter the base URL for your custom LLM endpoint"
msgstr "Enter the base URL for your custom LLM endpoint"

@@ -767,7 +767,7 @@ msgstr "LinkedIn username"
msgid "Live summary of the meeting"
msgstr "Live summary of the meeting"

#: src/components/settings/views/ai.tsx:721
#: src/components/settings/views/ai.tsx:820
msgid "Loading available models..."
msgstr "Loading available models..."

@@ -813,7 +813,7 @@ msgstr "Members"
msgid "Microphone Access"
msgstr "Microphone Access"

#: src/components/settings/views/ai.tsx:709
#: src/components/settings/views/ai.tsx:808
msgid "Model Name"
msgstr "Model Name"

@@ -955,7 +955,7 @@ msgstr "Pause"
msgid "people"
msgstr "people"

#: src/components/settings/views/ai.tsx:371
#: src/components/settings/views/ai.tsx:468
msgid "Performance difference between languages"
msgstr "Performance difference between languages"

@@ -1045,7 +1045,7 @@ msgstr "Search..."
msgid "Sections"
msgstr "Sections"

#: src/components/settings/views/ai.tsx:712
#: src/components/settings/views/ai.tsx:811
msgid "Select a model from the dropdown (if available) or manually enter the model name required by your endpoint."
msgstr "Select a model from the dropdown (if available) or manually enter the model name required by your endpoint."

@@ -1187,7 +1187,7 @@ msgstr "Toggle left sidebar"
msgid "Toggle widget panel"
msgstr "Toggle widget panel"

#: src/components/settings/views/ai.tsx:362
#: src/components/settings/views/ai.tsx:459
msgid "Transcribing"
msgstr "Transcribing"

28 changes: 14 additions & 14 deletions apps/desktop/src/locales/ko/messages.po
@@ -346,12 +346,12 @@ msgstr ""
msgid "Anyone with the link can view this page"
msgstr ""

#: src/components/settings/views/ai.tsx:658
#: src/components/settings/views/ai.tsx:757
msgid "API Base URL"
msgstr ""

#: src/components/settings/views/integrations.tsx:197
#: src/components/settings/views/ai.tsx:684
#: src/components/settings/views/ai.tsx:783
msgid "API Key"
msgstr ""

@@ -460,7 +460,7 @@ msgstr ""
#~ msgid "Connect"
#~ msgstr ""

#: src/components/settings/views/ai.tsx:638
#: src/components/settings/views/ai.tsx:737
msgid "Connect to a self-hosted or third-party LLM endpoint (OpenAI API compatible)."
msgstr ""

@@ -492,7 +492,7 @@ msgstr ""
msgid "Continue"
msgstr ""

#: src/components/settings/views/ai.tsx:770
#: src/components/settings/views/ai.tsx:869
msgid "Control how creative the AI enhancement should be"
msgstr ""

@@ -517,15 +517,15 @@ msgstr ""
msgid "Create your first template to get started"
msgstr ""

#: src/components/settings/views/ai.tsx:767
#: src/components/settings/views/ai.tsx:866
msgid "Creativity Level"
msgstr ""

#: src/components/settings/views/billing.tsx:66
msgid "Current Plan"
msgstr ""

#: src/components/settings/views/ai.tsx:635
#: src/components/settings/views/ai.tsx:734
msgid "Custom Endpoint"
msgstr ""

@@ -588,7 +588,7 @@ msgstr ""
msgid "Enable Integration"
msgstr "Enable Integration"

#: src/components/settings/views/ai.tsx:509
#: src/components/settings/views/ai.tsx:607
msgid "Enhancing"
msgstr "Enhancing"

@@ -600,11 +600,11 @@ msgstr ""
#~ msgid "Enter model name (e.g., gpt-4, llama3.2:3b)"
#~ msgstr ""

#: src/components/settings/views/ai.tsx:687
#: src/components/settings/views/ai.tsx:786
msgid "Enter the API key for your custom LLM endpoint"
msgstr ""

#: src/components/settings/views/ai.tsx:661
#: src/components/settings/views/ai.tsx:760
msgid "Enter the base URL for your custom LLM endpoint"
msgstr ""

@@ -767,7 +767,7 @@ msgstr ""
msgid "Live summary of the meeting"
msgstr ""

#: src/components/settings/views/ai.tsx:721
#: src/components/settings/views/ai.tsx:820
msgid "Loading available models..."
msgstr ""

@@ -813,7 +813,7 @@ msgstr ""
msgid "Microphone Access"
msgstr ""

#: src/components/settings/views/ai.tsx:709
#: src/components/settings/views/ai.tsx:808
msgid "Model Name"
msgstr ""

@@ -955,7 +955,7 @@ msgstr ""
msgid "people"
msgstr ""

#: src/components/settings/views/ai.tsx:371
#: src/components/settings/views/ai.tsx:468
msgid "Performance difference between languages"
msgstr ""

@@ -1045,7 +1045,7 @@ msgstr ""
msgid "Sections"
msgstr ""

#: src/components/settings/views/ai.tsx:712
#: src/components/settings/views/ai.tsx:811
msgid "Select a model from the dropdown (if available) or manually enter the model name required by your endpoint."
msgstr ""

@@ -1187,7 +1187,7 @@ msgstr ""
msgid "Toggle widget panel"
msgstr ""

#: src/components/settings/views/ai.tsx:362
#: src/components/settings/views/ai.tsx:459
msgid "Transcribing"
msgstr ""
