Merged
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 0 additions & 2 deletions apps/desktop/src-tauri/Entitlements.plist
@@ -8,7 +8,5 @@
<true/>
<key>com.apple.security.personal-information.addressbook</key>
<true/>
<key>com.apple.developer.usernotifications.time-sensitive</key>
<true/>
</dict>
</plist>
33 changes: 0 additions & 33 deletions apps/desktop/src-tauri/src/ext.rs
@@ -9,7 +9,6 @@ use tauri_plugin_store2::{ScopedStore, StorePluginExt};
pub trait AppExt<R: tauri::Runtime> {
fn sentry_dsn(&self) -> String;
fn desktop_store(&self) -> Result<ScopedStore<R, crate::StoreKey>, String>;
fn setup_local_ai(&self) -> impl Future<Output = Result<(), String>>;
fn setup_db_for_local(&self) -> impl Future<Output = Result<(), String>>;
fn setup_db_for_cloud(&self) -> impl Future<Output = Result<(), String>>;
}
@@ -34,38 +33,6 @@ impl<R: tauri::Runtime, T: tauri::Manager<R>> AppExt<R> for T {
self.scoped_store("desktop").map_err(|e| e.to_string())
}

#[tracing::instrument(skip_all)]

async fn setup_local_ai(&self) -> Result<(), String> {
{
use tauri_plugin_local_stt::LocalSttPluginExt;

let current_model = self
.get_current_model()
.unwrap_or(SupportedSttModel::Whisper(WhisperModel::QuantizedSmall));

if let Ok(true) = self.is_model_downloaded(&current_model).await {
if let Err(e) = self.start_server(Some(current_model)).await {
tracing::error!("start_local_stt_server: {}", e);
}
}
}

{
use tauri_plugin_local_llm::{LocalLlmPluginExt, SupportedModel};

let current_model = self.get_current_model().unwrap_or(SupportedModel::HyprLLM);

if let Ok(true) = self.is_model_downloaded(&current_model).await {
if let Err(e) = self.start_server().await {
tracing::error!("start_local_llm_server: {}", e);
}
}
}

Ok(())
}

#[tracing::instrument(skip_all)]
async fn setup_db_for_local(&self) -> Result<(), String> {
let (db, db_just_created) = {
4 changes: 0 additions & 4 deletions apps/desktop/src-tauri/src/lib.rs
@@ -236,10 +236,6 @@ pub async fn main() {
}
}
}

tokio::spawn(async move {
app_clone.setup_local_ai().await.unwrap();
});
});

Ok(())
apps/desktop/src/components/settings/components/ai/stt-view-local.tsx
@@ -250,7 +250,7 @@ function ProModelsSection({
{proModels.data?.map((model) => (
<ModelEntry
key={model.key}
disabled={true}
disabled={false}

Pro models are enabled here while the section description says they are temporarily disabled; update the description or keep this disabled to avoid confusing UX.


model={model}
selectedSTTModel={selectedSTTModel}
setSelectedSTTModel={setSelectedSTTModel}
10 changes: 5 additions & 5 deletions crates/detect/src/mic/macos.rs
@@ -208,9 +208,9 @@ impl crate::Observer for Detector {
system_listener,
system_listener_ptr,
) {
println!("Failed to add system listener: {:?}", e);
tracing::error!("adding_system_listener_failed: {:?}", e);
} else {
println!("✅ System listener added successfully");
tracing::info!("adding_system_listener_success");
}

if let Ok(device) = ca::System::default_input_device() {
@@ -229,7 +229,7 @@
)
.is_ok()
{
println!("✅ Device listener added successfully");
tracing::info!("adding_device_listener_success");

if let Ok(mut device_guard) = current_device.lock() {
*device_guard = Some(device);
@@ -244,10 +244,10 @@
}
}
} else {
println!("❌ Failed to add device listener");
tracing::error!("adding_device_listener_failed");
}
} else {
println!("⚠️ No default input device found");
tracing::warn!("no_default_input_device_found");
}

let _ = tx.blocking_send(());
1 change: 1 addition & 0 deletions plugins/local-llm/Cargo.toml
@@ -31,6 +31,7 @@ hypr-llama = { workspace = true }

tauri = { workspace = true, features = ["test"] }
tauri-plugin-store2 = { workspace = true }
tauri-plugin-windows = { workspace = true }
tauri-specta = { workspace = true, features = ["derive", "typescript"] }

thiserror = { workspace = true }
32 changes: 32 additions & 0 deletions plugins/local-llm/src/events.rs
@@ -0,0 +1,32 @@
use crate::LocalLlmPluginExt;
use tauri_plugin_windows::HyprWindow;

pub fn on_event<R: tauri::Runtime>(app: &tauri::AppHandle<R>, event: &tauri::RunEvent) {
match event {
tauri::RunEvent::WindowEvent { label, event, .. } => {
let hypr_window = match label.parse::<HyprWindow>() {
Ok(window) => window,
Err(e) => {
tracing::warn!("parse_error: {:?}", e);
return;
}
};

if hypr_window != HyprWindow::Main {
return;
}

match event {
tauri::WindowEvent::Focused(true) => {
tokio::task::block_in_place(|| {

Blocking the event loop with block_in_place in a Tauri event handler can freeze or delay UI/event processing; spawn the async task instead of blocking.


tokio::runtime::Handle::current().block_on(async {
let _ = app.start_server().await;

Errors from start_server are ignored; log failures to aid diagnosis and reliability.
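
One way to fold both points together — a minimal sketch, assuming tauri::async_runtime::spawn is acceptable in this plugin and keeping the start_server() call from this diff (the helper name is illustrative, not part of the PR):

use crate::LocalLlmPluginExt;

// Sketch: handle the "main window focused" event without blocking the event loop,
// and surface start_server failures instead of discarding them.
fn on_main_window_focused<R: tauri::Runtime>(app: &tauri::AppHandle<R>) {
    // AppHandle is cheap to clone and 'static, so the spawned task can own it.
    let app = app.clone();
    tauri::async_runtime::spawn(async move {
        if let Err(e) = app.start_server().await {
            tracing::error!("local_llm_start_server_failed: {:?}", e);
        }
    });
}

Called from the Focused(true) arm, this removes both the block_in_place/block_on pair and the silently dropped error.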


});
});
}
_ => {}
}
}
_ => {}
}
}
3 changes: 3 additions & 0 deletions plugins/local-llm/src/lib.rs
@@ -6,13 +6,15 @@ use tokio::sync::Mutex;

mod commands;
mod error;
mod events;
mod ext;
mod manager;
mod model;
mod server;
mod store;

pub use error::*;
use events::*;
pub use ext::*;
pub use manager::*;
pub use model::*;
@@ -83,6 +85,7 @@ pub fn init<R: tauri::Runtime>() -> tauri::plugin::TauriPlugin<R> {

Ok(())
})
.on_event(on_event)
.build()
}

63 changes: 22 additions & 41 deletions plugins/local-stt/src/events.rs
@@ -1,54 +1,35 @@
use tauri::Manager;

use crate::{LocalSttPluginExt, SharedState};
use crate::LocalSttPluginExt;
use tauri_plugin_windows::HyprWindow;

pub fn on_event<R: tauri::Runtime>(app: &tauri::AppHandle<R>, event: &tauri::RunEvent) {
let state = app.state::<SharedState>();

match event {
tauri::RunEvent::WindowEvent { label, event, .. } => match event {
tauri::WindowEvent::CloseRequested { .. } | tauri::WindowEvent::Destroyed => {
let hypr_window = match label.parse::<HyprWindow>() {
Ok(window) => window,
Err(e) => {
tracing::warn!("window_parse_error: {:?}", e);
return;
}
};

if hypr_window != HyprWindow::Main {
tauri::RunEvent::WindowEvent { label, event, .. } => {
let hypr_window = match label.parse::<HyprWindow>() {
Ok(window) => window,
Err(e) => {
tracing::warn!("parse_error: {:?}", e);
return;
}
};

tracing::info!("events: stopping servers");

tokio::task::block_in_place(|| {
tokio::runtime::Handle::current().block_on(async {
let mut guard = state.lock().await;

if let Some(_) = guard.internal_server.take() {
guard.internal_server = None;
}
if let Some(_) = guard.external_server.take() {
guard.external_server = None;
}
for (_, (task, token)) in guard.download_task.drain() {
token.cancel();
task.abort();
}
});
});
if hypr_window != HyprWindow::Main {
return;
}
tauri::WindowEvent::Focused(true) => {
tokio::task::block_in_place(|| {
tokio::runtime::Handle::current().block_on(async {
let _ = app.start_server(None).await;

match event {
tauri::WindowEvent::Focused(true) => {
tokio::task::block_in_place(|| {

Blocking the runtime thread with block_in_place + block_on inside an event handler may stall the UI/event loop; prefer spawning the async task instead.
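
A non-blocking variant of this arm, sketched under the same assumptions as the local-llm case (spawn via tauri::async_runtime, keep the start_server(None) call and log messages already in this diff; the helper name is illustrative):

use crate::LocalSttPluginExt;

// Sketch: start the STT server from a spawned task rather than blocking the handler.
fn on_main_window_focused<R: tauri::Runtime>(app: &tauri::AppHandle<R>) {
    let app = app.clone();
    tauri::async_runtime::spawn(async move {
        match app.start_server(None).await {
            Ok(_) => tracing::info!("server_started"),
            Err(e) => tracing::error!("server_start_failed: {:?}", e),
        }
    });
}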


tokio::runtime::Handle::current().block_on(async {
match app.start_server(None).await {
Ok(_) => tracing::info!("server_started"),
Err(e) => tracing::error!("server_start_failed: {:?}", e),
}
});
});
});
}
_ => {}
}
_ => {}
},
}
_ => {}
}
}
2 changes: 1 addition & 1 deletion plugins/local-stt/src/ext.rs
@@ -252,7 +252,7 @@ impl<R: Runtime, T: Manager<R>> LocalSttPluginExt<R> for T {
self.shell()
.sidecar("stt")?
.current_dir(dirs::home_dir().unwrap())
.args(["serve", "-v"])
.args(["serve", "-v", "-d"])
};

let server = external::run_server(cmd, am_key).await?;
2 changes: 0 additions & 2 deletions plugins/local-stt/src/lib.rs
@@ -10,15 +10,13 @@ mod model;
mod server;
mod store;
mod types;
mod utils;

pub use error::*;
use events::*;
pub use ext::*;
pub use model::*;
pub use store::*;
pub use types::*;
use utils::*;

pub type SharedState = std::sync::Arc<tokio::sync::Mutex<State>>;

19 changes: 12 additions & 7 deletions plugins/local-stt/src/server/external.rs
@@ -7,12 +7,12 @@ pub struct ServerHandle {
client: hypr_am::Client,
}

impl Drop for ServerHandle {
fn drop(&mut self) {
tracing::info!("stopping");
let _ = self.shutdown.send(());
}
}
// impl Drop for ServerHandle {
// fn drop(&mut self) {
// tracing::info!("stopping");
// let _ = self.shutdown.send(());
// }
// }
Comment on lines +10 to +15

💡 Verification agent

🧩 Analysis chain

Re-enable Drop (or add an explicit shutdown API) to avoid orphaned external STT processes

Commenting out Drop removes the only in-module path that can signal the background task to kill the child process. The shutdown sender is private (not pub) and there’s no public method that sends on it, so nothing external can currently trigger shutdown. This risks leaking the external STT child when the handle is dropped.

Recommend restoring Drop and including base_url in the log (per PR objectives). Minimal diff:

-// impl Drop for ServerHandle {
-//     fn drop(&mut self) {
-//         tracing::info!("stopping");
-//         let _ = self.shutdown.send(());
-//     }
-// }
+impl Drop for ServerHandle {
+    fn drop(&mut self) {
+        tracing::info!("stopping: {}", self.base_url);
+        if let Err(e) = self.shutdown.send(()) {
+            tracing::debug!("shutdown_channel_already_closed {:?}", e);
+        }
+    }
+}

Optionally, instead of relying on Drop, expose a public fn shutdown(&self) -> bool that sends on the watch channel and returns whether a send occurred, and ensure all call sites invoke it on app lifecycle events.

Run this to confirm no alternative shutdown path exists right now:


🏁 Script executed:

#!/bin/bash
set -euo pipefail
# ServerHandle definitions and Drop impls
rg -nP '^\s*pub\s+struct\s+ServerHandle\b' -C3
rg -nP 'impl\s+Drop\s+for\s+ServerHandle\b' -C2

# Any explicit sends on the shutdown sender?
rg -n 'shutdown\.send\(' -C2

Length of output: 3376


Re-enable Drop for External STT ServerHandle to Prevent Orphans

The plugins/local-stt/src/server/external.rs ServerHandle currently has no Drop implementation or public shutdown API, so its background task never receives the shutdown signal. This will leak the external STT child process whenever the handle is dropped.

Please restore the Drop impl (including base_url in the log) or expose a public shutdown method. For example:

--- a/plugins/local-stt/src/server/external.rs
+++ b/plugins/local-stt/src/server/external.rs
@@
// impl Drop for ServerHandle {
//     fn drop(&mut self) {
//         tracing::info!("stopping");
//         let _ = self.shutdown.send(());
//     }
// }
+
+impl Drop for ServerHandle {
+    fn drop(&mut self) {
+        tracing::info!("stopping external STT at {}", self.base_url);
+        if let Err(e) = self.shutdown.send(()) {
+            tracing::debug!("external STT shutdown channel already closed: {:?}", e);
+        }
+    }
+}

Alternatively, mirror the LLM plugin’s pattern by adding:

impl ServerHandle {
    /// Gracefully signal the external STT to stop; returns Err if already shut down.
    pub fn shutdown(self) -> Result<(), tokio::sync::watch::error::SendError<()>> {
        self.shutdown.send(())
    }
}

– plugins/local-stt/src/server/external.rs:3–15



impl ServerHandle {
pub async fn health(&self) -> ServerHealth {
@@ -58,8 +58,12 @@ pub async fn run_server(
) -> Result<ServerHandle, crate::Error> {
let port = 50060;
let _ = port_killer::kill(port);
tokio::time::sleep(std::time::Duration::from_millis(500)).await;

Arbitrary fixed 500ms sleep after killing the port can cause flaky startups and unnecessary delay; prefer awaiting port availability or process readiness with a bounded retry instead.
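
One possible shape for a bounded wait, sketched on the assumption that being able to bind the port locally is a good-enough signal that the killed process has released it (the helper name and timeout are illustrative, not part of this PR):

// Sketch: wait, with a bounded retry, for the TCP port to become bindable again
// after killing the old process, instead of sleeping a fixed 500 ms.
async fn wait_port_free(port: u16, timeout: std::time::Duration) -> bool {
    let start = tokio::time::Instant::now();
    loop {
        // If bind succeeds the port has been released; drop the listener immediately.
        if tokio::net::TcpListener::bind(("127.0.0.1", port)).await.is_ok() {
            return true;
        }
        if start.elapsed() >= timeout {
            return false;
        }
        tokio::time::sleep(std::time::Duration::from_millis(50)).await;
    }
}

At the call site, wait_port_free(port, std::time::Duration::from_secs(2)).await could replace the fixed sleep; on timeout the code can still proceed (or return an error), which is no worse than the current behavior.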



tracing::info!("spwaning_started");
let (mut rx, child) = cmd.args(["--port", &port.to_string()]).spawn()?;
tracing::info!("spwaning_ended");

let base_url = format!("http://localhost:{}", port);
let (shutdown_tx, mut shutdown_rx) = tokio::sync::watch::channel(());
let client = hypr_am::Client::new(&base_url);
@@ -118,7 +122,8 @@ pub async fn run_server(
}
});

tokio::time::sleep(std::time::Duration::from_millis(500)).await;
tokio::time::sleep(std::time::Duration::from_millis(200)).await;
tracing::info!("returning_handle");

Comment on lines +125 to 127

🛠️ Refactor suggestion

Replace fixed 200 ms post-spawn delay with a readiness wait on the HTTP API

A static sleep is guesswork. Poll the external STT server until it responds on the base_url (or until timeout). This reduces flakiness and speeds up the common fast path.

-    tokio::time::sleep(std::time::Duration::from_millis(200)).await;
-    tracing::info!("returning_handle");
+    if !wait_server_ready(&client, std::time::Duration::from_secs(5)).await {
+        tracing::warn!("external_stt not ready within timeout: {}", base_url);
+    } else {
+        tracing::info!("external_stt_ready {}", base_url);
+    }

Add this helper in the same module (outside this hunk):

// Returns true once the server responds to status() (Loaded or Loading), false on timeout.
async fn wait_server_ready(client: &hypr_am::Client, timeout: std::time::Duration) -> bool {
    use tokio::time::{sleep, Instant};
    let start = Instant::now();
    loop {
        match client.status().await {
            Ok(res) => {
                // Consider both Loading and Loaded as "responding"; callers can poll health() later.
                if matches!(res.model_state, hypr_am::ModelState::Loaded | hypr_am::ModelState::Loading) {
                    return true;
                }
            }
            Err(_) => {}
        }
        if start.elapsed() >= timeout {
            return false;
        }
        sleep(std::time::Duration::from_millis(100)).await;
    }
}

Ok(ServerHandle {
api_key: Some(am_key),
2 changes: 1 addition & 1 deletion plugins/local-stt/src/server/internal.rs
@@ -55,7 +55,7 @@ pub struct ServerHandle {

impl Drop for ServerHandle {
fn drop(&mut self) {
tracing::info!("stopping");
tracing::info!("stopping: {}", self.base_url);
let _ = self.shutdown.send(());
}
}