Fix external stt server startups #1402
New file (run-event handler in the local LLM plugin):

```diff
@@ -0,0 +1,32 @@
+use crate::LocalLlmPluginExt;
+use tauri_plugin_windows::HyprWindow;
+
+pub fn on_event<R: tauri::Runtime>(app: &tauri::AppHandle<R>, event: &tauri::RunEvent) {
+    match event {
+        tauri::RunEvent::WindowEvent { label, event, .. } => {
+            let hypr_window = match label.parse::<HyprWindow>() {
+                Ok(window) => window,
+                Err(e) => {
+                    tracing::warn!("parse_error: {:?}", e);
+                    return;
+                }
+            };
+
+            if hypr_window != HyprWindow::Main {
+                return;
+            }
+
+            match event {
+                tauri::WindowEvent::Focused(true) => {
+                    tokio::task::block_in_place(|| {
+                        tokio::runtime::Handle::current().block_on(async {
+                            let _ = app.start_server().await;
+                        });
+                    });
+                }
+                _ => {}
+            }
+        }
+        _ => {}
+    }
+}
```

**Review comment:** Blocking the event loop with `block_in_place` in a Tauri event handler can freeze or delay UI/event processing; spawn the async task instead of blocking.

**Review comment:** Errors from `start_server` are ignored; log failures to aid diagnosis and reliability.
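A minimal sketch of what both comments ask for, assuming `tauri::async_runtime::spawn` is available in this crate and that `start_server` keeps the signature shown in the diff (illustrative, not the PR's code):

```rust
use crate::LocalLlmPluginExt;
use tauri_plugin_windows::HyprWindow;

pub fn on_event<R: tauri::Runtime>(app: &tauri::AppHandle<R>, event: &tauri::RunEvent) {
    if let tauri::RunEvent::WindowEvent { label, event, .. } = event {
        // Ignore windows whose label does not parse or that are not the main window.
        let Ok(window) = label.parse::<HyprWindow>() else {
            return;
        };
        if window != HyprWindow::Main {
            return;
        }
        if let tauri::WindowEvent::Focused(true) = event {
            // Spawn the startup instead of block_in_place + block_on so the event
            // loop keeps running, and log the outcome instead of discarding it.
            let app = app.clone();
            tauri::async_runtime::spawn(async move {
                if let Err(e) = app.start_server().await {
                    tracing::error!("server_start_failed: {:?}", e);
                }
            });
        }
    }
}
```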
Run-event handler in the local STT plugin:

```diff
@@ -1,54 +1,35 @@
-use tauri::Manager;
-
-use crate::{LocalSttPluginExt, SharedState};
+use crate::LocalSttPluginExt;
 use tauri_plugin_windows::HyprWindow;
 
 pub fn on_event<R: tauri::Runtime>(app: &tauri::AppHandle<R>, event: &tauri::RunEvent) {
-    let state = app.state::<SharedState>();
-
     match event {
-        tauri::RunEvent::WindowEvent { label, event, .. } => match event {
-            tauri::WindowEvent::CloseRequested { .. } | tauri::WindowEvent::Destroyed => {
-                let hypr_window = match label.parse::<HyprWindow>() {
-                    Ok(window) => window,
-                    Err(e) => {
-                        tracing::warn!("window_parse_error: {:?}", e);
-                        return;
-                    }
-                };
-
-                if hypr_window != HyprWindow::Main {
-                    return;
-                }
-
-                tracing::info!("events: stopping servers");
-
-                tokio::task::block_in_place(|| {
-                    tokio::runtime::Handle::current().block_on(async {
-                        let mut guard = state.lock().await;
-
-                        if let Some(_) = guard.internal_server.take() {
-                            guard.internal_server = None;
-                        }
-                        if let Some(_) = guard.external_server.take() {
-                            guard.external_server = None;
-                        }
-                        for (_, (task, token)) in guard.download_task.drain() {
-                            token.cancel();
-                            task.abort();
-                        }
-                    });
-                });
-            }
-            tauri::WindowEvent::Focused(true) => {
-                tokio::task::block_in_place(|| {
-                    tokio::runtime::Handle::current().block_on(async {
-                        let _ = app.start_server(None).await;
-                    });
-                });
-            }
-            _ => {}
-        },
+        tauri::RunEvent::WindowEvent { label, event, .. } => {
+            let hypr_window = match label.parse::<HyprWindow>() {
+                Ok(window) => window,
+                Err(e) => {
+                    tracing::warn!("parse_error: {:?}", e);
+                    return;
+                }
+            };
+
+            if hypr_window != HyprWindow::Main {
+                return;
+            }
+
+            match event {
+                tauri::WindowEvent::Focused(true) => {
+                    tokio::task::block_in_place(|| {
+                        tokio::runtime::Handle::current().block_on(async {
+                            match app.start_server(None).await {
+                                Ok(_) => tracing::info!("server_started"),
+                                Err(e) => tracing::error!("server_start_failed: {:?}", e),
+                            }
+                        });
+                    });
+                }
+                _ => {}
+            }
+        }
         _ => {}
     }
 }
```

**Review comment:** Blocking the runtime thread with `block_in_place` + `block_on` inside an event handler may stall the UI/event loop; prefer spawning the async task instead.
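The same idea applies to the STT handler; a hypothetical drop-in replacement for the `Focused(true)` arm above, again assuming the `AppHandle` can be cloned into a spawned task:

```rust
tauri::WindowEvent::Focused(true) => {
    // Spawn instead of blocking the runtime thread; keep the existing logs.
    let app = app.clone();
    tauri::async_runtime::spawn(async move {
        match app.start_server(None).await {
            Ok(_) => tracing::info!("server_started"),
            Err(e) => tracing::error!("server_start_failed: {:?}", e),
        }
    });
}
```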
`plugins/local-stt/src/server/external.rs`:

```diff
@@ -7,12 +7,12 @@ pub struct ServerHandle {
     client: hypr_am::Client,
 }
 
-impl Drop for ServerHandle {
-    fn drop(&mut self) {
-        tracing::info!("stopping");
-        let _ = self.shutdown.send(());
-    }
-}
+// impl Drop for ServerHandle {
+//     fn drop(&mut self) {
+//         tracing::info!("stopping");
+//         let _ = self.shutdown.send(());
+//     }
+// }
 
 impl ServerHandle {
     pub async fn health(&self) -> ServerHealth {
```

**Review comment (on lines +10 to +15): Re-enable `Drop` (or add an explicit shutdown API) to avoid orphaned external STT processes**

Commenting out `Drop` removes the only in-module path that can signal the background task to kill the child process. The `shutdown` sender is private (not `pub`) and there is no public method that sends on it, so nothing external can currently trigger shutdown. This risks leaking the external STT child when the handle is dropped. Recommend restoring `Drop` and including `base_url` in the log (per PR objectives). Minimal diff:

```diff
-// impl Drop for ServerHandle {
-//     fn drop(&mut self) {
-//         tracing::info!("stopping");
-//         let _ = self.shutdown.send(());
-//     }
-// }
+impl Drop for ServerHandle {
+    fn drop(&mut self) {
+        tracing::info!("stopping: {}", self.base_url);
+        if let Err(e) = self.shutdown.send(()) {
+            tracing::debug!("shutdown_channel_already_closed {:?}", e);
+        }
+    }
+}
```

Optionally, instead of relying on `Drop`, expose a public `fn shutdown(&self) -> bool` that sends on the watch channel and returns whether a send occurred, and ensure all call sites invoke it on app lifecycle events.

Run this to confirm no alternative shutdown path exists right now:

```bash
#!/bin/bash
set -euo pipefail
# ServerHandle definitions and Drop impls
rg -nP '^\s*pub\s+struct\s+ServerHandle\b' -C3
rg -nP 'impl\s+Drop\s+for\s+ServerHandle\b' -C2
# Any explicit sends on the shutdown sender?
rg -n 'shutdown\.send\(' -C2
```

Length of output: 3376

**Re-enable `Drop` for the external STT `ServerHandle` to prevent orphans.** Please restore the `Drop` implementation:

```diff
--- a/plugins/local-stt/src/server/external.rs
+++ b/plugins/local-stt/src/server/external.rs
@@
 // impl Drop for ServerHandle {
 //     fn drop(&mut self) {
 //         tracing::info!("stopping");
 //         let _ = self.shutdown.send(());
 //     }
 // }
+
+impl Drop for ServerHandle {
+    fn drop(&mut self) {
+        tracing::info!("stopping external STT at {}", self.base_url);
+        if let Err(e) = self.shutdown.send(()) {
+            tracing::debug!("external STT shutdown channel already closed: {:?}", e);
+        }
+    }
+}
```

Alternatively, mirror the LLM plugin's pattern by adding:

```rust
impl ServerHandle {
    /// Gracefully signal the external STT to stop; returns Err if already shut down.
    pub fn shutdown(self) -> Result<(), tokio::sync::watch::error::SendError<()>> {
        self.shutdown.send(())
    }
}
```

(plugins/local-stt/src/server/external.rs:3–15)
```diff
@@ -58,8 +58,12 @@ pub async fn run_server(
 ) -> Result<ServerHandle, crate::Error> {
     let port = 50060;
+    let _ = port_killer::kill(port);
+    tokio::time::sleep(std::time::Duration::from_millis(500)).await;
 
+    tracing::info!("spwaning_started");
     let (mut rx, child) = cmd.args(["--port", &port.to_string()]).spawn()?;
+    tracing::info!("spwaning_ended");
 
     let base_url = format!("http://localhost:{}", port);
     let (shutdown_tx, mut shutdown_rx) = tokio::sync::watch::channel(());
     let client = hypr_am::Client::new(&base_url);
```

**Review comment:** Arbitrary fixed 500ms sleep after killing the port can cause flaky startups and unnecessary delay; prefer awaiting port availability or process readiness with a bounded retry instead.
```diff
@@ -118,7 +122,8 @@ pub async fn run_server(
         }
     });
 
-    tokio::time::sleep(std::time::Duration::from_millis(500)).await;
+    tokio::time::sleep(std::time::Duration::from_millis(200)).await;
+    tracing::info!("returning_handle");
 
     Ok(ServerHandle {
         api_key: Some(am_key),
```

**Review comment (refactor suggestion, on lines +125 to 127): Replace fixed 200 ms post-spawn delay with a readiness wait on the HTTP API**

A static sleep is guesswork. Poll the external STT server until it responds on the `base_url` (or until timeout). This reduces flakiness and speeds up the common fast path.

```diff
-    tokio::time::sleep(std::time::Duration::from_millis(200)).await;
-    tracing::info!("returning_handle");
+    if !wait_server_ready(&client, std::time::Duration::from_secs(5)).await {
+        tracing::warn!("external_stt not ready within timeout: {}", base_url);
+    } else {
+        tracing::info!("external_stt_ready {}", base_url);
+    }
```

Add this helper in the same module (outside this hunk):

```rust
// Returns true once the server responds to status() (Loaded or Loading), false on timeout.
async fn wait_server_ready(client: &hypr_am::Client, timeout: std::time::Duration) -> bool {
    use tokio::time::{sleep, Instant};
    let start = Instant::now();
    loop {
        match client.status().await {
            Ok(res) => {
                // Consider both Loading and Loaded as "responding"; callers can poll health() later.
                if matches!(
                    res.model_state,
                    hypr_am::ModelState::Loaded | hypr_am::ModelState::Loading
                ) {
                    return true;
                }
            }
            Err(_) => {}
        }
        if start.elapsed() >= timeout {
            return false;
        }
        sleep(std::time::Duration::from_millis(100)).await;
    }
}
```
**Review comment:** Pro models are enabled here while the section description says they are temporarily disabled; update the description or keep this disabled to avoid confusing UX.