diff --git a/Cargo.toml b/Cargo.toml index 3f3b7f77e7..ce8c93f8c9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -69,6 +69,16 @@ unexpected_cfgs = "allow" dbg_macro = "deny" let_underscore_future = "deny" unchecked_duration_subtraction = "deny" +collapsible_if = "deny" +manual_is_multiple_of = "deny" +clone_on_copy = "deny" +redundant_closure = "deny" +ptr_arg = "deny" +len_zero = "deny" +let_unit_value = "deny" +unnecessary_lazy_evaluations = "deny" +needless_range_loop = "deny" +manual_clamp = "deny" # Optimize for smaller binary size [profile.release] diff --git a/apps/desktop/src-tauri/src/lib.rs b/apps/desktop/src-tauri/src/lib.rs index ea9baef021..2c0bf465d5 100644 --- a/apps/desktop/src-tauri/src/lib.rs +++ b/apps/desktop/src-tauri/src/lib.rs @@ -84,8 +84,6 @@ use tauri_plugin_notification::{NotificationExt, PermissionState}; use tauri_plugin_opener::OpenerExt; use tauri_plugin_shell::ShellExt; use tauri_specta::Event; -#[cfg(target_os = "macos")] -use tokio::sync::Mutex; use tokio::sync::{RwLock, oneshot}; use tracing::{error, instrument, trace, warn}; use upload::{create_or_get_video, upload_image, upload_video}; @@ -897,8 +895,7 @@ async fn get_video_metadata(path: PathBuf) -> Result Result<(), String> { } let entries = std::fs::read_dir(&recordings_dir) - .map_err(|e| format!("Failed to read recordings directory: {}", e))?; + .map_err(|e| format!("Failed to read recordings directory: {e}"))?; for entry in entries.flatten() { let path = entry.path(); if path.is_dir() && path.extension().and_then(|s| s.to_str()) == Some("cap") { diff --git a/apps/desktop/src-tauri/src/logging.rs b/apps/desktop/src-tauri/src/logging.rs index c88f39abc9..25dc1d815a 100644 --- a/apps/desktop/src-tauri/src/logging.rs +++ b/apps/desktop/src-tauri/src/logging.rs @@ -33,18 +33,17 @@ pub async fn upload_log_file(app: &AppHandle) -> Result<(), String> { let log_file = get_latest_log_file(app).await.ok_or("No log file found")?; let metadata = - fs::metadata(&log_file).map_err(|e| 
format!("Failed to read log file metadata: {}", e))?; + fs::metadata(&log_file).map_err(|e| format!("Failed to read log file metadata: {e}"))?; let file_size = metadata.len(); - const MAX_SIZE: u64 = 1 * 1024 * 1024; + const MAX_SIZE: u64 = 1024 * 1024; let log_content = if file_size > MAX_SIZE { let content = - fs::read_to_string(&log_file).map_err(|e| format!("Failed to read log file: {}", e))?; + fs::read_to_string(&log_file).map_err(|e| format!("Failed to read log file: {e}"))?; let header = format!( - "⚠️ Log file truncated (original size: {} bytes, showing last ~1MB)\n\n", - file_size + "⚠️ Log file truncated (original size: {file_size} bytes, showing last ~1MB)\n\n" ); let max_content_size = (MAX_SIZE as usize) - header.len(); @@ -54,13 +53,13 @@ pub async fn upload_log_file(app: &AppHandle) -> Result<(), String> { if let Some(newline_pos) = truncated.find('\n') { format!("{}{}", header, &truncated[newline_pos + 1..]) } else { - format!("{}{}", header, truncated) + format!("{header}{truncated}") } } else { content } } else { - fs::read_to_string(&log_file).map_err(|e| format!("Failed to read log file: {}", e))? + fs::read_to_string(&log_file).map_err(|e| format!("Failed to read log file: {e}"))? 
}; let form = reqwest::multipart::Form::new() @@ -73,7 +72,7 @@ pub async fn upload_log_file(app: &AppHandle) -> Result<(), String> { client.post(url).multipart(form) }) .await - .map_err(|e| format!("Failed to upload logs: {}", e))?; + .map_err(|e| format!("Failed to upload logs: {e}"))?; if !response.status().is_success() { return Err(format!("Upload failed with status: {}", response.status())); diff --git a/apps/desktop/src-tauri/src/main.rs b/apps/desktop/src-tauri/src/main.rs index 4dae15806c..8f554ac2ab 100644 --- a/apps/desktop/src-tauri/src/main.rs +++ b/apps/desktop/src-tauri/src/main.rs @@ -67,7 +67,7 @@ fn main() { // Ensure logs directory exists std::fs::create_dir_all(&logs_dir).unwrap_or_else(|e| { - eprintln!("Failed to create logs directory: {}", e); + eprintln!("Failed to create logs directory: {e}"); }); let file_appender = tracing_appender::rolling::daily(&logs_dir, "cap-desktop.log"); diff --git a/apps/desktop/src-tauri/src/platform/macos/sc_shareable_content.rs b/apps/desktop/src-tauri/src/platform/macos/sc_shareable_content.rs index eb7fe21596..01dba7c59c 100644 --- a/apps/desktop/src-tauri/src/platform/macos/sc_shareable_content.rs +++ b/apps/desktop/src-tauri/src/platform/macos/sc_shareable_content.rs @@ -1,13 +1,7 @@ use cidre::{arc, ns, sc}; -use core_graphics::{display::CGDirectDisplayID, window::CGWindowID}; -use std::sync::Arc; -use std::{ - collections::HashMap, - sync::{OnceLock, RwLock}, - time::Instant, -}; +use std::sync::{Arc, OnceLock, RwLock}; use tokio::sync::{Mutex, Notify}; -use tracing::{debug, info, trace}; +use tracing::trace; #[derive(Default)] struct CacheState { @@ -62,8 +56,6 @@ pub async fn prewarm_shareable_content() -> Result<(), arc::R> { pub async fn get_shareable_content() -> Result>, arc::R> { - let lookup_start = Instant::now(); - if let Some(content) = state() .cache .read() @@ -82,14 +74,10 @@ pub async fn get_shareable_content() async fn run_warmup(task: WarmupTask) { let result = async { - let warm_start = 
Instant::now(); - let content = sc::ShareableContent::current().await?; let cache = ShareableContentCache::new(content); - let elapsed_ms = warm_start.elapsed().as_micros() as f64 / 1000.0; let mut guard = state().cache.write().unwrap(); - let replaced = guard.is_some(); *guard = Some(cache); Ok::<(), arc::R>(()) @@ -115,8 +103,6 @@ async fn run_warmup(task: WarmupTask) { struct ShareableContentCache { #[allow(dead_code)] content: arc::R, - displays: HashMap>, - windows: HashMap>, } unsafe impl Send for ShareableContentCache {} @@ -124,31 +110,7 @@ unsafe impl Sync for ShareableContentCache {} impl ShareableContentCache { fn new(content: arc::R) -> Self { - let displays = content - .displays() - .iter() - .map(|display| (display.display_id().0, display.retained())) - .collect(); - - let windows = content - .windows() - .iter() - .map(|window| (window.id(), window.retained())) - .collect(); - - Self { - content, - displays, - windows, - } - } - - fn display(&self, id: CGDirectDisplayID) -> Option> { - self.displays.get(&id).cloned() - } - - fn window(&self, id: CGWindowID) -> Option> { - self.windows.get(&id).cloned() + Self { content } } } diff --git a/apps/desktop/src-tauri/src/recording.rs b/apps/desktop/src-tauri/src/recording.rs index f1e10217af..e6d4f79478 100644 --- a/apps/desktop/src-tauri/src/recording.rs +++ b/apps/desktop/src-tauri/src/recording.rs @@ -7,11 +7,9 @@ use cap_project::{ TimelineConfiguration, TimelineSegment, UploadMeta, ZoomMode, ZoomSegment, cursor::CursorEvents, }; -use cap_recording::PipelineDoneError; use cap_recording::feeds::camera::CameraFeedLock; -use cap_recording::feeds::microphone::MicrophoneFeedLock; use cap_recording::{ - RecordingError, RecordingMode, + RecordingMode, feeds::{camera, microphone}, instant_recording, sources::{ @@ -471,7 +469,7 @@ pub async fn start_recording( let shareable_content = crate::platform::get_shareable_content() .await .map_err(|e| format!("GetShareableContent: {e}"))? 
- .ok_or_else(|| format!("GetShareableContent/NotAvailable"))?; + .ok_or_else(|| "GetShareableContent/NotAvailable".to_string())?; let common = InProgressRecordingCommon { target_name, @@ -830,7 +828,7 @@ async fn handle_recording_end( } } RecordingMetaInner::Instant(meta) => { - *meta = InstantRecordingMeta::Failed { error: error }; + *meta = InstantRecordingMeta::Failed { error }; } } project_meta @@ -980,12 +978,10 @@ async fn handle_recording_finish( return; } - if GeneralSettingsStore::get(&app).ok().flatten().unwrap_or_default().delete_instant_recordings_after_upload { - if let Err(err) = tokio::fs::remove_dir_all(&recording_dir).await { + if GeneralSettingsStore::get(&app).ok().flatten().unwrap_or_default().delete_instant_recordings_after_upload && let Err(err) = tokio::fs::remove_dir_all(&recording_dir).await { error!("Failed to remove recording files after upload: {err:?}"); - return; } - } + } } else if let Ok(meta) = build_video_meta(&output_path) .map_err(|err| error!("Error getting video metadata: {}", err)) diff --git a/apps/desktop/src-tauri/src/recording_settings.rs b/apps/desktop/src-tauri/src/recording_settings.rs index a611f25698..57f22c03e7 100644 --- a/apps/desktop/src-tauri/src/recording_settings.rs +++ b/apps/desktop/src-tauri/src/recording_settings.rs @@ -1,7 +1,6 @@ use cap_recording::{ RecordingMode, feeds::camera::DeviceOrModelID, sources::screen_capture::ScreenCaptureTarget, }; -use serde_json::json; use tauri::{AppHandle, Wry}; use tauri_plugin_store::StoreExt; @@ -40,23 +39,23 @@ impl RecordingSettingsStore { } // i don't trust anyone to not overwrite the whole store lols - pub fn update(app: &AppHandle, update: impl FnOnce(&mut Self)) -> Result<(), String> { - let Ok(store) = app.store("store") else { - return Err("Store not found".to_string()); - }; - - let mut settings = Self::get(app)?.unwrap_or_default(); - update(&mut settings); - store.set(Self::KEY, json!(settings)); - store.save().map_err(|e| e.to_string()) - } - - fn 
save(&self, app: &AppHandle) -> Result<(), String> { - let Ok(store) = app.store("store") else { - return Err("Store not found".to_string()); - }; - - store.set(Self::KEY, json!(self)); - store.save().map_err(|e| e.to_string()) - } + // pub fn update(app: &AppHandle, update: impl FnOnce(&mut Self)) -> Result<(), String> { + // let Ok(store) = app.store("store") else { + // return Err("Store not found".to_string()); + // }; + + // let mut settings = Self::get(app)?.unwrap_or_default(); + // update(&mut settings); + // store.set(Self::KEY, json!(settings)); + // store.save().map_err(|e| e.to_string()) + // } + + // fn save(&self, app: &AppHandle) -> Result<(), String> { + // let Ok(store) = app.store("store") else { + // return Err("Store not found".to_string()); + // }; + + // store.set(Self::KEY, json!(self)); + // store.save().map_err(|e| e.to_string()) + // } } diff --git a/apps/desktop/src-tauri/src/target_select_overlay.rs b/apps/desktop/src-tauri/src/target_select_overlay.rs index 31a030d6b7..efd1e68e8c 100644 --- a/apps/desktop/src-tauri/src/target_select_overlay.rs +++ b/apps/desktop/src-tauri/src/target_select_overlay.rs @@ -68,11 +68,11 @@ pub async fn open_target_select_overlays( let display = focused_target .as_ref() .map(|v| v.display()) - .unwrap_or_else(|| scap_targets::Display::get_containing_cursor()); + .unwrap_or_else(scap_targets::Display::get_containing_cursor); let window = focused_target .as_ref() .map(|v| v.window().and_then(|id| scap_targets::Window::from_id(&id))) - .unwrap_or_else(|| scap_targets::Window::get_topmost_at_cursor()); + .unwrap_or_else(scap_targets::Window::get_topmost_at_cursor); let _ = TargetUnderCursor { display_id: display.map(|d| d.id()), @@ -129,7 +129,7 @@ pub async fn close_target_select_overlays(app: AppHandle) -> Result<(), String> pub async fn get_window_icon(window_id: &str) -> Result, String> { let window_id = window_id .parse::() - .map_err(|err| format!("Invalid window ID: {}", err))?; + .map_err(|err| 
format!("Invalid window ID: {err}"))?; Ok(Window::from_id(&window_id) .ok_or("Window not found")? @@ -143,7 +143,7 @@ pub async fn get_window_icon(window_id: &str) -> Result, String> pub async fn display_information(display_id: &str) -> Result { let display_id = display_id .parse::() - .map_err(|err| format!("Invalid display ID: {}", err))?; + .map_err(|err| format!("Invalid display ID: {err}"))?; let display = Display::from_id(&display_id).ok_or("Display not found")?; Ok(DisplayInformation { diff --git a/apps/desktop/src-tauri/src/thumbnails/mac.rs b/apps/desktop/src-tauri/src/thumbnails/mac.rs index 64918e26a4..74036269e3 100644 --- a/apps/desktop/src-tauri/src/thumbnails/mac.rs +++ b/apps/desktop/src-tauri/src/thumbnails/mac.rs @@ -22,7 +22,7 @@ pub async fn capture_window_thumbnail(window: &scap_targets::Window) -> Option) -> Option { use cidre::{cv, sc}; use image::{ImageEncoder, RgbaImage, codecs::png::PngEncoder}; - use std::{io::Cursor, slice}; + use std::io::Cursor; let mut config = sc::StreamCfg::new(); config.set_width(THUMBNAIL_WIDTH as usize); @@ -162,7 +162,7 @@ fn convert_32bit_pixel_buffer( #[derive(Copy, Clone)] enum Nv12Range { Video, - Full, + _Full, } fn convert_nv12_pixel_buffer( @@ -187,12 +187,12 @@ fn convert_nv12_pixel_buffer( let y_plane_height = lock.height_of_plane(0); let uv_plane_height = lock.height_of_plane(1); - if y_plane_height < height || uv_plane_height < (height + 1) / 2 { + if y_plane_height < height || uv_plane_height < height.div_ceil(2) { warn!( y_plane_height, uv_plane_height, expected_y = height, - expected_uv = (height + 1) / 2, + expected_uv = height.div_ceil(2), "NV12 plane height smaller than expected", ); return None; @@ -228,7 +228,7 @@ fn convert_nv12_pixel_buffer( } let uv_row = &uv_plane[uv_row_start..uv_row_start + width]; - for x in 0..width { + for (x, y_val) in y_row.iter().enumerate().take(width) { let uv_index = (x / 2) * 2; if uv_index + 1 >= uv_row.len() { warn!( @@ -239,10 +239,9 @@ fn 
convert_nv12_pixel_buffer( return None; } - let y_val = y_row[x]; let cb = uv_row[uv_index]; let cr = uv_row[uv_index + 1]; - let (r, g, b) = ycbcr_to_rgb(y_val, cb, cr, range); + let (r, g, b) = ycbcr_to_rgb(*y_val, cb, cr, range); let out = (y_idx * width + x) * 4; rgba_data[out] = r; rgba_data[out + 1] = g; @@ -261,7 +260,7 @@ fn ycbcr_to_rgb(y: u8, cb: u8, cr: u8, range: Nv12Range) -> (u8, u8, u8) { let (y_value, scale) = match range { Nv12Range::Video => ((y - 16.0).max(0.0), 1.164383_f32), - Nv12Range::Full => (y, 1.0_f32), + Nv12Range::_Full => (y, 1.0_f32), }; let r = scale * y_value + 1.596027_f32 * cr; @@ -272,7 +271,7 @@ fn ycbcr_to_rgb(y: u8, cb: u8, cr: u8, range: Nv12Range) -> (u8, u8, u8) { } fn clamp_channel(value: f32) -> u8 { - value.max(0.0).min(255.0) as u8 + value.clamp(0.0, 255.0) as u8 } struct PixelBufferLock<'a> { diff --git a/apps/desktop/src-tauri/src/upload.rs b/apps/desktop/src-tauri/src/upload.rs index d27d1246bc..b220002a35 100644 --- a/apps/desktop/src-tauri/src/upload.rs +++ b/apps/desktop/src-tauri/src/upload.rs @@ -71,7 +71,7 @@ pub async fn upload_video( info!("Uploading video {video_id}..."); let start = Instant::now(); - let upload_id = api::upload_multipart_initiate(&app, &video_id).await?; + let upload_id = api::upload_multipart_initiate(app, &video_id).await?; let video_fut = async { let stream = progress( @@ -105,7 +105,7 @@ pub async fn upload_video( .map_err(|e| error!("Failed to get video metadata: {e}")) .ok(); - api::upload_multipart_complete(&app, &video_id, &upload_id, &parts, metadata.clone()) + api::upload_multipart_complete(app, &video_id, &upload_id, &parts, metadata.clone()) .await?; Ok(metadata) @@ -225,7 +225,7 @@ pub async fn create_or_get_video( s3_config_url.push_str(&format!("&width={}", meta.width)); s3_config_url.push_str(&format!("&height={}", meta.height)); if let Some(fps) = meta.fps { - s3_config_url.push_str(&format!("&fps={}", fps)); + s3_config_url.push_str(&format!("&fps={fps}")); } } @@ -245,12 
+245,11 @@ pub async fn create_or_get_video( if let Some(error) = body .as_ref() .ok() - .and_then(|body| serde_json::from_str::(&*body).ok()) + .and_then(|body| serde_json::from_str::(body).ok()) && status == StatusCode::FORBIDDEN + && error.error == "upgrade_required" { - if error.error == "upgrade_required" { - return Err(AuthedApiError::UpgradeRequired); - } + return Err(AuthedApiError::UpgradeRequired); } return Err(format!("create_or_get_video/error/{status}: {body:?}").into()); @@ -290,7 +289,7 @@ pub fn build_video_meta(path: &PathBuf) -> Result { height: video.height(), fps: video .frame_rate() - .map(|v| (v.numerator() as f32 / v.denominator() as f32)), + .map(|v| v.numerator() as f32 / v.denominator() as f32), }) } @@ -494,7 +493,7 @@ pub fn from_pending_file_to_chunks( } }) .await - .map_err(|_| io::Error::new(io::ErrorKind::Other, "Failed to open file. The recording pipeline may have crashed?"))?; + .map_err(|_| io::Error::other("Failed to open file. The recording pipeline may have crashed?"))?; let mut part_number = 1; let mut last_read_position: u64 = 0; @@ -504,8 +503,7 @@ pub fn from_pending_file_to_chunks( loop { // Check if realtime recording is done - if !realtime_is_done.unwrap_or(true) { - if let Some(ref realtime_receiver) = realtime_upload_done { + if !realtime_is_done.unwrap_or(true) && let Some(ref realtime_receiver) = realtime_upload_done { match realtime_receiver.try_recv() { Ok(_) => realtime_is_done = Some(true), Err(flume::TryRecvError::Empty) => {}, @@ -514,7 +512,7 @@ pub fn from_pending_file_to_chunks( // It possibly means something has gone wrong but that's not the uploader's problem. 
Err(_) => realtime_is_done = Some(true), } - } + } let file_size = match file.metadata().await { @@ -680,7 +678,7 @@ fn multipart_uploader( part_number, chunk, } = item.map_err(|err| { - format!("uploader/part/{:?}/fs: {err:?}", expected_part_number) + format!("uploader/part/{expected_part_number:?}/fs: {err:?}") })?; trace!( "Uploading chunk {part_number} ({} bytes) for video {video_id:?}", diff --git a/apps/desktop/src-tauri/src/window_exclusion.rs b/apps/desktop/src-tauri/src/window_exclusion.rs index c0d6e72682..09c688502b 100644 --- a/apps/desktop/src-tauri/src/window_exclusion.rs +++ b/apps/desktop/src-tauri/src/window_exclusion.rs @@ -21,13 +21,12 @@ impl WindowExclusion { owner_name: Option<&str>, window_title: Option<&str>, ) -> bool { - if let Some(identifier) = self.bundle_identifier.as_deref() { - if bundle_identifier + if let Some(identifier) = self.bundle_identifier.as_deref() + && bundle_identifier .map(|candidate| candidate == identifier) .unwrap_or(false) - { - return true; - } + { + return true; } if let Some(expected_owner) = self.owner_name.as_deref() { diff --git a/crates/audio/src/latency.rs b/crates/audio/src/latency.rs index d7cb52855b..e39e2dca20 100644 --- a/crates/audio/src/latency.rs +++ b/crates/audio/src/latency.rs @@ -258,7 +258,7 @@ impl OutputLatencyEstimator { } pub fn with_bias(bias_secs: f64) -> Self { - let bias_secs = bias_secs.max(0.0).min(MAX_LATENCY_SECS); + let bias_secs = bias_secs.clamp(0.0, MAX_LATENCY_SECS); Self { smoothed_latency_secs: if bias_secs > 0.0 { Some(bias_secs) @@ -292,7 +292,7 @@ impl OutputLatencyEstimator { } pub fn set_bias_secs(&mut self, bias_secs: f64) { - self.bias_secs = bias_secs.max(0.0).min(MAX_LATENCY_SECS); + self.bias_secs = bias_secs.clamp(0.0, MAX_LATENCY_SECS); } pub fn set_floor_and_ceiling(&mut self, min_floor_secs: f64, max_ceiling_secs: f64) { @@ -367,14 +367,13 @@ impl OutputLatencyEstimator { return; } - if let Some(prev_raw) = self.last_raw_latency_secs { - if self.update_count < 
WARMUP_GUARD_SAMPLES as u64 - && clamped > prev_raw * WARMUP_SPIKE_RATIO - { - self.last_raw_latency_secs = Some(clamped); - self.update_count = self.update_count.saturating_add(1); - return; - } + if let Some(prev_raw) = self.last_raw_latency_secs + && self.update_count < WARMUP_GUARD_SAMPLES as u64 + && clamped > prev_raw * WARMUP_SPIKE_RATIO + { + self.last_raw_latency_secs = Some(clamped); + self.update_count = self.update_count.saturating_add(1); + return; } let now = Instant::now(); @@ -586,10 +585,10 @@ mod macos { let mut max_latency = 0u32; for stream in streams { - if is_output_stream(&stream)? { - if let Ok(latency) = stream.latency() { - max_latency = max_latency.max(latency); - } + if is_output_stream(&stream)? + && let Ok(latency) = stream.latency() + { + max_latency = max_latency.max(latency); } } diff --git a/crates/audio/src/lib.rs b/crates/audio/src/lib.rs index 9540e1e189..f07a0ff9bc 100644 --- a/crates/audio/src/lib.rs +++ b/crates/audio/src/lib.rs @@ -1,11 +1,9 @@ mod audio_data; mod latency; -mod playback; mod renderer; pub use audio_data::*; pub use latency::*; -// playback module now only re-exports from latency module pub use renderer::*; pub trait FromSampleBytes: cpal::SizedSample + std::fmt::Debug + Send + 'static { diff --git a/crates/audio/src/playback.rs b/crates/audio/src/playback.rs deleted file mode 100644 index 543709d083..0000000000 --- a/crates/audio/src/playback.rs +++ /dev/null @@ -1,6 +0,0 @@ -// Re-exports moved to lib.rs to avoid unused import warnings - -#[cfg(test)] -mod tests { - use crate::{OutputLatencyHint, OutputTransportKind}; -} diff --git a/crates/editor/src/playback.rs b/crates/editor/src/playback.rs index e3b5814822..07fd8ef55e 100644 --- a/crates/editor/src/playback.rs +++ b/crates/editor/src/playback.rs @@ -326,16 +326,15 @@ impl AudioPlayback { }; if let SupportedBufferSize::Range { min, max } = supported_config.buffer_size() + && clamped != desired { - if clamped != desired { - info!( - requested_frames = 
desired, - clamped_frames = clamped, - range_min = *min, - range_max = *max, - "Adjusted requested audio buffer to fit device capabilities", - ); - } + info!( + requested_frames = desired, + clamped_frames = clamped, + range_min = *min, + range_max = *max, + "Adjusted requested audio buffer to fit device capabilities", + ); } config.buffer_size = BufferSize::Fixed(clamped); @@ -404,22 +403,22 @@ impl AudioPlayback { audio_renderer.prefill(&project_snapshot, headroom_samples); } - if let Some(hint) = static_latency_hint { - if hint.latency_secs > 0.0 { - match hint.transport { - cap_audio::OutputTransportKind::Airplay => info!( - "Applying AirPlay output latency hint: {:.1} ms", - hint.latency_secs * 1_000.0 - ), - transport if transport.is_wireless() => info!( - "Applying wireless output latency hint: {:.1} ms", - hint.latency_secs * 1_000.0 - ), - _ => info!( - "Applying output latency hint: {:.1} ms", - hint.latency_secs * 1_000.0 - ), - } + if let Some(hint) = static_latency_hint + && hint.latency_secs > 0.0 + { + match hint.transport { + cap_audio::OutputTransportKind::Airplay => info!( + "Applying AirPlay output latency hint: {:.1} ms", + hint.latency_secs * 1_000.0 + ), + transport if transport.is_wireless() => info!( + "Applying wireless output latency hint: {:.1} ms", + hint.latency_secs * 1_000.0 + ), + _ => info!( + "Applying output latency hint: {:.1} ms", + hint.latency_secs * 1_000.0 + ), } } diff --git a/crates/enc-avfoundation/src/mp4.rs b/crates/enc-avfoundation/src/mp4.rs index d56f7d44f6..a4ded9825a 100644 --- a/crates/enc-avfoundation/src/mp4.rs +++ b/crates/enc-avfoundation/src/mp4.rs @@ -232,36 +232,36 @@ impl MP4Encoder { self.most_recent_frame = Some((frame.clone(), timestamp)); - if let Some(pause_timestamp) = self.pause_timestamp { - if let Some(gap) = timestamp.checked_sub(pause_timestamp) { - self.timestamp_offset += gap; - self.pause_timestamp = None; - } + if let Some(pause_timestamp) = self.pause_timestamp + && let Some(gap) = 
timestamp.checked_sub(pause_timestamp) + { + self.timestamp_offset += gap; + self.pause_timestamp = None; } let mut pts_duration = timestamp .checked_sub(self.timestamp_offset) .unwrap_or(Duration::ZERO); - if let Some(last_pts) = self.last_video_pts { - if pts_duration <= last_pts { - let frame_duration = self.video_frame_duration(); - let adjusted_pts = last_pts + frame_duration; - - trace!( - ?timestamp, - ?last_pts, - adjusted_pts = ?adjusted_pts, - frame_duration_ns = frame_duration.as_nanos(), - "Monotonic video pts correction", - ); - - if let Some(new_offset) = timestamp.checked_sub(adjusted_pts) { - self.timestamp_offset = new_offset; - } + if let Some(last_pts) = self.last_video_pts + && pts_duration <= last_pts + { + let frame_duration = self.video_frame_duration(); + let adjusted_pts = last_pts + frame_duration; + + trace!( + ?timestamp, + ?last_pts, + adjusted_pts = ?adjusted_pts, + frame_duration_ns = frame_duration.as_nanos(), + "Monotonic video pts correction", + ); - pts_duration = adjusted_pts; + if let Some(new_offset) = timestamp.checked_sub(adjusted_pts) { + self.timestamp_offset = new_offset; } + + pts_duration = adjusted_pts; } self.last_video_pts = Some(pts_duration); @@ -292,11 +292,11 @@ impl MP4Encoder { return Ok(()); } - if let Some(pause_timestamp) = self.pause_timestamp { - if let Some(gap) = timestamp.checked_sub(pause_timestamp) { - self.timestamp_offset += gap; - self.pause_timestamp = None; - } + if let Some(pause_timestamp) = self.pause_timestamp + && let Some(gap) = timestamp.checked_sub(pause_timestamp) + { + self.timestamp_offset += gap; + self.pause_timestamp = None; } let Some(audio_input) = &mut self.audio_input else { @@ -341,27 +341,27 @@ impl MP4Encoder { .checked_sub(self.timestamp_offset) .unwrap_or(Duration::ZERO); - if let Some(last_pts) = self.last_audio_pts { - if pts_duration <= last_pts { - let frame_duration = Self::audio_frame_duration(&frame); - let adjusted_pts = last_pts + frame_duration; - - trace!( - 
?timestamp, - ?last_pts, - adjusted_pts = ?adjusted_pts, - frame_duration_ns = frame_duration.as_nanos(), - samples = frame.samples(), - sample_rate = frame.rate(), - "Monotonic audio pts correction", - ); - - if let Some(new_offset) = timestamp.checked_sub(adjusted_pts) { - self.timestamp_offset = new_offset; - } + if let Some(last_pts) = self.last_audio_pts + && pts_duration <= last_pts + { + let frame_duration = Self::audio_frame_duration(&frame); + let adjusted_pts = last_pts + frame_duration; + + trace!( + ?timestamp, + ?last_pts, + adjusted_pts = ?adjusted_pts, + frame_duration_ns = frame_duration.as_nanos(), + samples = frame.samples(), + sample_rate = frame.rate(), + "Monotonic audio pts correction", + ); - pts_duration = adjusted_pts; + if let Some(new_offset) = timestamp.checked_sub(adjusted_pts) { + self.timestamp_offset = new_offset; } + + pts_duration = adjusted_pts; } self.last_audio_pts = Some(pts_duration); diff --git a/crates/enc-ffmpeg/src/audio/buffered_resampler.rs b/crates/enc-ffmpeg/src/audio/buffered_resampler.rs index 8f26bcb54e..d350920856 100644 --- a/crates/enc-ffmpeg/src/audio/buffered_resampler.rs +++ b/crates/enc-ffmpeg/src/audio/buffered_resampler.rs @@ -48,7 +48,7 @@ impl BufferedResampler { pts += buffer.0.samples() as i64; } - return remaining_samples; + remaining_samples } pub fn output(&self) -> resampling::context::Definition { @@ -56,10 +56,10 @@ impl BufferedResampler { } pub fn add_frame(&mut self, mut frame: ffmpeg::frame::Audio) { - if let Some(min_next_pts) = self.min_next_pts { - if let Some(pts) = frame.pts() { - frame.set_pts(Some(pts.max(min_next_pts))); - } + if let Some(min_next_pts) = self.min_next_pts + && let Some(pts) = frame.pts() + { + frame.set_pts(Some(pts.max(min_next_pts))); } let pts = frame.pts().unwrap(); @@ -75,7 +75,7 @@ impl BufferedResampler { self.buffer.push_back((resampled_frame, resampled_pts)); - while let Some(_) = self.resampler.delay() { + while self.resampler.delay().is_some() { let mut 
resampled_frame = ffmpeg::frame::Audio::new( self.resampler.output().format, 0, @@ -89,7 +89,7 @@ impl BufferedResampler { self.buffer.push_back((resampled_frame, next_pts)); - next_pts = next_pts + samples as i64; + next_pts += samples as i64; } self.min_next_pts = Some(pts + frame.samples() as i64); diff --git a/crates/enc-ffmpeg/src/video/h264.rs b/crates/enc-ffmpeg/src/video/h264.rs index 62b49adfdf..99c6751289 100644 --- a/crates/enc-ffmpeg/src/video/h264.rs +++ b/crates/enc-ffmpeg/src/video/h264.rs @@ -99,9 +99,7 @@ impl H264EncoderBuilder { .video() .ok() .and_then(|codec_video| codec_video.formats()) - .map_or(false, |mut formats| { - formats.any(|f| f == input_config.pixel_format) - }); + .is_some_and(|mut formats| formats.any(|f| f == input_config.pixel_format)); let mut needs_pixel_conversion = false; @@ -267,7 +265,6 @@ impl H264Encoder { pub fn finish(&mut self, output: &mut format::context::Output) { if let Err(e) = self.base.process_eof(output, &mut self.encoder) { tracing::error!("Failed to send EOF to encoder: {:?}", e); - return; } } } diff --git a/crates/media-info/src/lib.rs b/crates/media-info/src/lib.rs index 3d90bd7f21..4c3f09a77c 100644 --- a/crates/media-info/src/lib.rs +++ b/crates/media-info/src/lib.rs @@ -73,7 +73,7 @@ impl AudioInfo { let channels = if Self::channel_layout_raw(raw_channels).is_some() { raw_channels } else { - raw_channels.min(Self::MAX_AUDIO_CHANNELS).max(1) + raw_channels.clamp(1, Self::MAX_AUDIO_CHANNELS) }; Self { diff --git a/crates/project/src/configuration.rs b/crates/project/src/configuration.rs index c29aa2ea77..5f4210d712 100644 --- a/crates/project/src/configuration.rs +++ b/crates/project/src/configuration.rs @@ -647,7 +647,7 @@ impl ProjectConfiguration { std::fs::rename( &temp_path, - &project_path.as_ref().join("project-config.json"), + project_path.as_ref().join("project-config.json"), )?; Ok(()) diff --git a/crates/recording/examples/camera.rs b/crates/recording/examples/camera.rs index 
94b0b1ade7..d4de350fdf 100644 --- a/crates/recording/examples/camera.rs +++ b/crates/recording/examples/camera.rs @@ -1,6 +1,6 @@ use std::fmt::Display; -use cap_recording::feeds::{CameraFeed, DeviceOrModelID}; +use cap_recording::{CameraFeed, feeds::camera::DeviceOrModelID}; use ffmpeg::format::Pixel; use image::{ColorType, codecs::jpeg}; diff --git a/crates/recording/examples/screen_capture.rs b/crates/recording/examples/screen_capture.rs deleted file mode 100644 index 6f8b71e655..0000000000 --- a/crates/recording/examples/screen_capture.rs +++ /dev/null @@ -1,50 +0,0 @@ -use cap_recording::{ - pipeline::control::PipelineControlSignal, - sources::{CMSampleBufferCapture, ScreenCaptureConfig, ScreenCaptureTarget}, -}; -use scap_targets::Window; -use std::time::SystemTime; - -#[tokio::main] -async fn main() { - tracing_subscriber::fmt::init(); - - let (video_tx, video_rx) = flume::unbounded(); - let (ready_tx, _ready_rx) = flume::unbounded(); - let (_ctrl_tx, ctrl_rx) = flume::unbounded(); - - let mut source = ScreenCaptureConfig::::init( - &ScreenCaptureTarget::Window { - id: Window::list() - .into_iter() - .find(|w| w.owner_name().unwrap().contains("Zed")) - .unwrap() - .id(), - }, - false, - 60, - video_tx, - None, - SystemTime::now(), - tokio::runtime::Handle::current(), - ) - .await - .unwrap(); - - std::thread::spawn(move || { - let _ = source.run( - ready_tx, - PipelineControlSignal { - last_value: None, - receiver: ctrl_rx, - }, - ); - }); - - while let Ok((video, _)) = video_rx.recv_async().await { - video.image_buf().unwrap(); - dbg!(video.total_sample_size()); - } - - tokio::time::sleep(std::time::Duration::from_secs(5)).await; -} diff --git a/crates/recording/src/audio_buffer.rs b/crates/recording/src/audio_buffer.rs deleted file mode 100644 index fd3e6442fb..0000000000 --- a/crates/recording/src/audio_buffer.rs +++ /dev/null @@ -1,97 +0,0 @@ -use cap_audio::cast_bytes_to_f32_slice; -use cap_media_info::AudioInfo; -use ffmpeg::encoder; -pub use 
ffmpeg::util::frame::Audio as FFAudio; -use std::collections::VecDeque; - -#[derive(Debug)] -pub struct AudioBuffer { - pub data: Vec>, - pub frame_size: usize, - config: AudioInfo, - frame: FFAudio, -} - -impl AudioBuffer { - pub fn new(config: AudioInfo, encoder: &encoder::Audio) -> Self { - let sample_size = config.sample_size(); - let frame_buffer_size = usize::try_from(config.buffer_size).unwrap() * sample_size; - - let frame_size = encoder.frame_size() as usize; - - Self { - data: vec![VecDeque::with_capacity(frame_buffer_size); config.channels], - frame_size, - config, - frame: FFAudio::new( - ffmpeg::format::Sample::F32(ffmpeg::format::sample::Type::Packed), - frame_size, - ffmpeg::ChannelLayout::default(config.channels as i32), - ), - } - } - - fn is_empty(&self) -> bool { - self.data[0].is_empty() - } - - fn len(&self) -> usize { - self.data[0].len() - } - - pub fn consume(&mut self, frame: &FFAudio) { - if frame.samples() == 0 { - return; - } - - if frame.is_planar() { - for channel in 0..self.config.channels { - self.data[channel].extend(unsafe { cast_bytes_to_f32_slice(frame.data(channel)) }); - } - } else { - self.data[0].extend(unsafe { - cast_bytes_to_f32_slice( - &frame.data(0)[0..frame.samples() * frame.channels() as usize], - ) - }); - } - } - - pub fn next_frame(&mut self, drain: bool) -> Option<&FFAudio> { - if self.is_empty() { - return None; - } - - if !drain && self.len() < self.frame_size * self.config.channels { - return None; - } - - let actual_samples_per_channel = if drain { - (self.len() / self.config.channels).min(self.frame_size) - } else { - self.frame_size - }; - - if self.frame.is_planar() { - for channel in 0..self.config.channels { - for (index, byte) in self.data[channel] - .drain(0..actual_samples_per_channel) - .enumerate() - { - self.frame.data_mut(channel)[index * 4..(index + 1) * 4] - .copy_from_slice(&byte.to_ne_bytes()); - } - } - } else { - for (index, byte) in self.data[0] - .drain(0..actual_samples_per_channel * 
self.config.channels) - .enumerate() - { - self.frame.data_mut(0)[index * 4..(index + 1) * 4] - .copy_from_slice(&byte.to_ne_bytes()); - } - } - - Some(&self.frame) - } -} diff --git a/crates/recording/src/feeds/microphone.rs b/crates/recording/src/feeds/microphone.rs index 270e9c0e7b..b344ee5729 100644 --- a/crates/recording/src/feeds/microphone.rs +++ b/crates/recording/src/feeds/microphone.rs @@ -96,7 +96,7 @@ impl MicrophoneFeed { } } - pub fn default() -> Option<(String, Device, SupportedStreamConfig)> { + pub fn default_device() -> Option<(String, Device, SupportedStreamConfig)> { let host = cpal::default_host(); host.default_input_device().and_then(get_usable_device) } @@ -158,7 +158,7 @@ fn get_usable_device(device: Device) -> Option<(String, Device, SupportedStreamC && config.min_sample_rate().0 <= preferred_rate.0 && config.max_sample_rate().0 >= preferred_rate.0 }) { - return Some(config.clone().with_sample_rate(preferred_rate)); + return Some(config.with_sample_rate(preferred_rate)); } configs.into_iter().find_map(|config| { diff --git a/crates/recording/src/instant_recording.rs b/crates/recording/src/instant_recording.rs index 138d2dfb28..787a4cd490 100644 --- a/crates/recording/src/instant_recording.rs +++ b/crates/recording/src/instant_recording.rs @@ -404,13 +404,13 @@ fn clamp_size(input: (u32, u32), max: (u32, u32)) -> (u32, u32) { // 16/9-ish if input.0 >= input.1 && (input.0 as f64 / input.1 as f64) <= 16.0 / 9.0 { let mut width = max.0.min(input.0); - if width % 2 != 0 { + if !width.is_multiple_of(2) { width -= 1; } let height_ratio = input.1 as f64 / input.0 as f64; let mut height = (height_ratio * width as f64).round() as u32; - if height % 2 != 0 { + if !height.is_multiple_of(2) { height -= 1; } @@ -419,13 +419,13 @@ fn clamp_size(input: (u32, u32), max: (u32, u32)) -> (u32, u32) { // 9/16-ish else if input.0 <= input.1 && (input.0 as f64 / input.1 as f64) >= 9.0 / 16.0 { let mut height = max.0.min(input.1); - if height % 2 != 0 { + if 
!height.is_multiple_of(2) { height -= 1; } let width_ratio = input.0 as f64 / input.1 as f64; let mut width = (width_ratio * height as f64).round() as u32; - if width % 2 != 0 { + if !width.is_multiple_of(2) { width -= 1; } @@ -434,13 +434,13 @@ fn clamp_size(input: (u32, u32), max: (u32, u32)) -> (u32, u32) { // ultrawide else if input.0 >= input.1 && (input.0 as f64 / input.1 as f64) > 16.0 / 9.0 { let mut height = max.1.min(input.1); - if height % 2 != 0 { + if !height.is_multiple_of(2) { height -= 1; } let width_ratio = input.0 as f64 / input.1 as f64; let mut width = (width_ratio * height as f64).round() as u32; - if width % 2 != 0 { + if !width.is_multiple_of(2) { width -= 1; } @@ -450,13 +450,13 @@ fn clamp_size(input: (u32, u32), max: (u32, u32)) -> (u32, u32) { else if input.0 < input.1 && (input.0 as f64 / input.1 as f64) <= 9.0 / 16.0 { // swapped since max_width/height assume horizontal let mut width = max.1.min(input.0); - if width % 2 != 0 { + if !width.is_multiple_of(2) { width -= 1; } let height_ratio = input.1 as f64 / input.0 as f64; let mut height = (height_ratio * width as f64).round() as u32; - if height % 2 != 0 { + if !height.is_multiple_of(2) { height -= 1; } diff --git a/crates/recording/src/lib.rs b/crates/recording/src/lib.rs index 59de64d171..c2ca40f73a 100644 --- a/crates/recording/src/lib.rs +++ b/crates/recording/src/lib.rs @@ -1,4 +1,3 @@ -mod audio_buffer; mod capture_pipeline; pub mod cursor; pub mod feeds; diff --git a/crates/recording/src/output_pipeline/core.rs b/crates/recording/src/output_pipeline/core.rs index 767e588aa9..cbd00941b7 100644 --- a/crates/recording/src/output_pipeline/core.rs +++ b/crates/recording/src/output_pipeline/core.rs @@ -13,7 +13,7 @@ use std::{ any::Any, future, marker::PhantomData, - path::PathBuf, + path::{Path, PathBuf}, sync::{ Arc, atomic::{self, AtomicBool}, @@ -298,7 +298,7 @@ fn setup_build() -> ( done_tx, done_rx .map(|v| { - v.map_err(|s| anyhow::Error::from(s)) + v.map_err(anyhow::Error::from) 
.and_then(|v| v) .map_err(|e| PipelineDoneError(Arc::new(e))) }) @@ -308,15 +308,16 @@ fn setup_build() -> ( ) } +#[allow(clippy::too_many_arguments)] async fn finish_build( mut setup_ctx: SetupCtx, audio_sources: Vec, stop_token: CancellationToken, - muxer: Arc>, + muxer: Arc>, timestamps: Timestamps, done_tx: oneshot::Sender>, first_tx: Option>, - path: &PathBuf, + path: &Path, ) -> anyhow::Result<()> { configure_audio( &mut setup_ctx, @@ -382,7 +383,7 @@ async fn setup_video_source( async fn setup_muxer( muxer_config: TMuxer::Config, - path: &PathBuf, + path: &Path, video_info: Option, audio_info: Option, pause_flag: &Arc, @@ -391,7 +392,7 @@ async fn setup_muxer( let muxer = Arc::new(Mutex::new( TMuxer::setup( muxer_config, - path.clone(), + path.to_path_buf(), video_info, audio_info, pause_flag.clone(), @@ -466,7 +467,7 @@ async fn configure_audio( timestamps: Timestamps, mut first_tx: Option>, ) -> anyhow::Result<()> { - if audio_sources.len() < 1 { + if audio_sources.is_empty() { return Ok(()); } @@ -493,7 +494,8 @@ async fn configure_audio( Ok(()) } }); - let _ = ready_rx + + ready_rx .await .map_err(|_| anyhow::format_err!("Audio mixer crashed"))??; @@ -774,14 +776,14 @@ pub trait VideoFrame: Send + 'static { pub trait Muxer: Send + 'static { type Config; - async fn setup( + fn setup( config: Self::Config, output_path: PathBuf, video_config: Option, audio_config: Option, pause_flag: Arc, tasks: &mut TaskPool, - ) -> anyhow::Result + ) -> impl Future> + Send where Self: Sized; diff --git a/crates/recording/src/sources/audio_mixer.rs b/crates/recording/src/sources/audio_mixer.rs index 5e451f4162..3a706e5d1b 100644 --- a/crates/recording/src/sources/audio_mixer.rs +++ b/crates/recording/src/sources/audio_mixer.rs @@ -156,9 +156,9 @@ impl AudioMixerBuilder { samples_out: 0, last_tick: None, abuffers, - resamplers, abuffersink, output, + _resamplers: resamplers, _filter_graph: filter_graph, _amix: amix, _aformat: aformat, @@ -168,27 +168,6 @@ impl 
AudioMixerBuilder { }) } - async fn spawn(self, output: mpsc::Sender) -> anyhow::Result { - let (ready_tx, ready_rx) = oneshot::channel::>(); - let stop_flag = Arc::new(AtomicBool::new(false)); - - let thread_handle = std::thread::spawn({ - let stop_flag = stop_flag.clone(); - move || self.run(output, ready_tx, stop_flag) - }); - - ready_rx - .await - .map_err(|_| anyhow::format_err!("Audio mixer crashed"))??; - - info!("Audio mixer ready"); - - Ok(AudioMixerHandle { - thread_handle, - stop_flag, - }) - } - pub fn run( self, output: mpsc::Sender, @@ -231,8 +210,8 @@ pub struct AudioMixer { last_tick: Option, // sample_timestamps: VecDeque<(usize, Timestamp)>, abuffers: Vec, - resamplers: Vec, abuffersink: ffmpeg::filter::Context, + _resamplers: Vec, _filter_graph: ffmpeg::filter::Graph, _amix: ffmpeg::filter::Context, _aformat: ffmpeg::filter::Context, @@ -363,44 +342,43 @@ impl AudioMixer { .map(|v| v.timestamp); } - if let Some(start_timestamp) = self.start_timestamp { - if let Some(elapsed_since_start) = now + if let Some(start_timestamp) = self.start_timestamp + && let Some(elapsed_since_start) = now .duration_since(self.timestamps) .checked_sub(start_timestamp.duration_since(self.timestamps)) - && elapsed_since_start > self.max_buffer_timeout - { - for source in &mut self.sources { - if source.buffer_last.is_none() { - let rate = source.info.rate(); - let buffer_timeout = source.buffer_timeout; - - let mut remaining = elapsed_since_start; - while remaining > buffer_timeout { - let chunk_samples = samples_for_timeout(rate, buffer_timeout); - let frame_duration = duration_from_samples(chunk_samples, rate); - - let mut frame = ffmpeg::frame::Audio::new( - source.info.sample_format, - chunk_samples, - source.info.channel_layout(), - ); - - for i in 0..frame.planes() { - frame.data_mut(i).fill(0); - } + && elapsed_since_start > self.max_buffer_timeout + { + for source in &mut self.sources { + if source.buffer_last.is_none() { + let rate = source.info.rate(); + let 
buffer_timeout = source.buffer_timeout; + + let mut remaining = elapsed_since_start; + while remaining > buffer_timeout { + let chunk_samples = samples_for_timeout(rate, buffer_timeout); + let frame_duration = duration_from_samples(chunk_samples, rate); - frame.set_rate(source.info.rate() as u32); + let mut frame = ffmpeg::frame::Audio::new( + source.info.sample_format, + chunk_samples, + source.info.channel_layout(), + ); - let timestamp = start_timestamp + (elapsed_since_start - remaining); - source.buffer_last = Some((timestamp, frame_duration)); - source.buffer.push_front(AudioFrame::new(frame, timestamp)); + for i in 0..frame.planes() { + frame.data_mut(i).fill(0); + } - if frame_duration.is_zero() { - break; - } + frame.set_rate(source.info.rate() as u32); - remaining = remaining.saturating_sub(frame_duration); + let timestamp = start_timestamp + (elapsed_since_start - remaining); + source.buffer_last = Some((timestamp, frame_duration)); + source.buffer.push_front(AudioFrame::new(frame, timestamp)); + + if frame_duration.is_zero() { + break; } + + remaining = remaining.saturating_sub(frame_duration); } } } @@ -499,30 +477,6 @@ fn duration_from_samples(samples: usize, rate: i32) -> Duration { Duration::from_secs_f64(samples as f64 / rate as f64) } -pub struct AudioMixerHandle { - thread_handle: std::thread::JoinHandle<()>, - stop_flag: Arc, -} - -impl AudioMixerHandle { - pub fn new(thread_handle: std::thread::JoinHandle<()>, stop_flag: Arc) -> Self { - Self { - thread_handle, - stop_flag, - } - } - - pub fn stop(&self) { - self.stop_flag.store(true, Ordering::Relaxed); - } -} - -impl Drop for AudioMixerHandle { - fn drop(&mut self) { - self.stop_flag.store(true, Ordering::Relaxed); - } -} - #[cfg(test)] mod test { use super::*; @@ -709,12 +663,12 @@ mod test { assert!(source.rx.is_empty()); let item = &source.buffer[0]; - assert_eq!(item.1.duration_since(start), ONE_SECOND / 2); - assert_eq!(item.0.samples(), SOURCE_INFO.rate() as usize / 2); + 
assert_eq!(item.timestamp.duration_since(start), ONE_SECOND / 2); + assert_eq!(item.inner.samples(), SOURCE_INFO.rate() as usize / 2); let item = &source.buffer[1]; - assert_eq!(item.1.duration_since(start), ONE_SECOND); - assert_eq!(item.0.samples(), SOURCE_INFO.rate() as usize / 2); + assert_eq!(item.timestamp.duration_since(start), ONE_SECOND); + assert_eq!(item.inner.samples(), SOURCE_INFO.rate() as usize / 2); } } } diff --git a/crates/recording/src/sources/microphone.rs b/crates/recording/src/sources/microphone.rs index 752476661b..f567e8cd2d 100644 --- a/crates/recording/src/sources/microphone.rs +++ b/crates/recording/src/sources/microphone.rs @@ -7,11 +7,15 @@ use cap_media_info::AudioInfo; use futures::{SinkExt, channel::mpsc}; use std::sync::Arc; -pub struct Microphone(AudioInfo, Arc); +pub struct Microphone { + info: AudioInfo, + _lock: Arc, +} impl AudioSource for Microphone { type Config = Arc; + #[allow(clippy::manual_async_fn)] fn setup( feed_lock: Self::Config, mut audio_tx: mpsc::Sender, @@ -40,11 +44,14 @@ impl AudioSource for Microphone { } }); - Ok(Self(audio_info, feed_lock)) + Ok(Self { + info: audio_info, + _lock: feed_lock, + }) } } fn audio_info(&self) -> AudioInfo { - self.0 + self.info } } diff --git a/crates/recording/src/sources/screen_capture/macos.rs b/crates/recording/src/sources/screen_capture/macos.rs index abfbb7416f..cc6a135849 100644 --- a/crates/recording/src/sources/screen_capture/macos.rs +++ b/crates/recording/src/sources/screen_capture/macos.rs @@ -49,10 +49,6 @@ enum SourceError { NoDisplay(DisplayId), #[error("AsContentFilter")] AsContentFilter, - #[error("CreateActor: {0}")] - CreateActor(arc::R), - #[error("DidStopWithError: {0}")] - DidStopWithError(arc::R), } pub struct VideoFrame { @@ -111,7 +107,7 @@ impl ScreenCaptureConfig { excluded_sc_windows, ) .await - .ok_or_else(|| SourceError::AsContentFilter)?; + .ok_or(SourceError::AsContentFilter)?; debug!("SCK content filter: {:?}", content_filter); diff --git 
a/crates/recording/src/sources/screen_capture/mod.rs b/crates/recording/src/sources/screen_capture/mod.rs index 658f2ea41f..9daf4c4541 100644 --- a/crates/recording/src/sources/screen_capture/mod.rs +++ b/crates/recording/src/sources/screen_capture/mod.rs @@ -299,7 +299,6 @@ impl ScreenCaptureConfig { let fps = std::cmp::max(1, std::cmp::min(max_fps, target_refresh)); let output_size = crop_bounds - .clone() .and_then(|b| { #[cfg(target_os = "macos")] { diff --git a/crates/recording/src/studio_recording.rs b/crates/recording/src/studio_recording.rs index 85c4bfa9fe..682acf1e1f 100644 --- a/crates/recording/src/studio_recording.rs +++ b/crates/recording/src/studio_recording.rs @@ -53,10 +53,7 @@ pub struct ActorHandle { #[derive(kameo::Actor)] pub struct Actor { recording_dir: PathBuf, - capture_target: screen_capture::ScreenCaptureTarget, - video_info: VideoInfo, state: Option, - fps: u32, segment_factory: SegmentPipelineFactory, segments: Vec, completion_tx: watch::Sender>>, @@ -295,10 +292,10 @@ impl Pipeline { tokio::spawn(async move { while let Some(res) = futures.next().await { - if let Err(err) = res { - if completion_tx.borrow().is_none() { - let _ = completion_tx.send(Some(Err(err))); - } + if let Err(err) = res + && completion_tx.borrow().is_none() + { + let _ = completion_tx.send(Some(Err(err))); } } }); @@ -464,13 +461,9 @@ async fn spawn_studio_recording_actor( trace!("spawning recording actor"); let base_inputs = base_inputs.clone(); - let fps = pipeline.screen.video_info().unwrap().fps(); let actor_ref = Actor::spawn(Actor { recording_dir, - fps, - capture_target: base_inputs.capture_target.clone(), - video_info: pipeline.screen.video_info().unwrap(), state: Some(ActorState::Recording { pipeline, /*pipeline_done_rx,*/ @@ -680,7 +673,7 @@ pub enum CreateSegmentPipelineError { #[tracing::instrument(skip_all, name = "segment", fields(index = index))] #[allow(clippy::too_many_arguments)] async fn create_segment_pipeline( - segments_dir: &PathBuf, + 
segments_dir: &Path, cursors_dir: &Path, index: u32, base_inputs: RecordingBaseInputs, @@ -800,11 +793,6 @@ async fn create_segment_pipeline( }) } -struct CameraPipelineInfo { - inner: OutputPipeline, - fps: u32, -} - fn ensure_dir(path: &PathBuf) -> Result { std::fs::create_dir_all(path)?; Ok(path.clone()) diff --git a/crates/rendering-skia/src/layers/background.rs b/crates/rendering-skia/src/layers/background.rs index 2d67527790..df4e34c767 100644 --- a/crates/rendering-skia/src/layers/background.rs +++ b/crates/rendering-skia/src/layers/background.rs @@ -359,6 +359,7 @@ mod tests { background: BackgroundSource::Color { value: [65535, 0, 0], }, + border: None, }; // Should need update on first check @@ -382,6 +383,7 @@ mod tests { background: BackgroundSource::Color { value: [0, 65535, 0], }, + border: None, }; assert!(layer.needs_update(&new_uniforms)); } diff --git a/crates/rendering/src/decoder/avassetreader.rs b/crates/rendering/src/decoder/avassetreader.rs index e642ebda50..8bc0dac427 100644 --- a/crates/rendering/src/decoder/avassetreader.rs +++ b/crates/rendering/src/decoder/avassetreader.rs @@ -79,7 +79,7 @@ impl ImageBufProcessor { fn ensure_scratch(&mut self, format: format::Pixel, width: u32, height: u32) { let needs_new = self.scratch_spec - .map_or(true, |(current_format, current_width, current_height)| { + .is_none_or(|(current_format, current_width, current_height)| { current_format != format || current_width != width || current_height != height }); @@ -98,8 +98,8 @@ impl ImageBufProcessor { } let bytes_per_row = image_buf.plane_bytes_per_row(0); - let width = image_buf.width() as usize; - let height = image_buf.height() as usize; + let width = image_buf.width(); + let height = image_buf.height(); let slice = unsafe { std::slice::from_raw_parts::<'static, _>( diff --git a/crates/rendering/src/decoder/frame_converter.rs b/crates/rendering/src/decoder/frame_converter.rs index ebcb9d28ed..30b6d40add 100644 --- 
a/crates/rendering/src/decoder/frame_converter.rs +++ b/crates/rendering/src/decoder/frame_converter.rs @@ -24,7 +24,7 @@ impl FrameConverter { if frame.format() == format::Pixel::RGBA { let width = frame.width() as usize; let height = frame.height() as usize; - let stride = frame.stride(0) as usize; + let stride = frame.stride(0); return copy_rgba_plane(frame.data(0), stride, width, height); } @@ -48,14 +48,14 @@ impl FrameConverter { let rgba_frame = &self.rgba_frame; copy_rgba_plane( rgba_frame.data(0), - rgba_frame.stride(0) as usize, + rgba_frame.stride(0), rgba_frame.width() as usize, rgba_frame.height() as usize, ) } fn ensure_scaler(&mut self, input_format: format::Pixel, width: u32, height: u32) { - let needs_new = self.scaler.as_ref().map_or(true, |state| { + let needs_new = self.scaler.as_ref().is_none_or(|state| { state.input_format != input_format || state.width != width || state.height != height }); diff --git a/crates/rendering/src/layers/cursor.rs b/crates/rendering/src/layers/cursor.rs index a9acd6cba9..a5a493581b 100644 --- a/crates/rendering/src/layers/cursor.rs +++ b/crates/rendering/src/layers/cursor.rs @@ -472,73 +472,6 @@ fn compute_cursor_fade_in(cursor: &CursorEvents, current_time_ms: f64, hide_dela smoothstep64(0.0, CURSOR_IDLE_FADE_OUT_MS, time_since_resume) as f32 } -#[cfg(test)] -mod tests { - use super::*; - - fn move_event(time_ms: f64, x: f64, y: f64) -> CursorMoveEvent { - CursorMoveEvent { - active_modifiers: vec![], - cursor_id: "pointer".into(), - time_ms, - x, - y, - } - } - - fn cursor_events(times: &[(f64, f64, f64)]) -> CursorEvents { - CursorEvents { - moves: times - .iter() - .map(|(time, x, y)| move_event(*time, *x, *y)) - .collect(), - clicks: vec![], - } - } - - #[test] - fn opacity_stays_visible_with_recent_move() { - let cursor = cursor_events(&[(0.0, 0.0, 0.0), (1500.0, 0.1, 0.1)]); - - let opacity = compute_cursor_idle_opacity(&cursor, 2000.0, 2000.0); - - assert_eq!(opacity, 1.0); - } - - #[test] - fn 
opacity_fades_once_past_delay() { - let cursor = cursor_events(&[(0.0, 0.0, 0.0)]); - - let opacity = compute_cursor_idle_opacity(&cursor, 3000.0, 1000.0); - - assert_eq!(opacity, 0.0); - } - - #[test] - fn opacity_fades_in_after_long_inactivity() { - let cursor = cursor_events(&[(0.0, 0.0, 0.0), (5000.0, 0.5, 0.5)]); - - let hide_delay_ms = 2000.0; - - let at_resume = compute_cursor_idle_opacity(&cursor, 5000.0, hide_delay_ms); - assert_eq!(at_resume, 0.0); - - let halfway = compute_cursor_idle_opacity( - &cursor, - 5000.0 + CURSOR_IDLE_FADE_OUT_MS / 2.0, - hide_delay_ms, - ); - assert!((halfway - 0.5).abs() < 0.05); - - let after_fade = compute_cursor_idle_opacity( - &cursor, - 5000.0 + CURSOR_IDLE_FADE_OUT_MS * 2.0, - hide_delay_ms, - ); - assert_eq!(after_fade, 1.0); - } -} - fn get_click_t(clicks: &[CursorClickEvent], time_ms: f64) -> f32 { fn smoothstep(low: f32, high: f32, v: f32) -> f32 { let t = f32::clamp((v - low) / (high - low), 0.0, 1.0); @@ -683,3 +616,70 @@ impl CursorTexture { )) } } + +#[cfg(test)] +mod tests { + use super::*; + + fn move_event(time_ms: f64, x: f64, y: f64) -> CursorMoveEvent { + CursorMoveEvent { + active_modifiers: vec![], + cursor_id: "pointer".into(), + time_ms, + x, + y, + } + } + + fn cursor_events(times: &[(f64, f64, f64)]) -> CursorEvents { + CursorEvents { + moves: times + .iter() + .map(|(time, x, y)| move_event(*time, *x, *y)) + .collect(), + clicks: vec![], + } + } + + #[test] + fn opacity_stays_visible_with_recent_move() { + let cursor = cursor_events(&[(0.0, 0.0, 0.0), (1500.0, 0.1, 0.1)]); + + let opacity = compute_cursor_idle_opacity(&cursor, 2000.0, 2000.0); + + assert_eq!(opacity, 1.0); + } + + #[test] + fn opacity_fades_once_past_delay() { + let cursor = cursor_events(&[(0.0, 0.0, 0.0)]); + + let opacity = compute_cursor_idle_opacity(&cursor, 3000.0, 1000.0); + + assert_eq!(opacity, 0.0); + } + + #[test] + fn opacity_fades_in_after_long_inactivity() { + let cursor = cursor_events(&[(0.0, 0.0, 0.0), (5000.0, 0.5, 
0.5)]); + + let hide_delay_ms = 2000.0; + + let at_resume = compute_cursor_idle_opacity(&cursor, 5000.0, hide_delay_ms); + assert_eq!(at_resume, 0.0); + + let halfway = compute_cursor_idle_opacity( + &cursor, + 5000.0 + CURSOR_IDLE_FADE_OUT_MS / 2.0, + hide_delay_ms, + ); + assert!((halfway - 0.5).abs() < 0.05); + + let after_fade = compute_cursor_idle_opacity( + &cursor, + 5000.0 + CURSOR_IDLE_FADE_OUT_MS * 2.0, + hide_delay_ms, + ); + assert_eq!(after_fade, 1.0); + } +} diff --git a/crates/rendering/src/lib.rs b/crates/rendering/src/lib.rs index 51202a3689..137a096a5c 100644 --- a/crates/rendering/src/lib.rs +++ b/crates/rendering/src/lib.rs @@ -1182,85 +1182,6 @@ impl RendererLayers { } } -#[cfg(test)] -mod project_uniforms_tests { - use super::*; - use cap_project::CursorMoveEvent; - - fn cursor_move(time_ms: f64, x: f64, y: f64) -> CursorMoveEvent { - CursorMoveEvent { - active_modifiers: vec![], - cursor_id: "primary".to_string(), - time_ms, - x, - y, - } - } - - fn default_smoothing() -> SpringMassDamperSimulationConfig { - SpringMassDamperSimulationConfig { - tension: 100.0, - mass: 1.0, - friction: 20.0, - } - } - - #[test] - fn auto_zoom_focus_defaults_without_cursor_data() { - let events = CursorEvents { - clicks: vec![], - moves: vec![], - }; - - let focus = ProjectUniforms::auto_zoom_focus(&events, 0.3, None, None); - - assert_eq!(focus.coord.x, 0.5); - assert_eq!(focus.coord.y, 0.5); - } - - #[test] - fn auto_zoom_focus_is_stable_for_slow_motion() { - let events = CursorEvents { - clicks: vec![], - moves: vec![ - cursor_move(0.0, 0.5, 0.5), - cursor_move(200.0, 0.55, 0.5), - cursor_move(400.0, 0.6, 0.5), - ], - }; - - let smoothing = Some(default_smoothing()); - - let current = interpolate_cursor(&events, 0.4, smoothing).expect("cursor position"); - let focus = - ProjectUniforms::auto_zoom_focus(&events, 0.4, smoothing, Some(current.clone())); - - let dx = (focus.coord.x - current.position.coord.x).abs(); - let dy = (focus.coord.y - 
current.position.coord.y).abs(); - - assert!(dx < 0.05, "expected minimal horizontal drift, got {dx}"); - assert!(dy < 0.05, "expected minimal vertical drift, got {dy}"); - } - - #[test] - fn auto_zoom_focus_leans_into_velocity_for_fast_motion() { - let events = CursorEvents { - clicks: vec![], - moves: vec![cursor_move(0.0, 0.1, 0.5), cursor_move(40.0, 0.9, 0.5)], - }; - - let smoothing = Some(default_smoothing()); - let query_time = 0.045; // slightly after the fast movement - - let current = interpolate_cursor(&events, query_time, smoothing).expect("cursor position"); - let focus = - ProjectUniforms::auto_zoom_focus(&events, query_time, smoothing, Some(current.clone())); - let delta = focus.coord.x - current.position.coord.x; - assert!(delta < 0.2, "focus moved too far ahead: {delta}"); - assert!(delta > -0.25, "focus lagged too far behind: {delta}"); - } -} - pub struct RenderSession { textures: (wgpu::Texture, wgpu::Texture), texture_views: (wgpu::TextureView, wgpu::TextureView), @@ -1366,7 +1287,7 @@ impl RenderSession { .readback_buffers .0 .as_ref() - .map_or(true, |_| self.readback_buffer_size < size); + .is_none_or(|_| self.readback_buffer_size < size); if needs_new { let make_buffer = || { @@ -1516,3 +1437,82 @@ fn srgb_to_linear(c: u16) -> f32 { ((c + 0.055) / 1.055).powf(2.4) } } + +#[cfg(test)] +mod project_uniforms_tests { + use super::*; + use cap_project::CursorMoveEvent; + + fn cursor_move(time_ms: f64, x: f64, y: f64) -> CursorMoveEvent { + CursorMoveEvent { + active_modifiers: vec![], + cursor_id: "primary".to_string(), + time_ms, + x, + y, + } + } + + fn default_smoothing() -> SpringMassDamperSimulationConfig { + SpringMassDamperSimulationConfig { + tension: 100.0, + mass: 1.0, + friction: 20.0, + } + } + + #[test] + fn auto_zoom_focus_defaults_without_cursor_data() { + let events = CursorEvents { + clicks: vec![], + moves: vec![], + }; + + let focus = ProjectUniforms::auto_zoom_focus(&events, 0.3, None, None); + + assert_eq!(focus.coord.x, 
0.5); + assert_eq!(focus.coord.y, 0.5); + } + + #[test] + fn auto_zoom_focus_is_stable_for_slow_motion() { + let events = CursorEvents { + clicks: vec![], + moves: vec![ + cursor_move(0.0, 0.5, 0.5), + cursor_move(200.0, 0.55, 0.5), + cursor_move(400.0, 0.6, 0.5), + ], + }; + + let smoothing = Some(default_smoothing()); + + let current = interpolate_cursor(&events, 0.4, smoothing).expect("cursor position"); + let focus = + ProjectUniforms::auto_zoom_focus(&events, 0.4, smoothing, Some(current.clone())); + + let dx = (focus.coord.x - current.position.coord.x).abs(); + let dy = (focus.coord.y - current.position.coord.y).abs(); + + assert!(dx < 0.05, "expected minimal horizontal drift, got {dx}"); + assert!(dy < 0.05, "expected minimal vertical drift, got {dy}"); + } + + #[test] + fn auto_zoom_focus_leans_into_velocity_for_fast_motion() { + let events = CursorEvents { + clicks: vec![], + moves: vec![cursor_move(0.0, 0.1, 0.5), cursor_move(40.0, 0.9, 0.5)], + }; + + let smoothing = Some(default_smoothing()); + let query_time = 0.045; // slightly after the fast movement + + let current = interpolate_cursor(&events, query_time, smoothing).expect("cursor position"); + let focus = + ProjectUniforms::auto_zoom_focus(&events, query_time, smoothing, Some(current.clone())); + let delta = focus.coord.x - current.position.coord.x; + assert!(delta < 0.2, "focus moved too far ahead: {delta}"); + assert!(delta > -0.25, "focus lagged too far behind: {delta}"); + } +} diff --git a/crates/scap-ffmpeg/examples/cli.rs b/crates/scap-ffmpeg/examples/cli.rs index dc3dc57061..274f5382c5 100644 --- a/crates/scap-ffmpeg/examples/cli.rs +++ b/crates/scap-ffmpeg/examples/cli.rs @@ -1,7 +1,3 @@ -use scap_ffmpeg::*; -use scap_targets::*; -use std::time::Duration; - #[tokio::main] pub async fn main() { #[cfg(windows)] @@ -40,32 +36,44 @@ pub async fn main() { #[cfg(target_os = "macos")] { + use std::time::Duration; + + use cidre::sc; use futures::executor::block_on; use scap_screencapturekit::*; + 
use scap_targets::Display; let display = Display::primary(); let config = StreamCfgBuilder::default() .with_fps(60.0) - .with_width(display.physical_size().width() as usize) - .with_height(display.physical_size().height() as usize) + .with_width(display.physical_size().unwrap().width() as usize) + .with_height(display.physical_size().unwrap().height() as usize) .build(); let capturer = Capturer::builder( - display.raw_handle().as_content_filter().await.unwrap(), + display + .raw_handle() + .as_content_filter(sc::ShareableContent::current().await.unwrap()) + .await + .unwrap(), config, ) .with_output_sample_buf_cb(|frame| { + use scap_ffmpeg::AsFFmpeg; + let Frame::Screen(video_frame) = frame else { return; }; let ff_frame = video_frame.as_ffmpeg().unwrap(); - dbg!(ff_frame.width(), ff_frame.height(), ff_frame.format()); + ff_frame.width(); + ff_frame.height(); + ff_frame.format(); }) .with_stop_with_err_cb(|stream, error| { - dbg!(stream, error); + (stream, error); }) .build() .expect("Failed to build capturer");