From 099ce2bbb5a65881efaf3716037d1d7f32f7fa2a Mon Sep 17 00:00:00 2001 From: webadderall <131426131+webadderall@users.noreply.github.com> Date: Fri, 17 Apr 2026 19:57:26 +1000 Subject: [PATCH 1/6] refactor: split handlers.ts into focused sub-modules MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit handlers.ts was ~5967 lines. Extracted into 22 focused modules: - ipc/types.ts — shared TypeScript interfaces and types - ipc/constants.ts — module-level constants - ipc/state.ts — all mutable state with typed setters - ipc/utils.ts — shared low-level utilities (getScreen, normalizePath, etc.) - ipc/ffmpeg/binary.ts — ffmpeg binary resolution - ipc/ffmpeg/filters.ts — audio sync/filter builders - ipc/captions/parser.ts — SRT/Whisper JSON parsers - ipc/captions/whisper.ts — Whisper model download/status - ipc/captions/generate.ts — auto-caption generation - ipc/paths/binaries.ts — native binary path resolution - ipc/cursor/monitor.ts — cursor monitor process management - ipc/cursor/telemetry.ts — cursor sampling and telemetry - ipc/cursor/bounds.ts — window bounds capture and resolution - ipc/cursor/interaction.ts — mouse hook and interaction capture - ipc/recording/events.ts — recording lifecycle events - ipc/recording/diagnostics.ts — media validation and diagnostics - ipc/recording/prune.ts — auto-recording cleanup - ipc/recording/ffmpeg.ts — FFmpeg screen capture - ipc/recording/windows.ts — Windows native capture (WGC) - ipc/recording/mac.ts — Mac ScreenCaptureKit integration - ipc/export/native-video.ts — native video export sessions - ipc/project/session.ts — recording session manifests - ipc/project/manager.ts — project library and file management handlers.ts reduced from 5967 → 2930 lines (registerIpcHandlers + helpers only) --- electron/ipc/captions/generate.ts | 248 ++ electron/ipc/captions/parser.ts | 183 + electron/ipc/captions/whisper.ts | 146 + electron/ipc/constants.ts | 30 + electron/ipc/cursor/bounds.ts | 260 ++ 
electron/ipc/cursor/interaction.ts | 219 ++ electron/ipc/cursor/monitor.ts | 136 + electron/ipc/cursor/telemetry.ts | 173 + electron/ipc/export/native-video.ts | 463 +++ electron/ipc/ffmpeg/binary.ts | 76 + electron/ipc/ffmpeg/filters.ts | 194 + electron/ipc/handlers.ts | 4861 ++----------------------- electron/ipc/paths/binaries.ts | 249 ++ electron/ipc/project/manager.ts | 337 ++ electron/ipc/project/session.ts | 138 + electron/ipc/recording/diagnostics.ts | 175 + electron/ipc/recording/events.ts | 9 + electron/ipc/recording/ffmpeg.ts | 204 ++ electron/ipc/recording/mac.ts | 459 +++ electron/ipc/recording/prune.ts | 91 + electron/ipc/recording/windows.ts | 344 ++ electron/ipc/state.ts | 169 + electron/ipc/types.ts | 202 + electron/ipc/utils.ts | 105 + 24 files changed, 5013 insertions(+), 4458 deletions(-) create mode 100644 electron/ipc/captions/generate.ts create mode 100644 electron/ipc/captions/parser.ts create mode 100644 electron/ipc/captions/whisper.ts create mode 100644 electron/ipc/constants.ts create mode 100644 electron/ipc/cursor/bounds.ts create mode 100644 electron/ipc/cursor/interaction.ts create mode 100644 electron/ipc/cursor/monitor.ts create mode 100644 electron/ipc/cursor/telemetry.ts create mode 100644 electron/ipc/export/native-video.ts create mode 100644 electron/ipc/ffmpeg/binary.ts create mode 100644 electron/ipc/ffmpeg/filters.ts create mode 100644 electron/ipc/paths/binaries.ts create mode 100644 electron/ipc/project/manager.ts create mode 100644 electron/ipc/project/session.ts create mode 100644 electron/ipc/recording/diagnostics.ts create mode 100644 electron/ipc/recording/events.ts create mode 100644 electron/ipc/recording/ffmpeg.ts create mode 100644 electron/ipc/recording/mac.ts create mode 100644 electron/ipc/recording/prune.ts create mode 100644 electron/ipc/recording/windows.ts create mode 100644 electron/ipc/state.ts create mode 100644 electron/ipc/types.ts create mode 100644 electron/ipc/utils.ts diff --git 
a/electron/ipc/captions/generate.ts b/electron/ipc/captions/generate.ts new file mode 100644 index 00000000..f74abf15 --- /dev/null +++ b/electron/ipc/captions/generate.ts @@ -0,0 +1,248 @@ +import { constants as fsConstants } from "node:fs"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { execFile, spawnSync } from "node:child_process"; +import { promisify } from "node:util"; +import { app } from "electron"; +import { getFfmpegBinaryPath } from "../ffmpeg/binary"; +import { getBundledWhisperExecutableCandidates } from "../paths/binaries"; +import { parseWhisperJsonCues, parseSrtCues, shouldRetryWhisperWithoutJson } from "./parser"; +import { normalizeVideoSourcePath } from "../utils"; +import { resolveRecordingSession } from "../project/session"; + +const execFileAsync = promisify(execFile); + +export async function ensureReadableFile(filePath: string, description: string) { + await fs.access(filePath, fsConstants.R_OK); + if (description === "whisper executable") { + try { + await fs.access(filePath, fsConstants.X_OK); + } catch { + throw new Error("The selected Whisper executable is not marked as executable."); + } + } +} + +export async function isExecutableFile(filePath: string) { + try { + await fs.access(filePath, fsConstants.R_OK | fsConstants.X_OK); + return true; + } catch { + return false; + } +} + +export async function resolveWhisperExecutablePath(preferredPath?: string | null) { + const candidatePaths = [ + preferredPath?.trim() || null, + ...getBundledWhisperExecutableCandidates(), + process.env["WHISPER_CPP_PATH"]?.trim() || null, + process.platform === "darwin" ? "/opt/homebrew/bin/whisper-cli" : null, + process.platform === "darwin" ? "/usr/local/bin/whisper-cli" : null, + process.platform === "darwin" ? "/opt/homebrew/bin/whisper-cpp" : null, + process.platform === "darwin" ? 
"/usr/local/bin/whisper-cpp" : null, + ].filter((value): value is string => Boolean(value)); + + for (const candidate of candidatePaths) { + const normalized = path.resolve(candidate); + if (await isExecutableFile(normalized)) { + return normalized; + } + } + + const pathCommand = process.platform === "win32" ? "where" : "which"; + const binaryNames = + process.platform === "win32" + ? ["whisper-cli.exe", "whisper.exe", "main.exe"] + : ["whisper-cli", "whisper-cpp", "whisper", "main"]; + + for (const binaryName of binaryNames) { + const result = spawnSync(pathCommand, [binaryName], { encoding: "utf-8" }); + if (result.status === 0) { + const resolvedPath = result.stdout + .split(/\r?\n/) + .map((line) => line.trim()) + .find(Boolean); + + if (resolvedPath && (await isExecutableFile(resolvedPath))) { + return resolvedPath; + } + } + } + + throw new Error( + "No Whisper runtime was found. Recordly looked for a bundled binary first, then checked common system install locations.", + ); +} + +export async function resolveCaptionAudioCandidates(videoPath: string) { + const candidates: Array<{ path: string; label: string }> = []; + const seenPaths = new Set(); + + const pushCandidate = (candidatePath: string | null | undefined, label: string) => { + const normalizedCandidatePath = normalizeVideoSourcePath(candidatePath); + if (!normalizedCandidatePath || seenPaths.has(normalizedCandidatePath)) { + return; + } + + seenPaths.add(normalizedCandidatePath); + candidates.push({ path: normalizedCandidatePath, label }); + }; + + pushCandidate(videoPath, "recording"); + + const requestedRecordingSession = await resolveRecordingSession(videoPath); + pushCandidate(requestedRecordingSession?.webcamPath, "linked webcam recording"); + + return candidates; +} + +export async function extractCaptionAudioSource(options: { + videoPath: string; + ffmpegPath: string; + wavPath: string; +}) { + const candidates = await resolveCaptionAudioCandidates(options.videoPath); + const 
attemptedCandidates: Array<{ + path: string; + label: string; + readable: boolean; + extractedAudio: boolean; + error?: string; + }> = []; + + for (const candidate of candidates) { + try { + await ensureReadableFile(candidate.path, "video file"); + await execFileAsync( + options.ffmpegPath, + [ + "-y", + "-i", + candidate.path, + "-map", + "0:a:0", + "-vn", + "-ac", + "1", + "-ar", + "16000", + "-c:a", + "pcm_s16le", + options.wavPath, + ], + { timeout: 5 * 60 * 1000, maxBuffer: 20 * 1024 * 1024 }, + ); + attemptedCandidates.push({ ...candidate, readable: true, extractedAudio: true }); + return candidate; + } catch (error) { + attemptedCandidates.push({ + ...candidate, + readable: true, + extractedAudio: false, + error: error instanceof Error ? error.message : String(error), + }); + } + } + + console.warn( + "[auto-captions] No audio source candidate could be extracted:", + attemptedCandidates, + ); + + throw new Error( + "No audio was found to transcribe in the saved recording file. Captions need an audio track. 
If this recording should have contained sound, the recording was saved without an audio stream.", + ); +} + +export async function generateAutoCaptionsFromVideo(options: { + videoPath: string; + whisperExecutablePath?: string; + whisperModelPath: string; + language?: string; +}) { + const ffmpegPath = getFfmpegBinaryPath(); + const normalizedVideoPath = normalizeVideoSourcePath(options.videoPath); + if (!normalizedVideoPath) { + throw new Error("Missing source video path."); + } + + const whisperExecutablePath = await resolveWhisperExecutablePath(options.whisperExecutablePath); + const whisperModelPath = path.resolve(options.whisperModelPath); + await ensureReadableFile(whisperExecutablePath, "whisper executable"); + await ensureReadableFile(whisperModelPath, "whisper model"); + + const tempBase = path.join( + app.getPath("temp"), + `recordly-captions-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`, + ); + const wavPath = `${tempBase}.wav`; + const outputBase = `${tempBase}-whisper`; + const srtPath = `${outputBase}.srt`; + const jsonPath = `${outputBase}.json`; + + try { + const audioSource = await extractCaptionAudioSource({ + videoPath: normalizedVideoPath, + ffmpegPath, + wavPath, + }); + + const language = + options.language && options.language.trim() ? 
options.language.trim() : "auto"; + const whisperBaseArgs = [ + "-m", + whisperModelPath, + "-f", + wavPath, + "-osrt", + "-of", + outputBase, + "-l", + language, + "-np", + ]; + + let jsonEnabled = true; + try { + await execFileAsync(whisperExecutablePath, [...whisperBaseArgs, "-ojf"], { + timeout: 30 * 60 * 1000, + maxBuffer: 20 * 1024 * 1024, + }); + } catch (error) { + if (!shouldRetryWhisperWithoutJson(error)) { + throw error; + } + + jsonEnabled = false; + console.warn( + "[auto-captions] Whisper runtime does not support JSON full output, retrying with SRT only:", + error, + ); + await execFileAsync(whisperExecutablePath, whisperBaseArgs, { + timeout: 30 * 60 * 1000, + maxBuffer: 20 * 1024 * 1024, + }); + } + + const timedCues = jsonEnabled + ? parseWhisperJsonCues(await fs.readFile(jsonPath, "utf-8")) + : []; + const cues = + timedCues.length > 0 ? timedCues : parseSrtCues(await fs.readFile(srtPath, "utf-8")); + if (cues.length === 0) { + throw new Error("Whisper completed, but no caption cues were produced."); + } + + return { + cues, + audioSourceLabel: audioSource.label, + }; + } finally { + await Promise.allSettled([ + fs.rm(wavPath, { force: true }), + fs.rm(srtPath, { force: true }), + fs.rm(jsonPath, { force: true }), + ]); + } +} diff --git a/electron/ipc/captions/parser.ts b/electron/ipc/captions/parser.ts new file mode 100644 index 00000000..ff4c62ce --- /dev/null +++ b/electron/ipc/captions/parser.ts @@ -0,0 +1,183 @@ +import type { + CaptionCuePayload, + CaptionWordPayload, + WhisperJsonSegment, + WhisperJsonToken, +} from "../types"; + +function isFiniteNumber(value: unknown): value is number { + return typeof value === "number" && Number.isFinite(value); +} + +export function buildCaptionTextFromWords(words: CaptionWordPayload[]): string { + return words + .map((word, index) => `${index > 0 && word.leadingSpace ? 
" " : ""}${word.text}`) + .join("") + .trim(); +} + +export function parseWhisperJsonWords(tokens: unknown): CaptionWordPayload[] { + if (!Array.isArray(tokens)) { + return []; + } + + const words: CaptionWordPayload[] = []; + let nextLeadingSpace = false; + + for (const token of tokens) { + if (!token || typeof token !== "object") { + continue; + } + + const tokenData = token as WhisperJsonToken; + const tokenText = typeof tokenData.text === "string" ? tokenData.text : ""; + if (!tokenText) { + continue; + } + + const tokenStartMs = isFiniteNumber(tokenData.offsets?.from) + ? Math.round(tokenData.offsets.from) + : null; + const tokenEndMs = isFiniteNumber(tokenData.offsets?.to) + ? Math.round(tokenData.offsets.to) + : null; + const parts = tokenText.match(/\s+|[^\s]+/g) ?? []; + + for (const part of parts) { + if (/^\s+$/.test(part)) { + nextLeadingSpace = words.length > 0; + continue; + } + + if (tokenStartMs == null || tokenEndMs == null || tokenEndMs <= tokenStartMs) { + return []; + } + + const previousWord = words.length > 0 ? words[words.length - 1] : null; + if (!previousWord || nextLeadingSpace) { + words.push({ + text: part, + startMs: tokenStartMs, + endMs: tokenEndMs, + ...(words.length > 0 && nextLeadingSpace ? { leadingSpace: true } : {}), + }); + } else { + previousWord.text += part; + previousWord.endMs = Math.max(previousWord.endMs, tokenEndMs); + } + + nextLeadingSpace = false; + } + } + + return words.filter((word) => word.text.trim().length > 0); +} + +export function parseWhisperJsonCues(content: string): CaptionCuePayload[] { + try { + const parsed = JSON.parse(content) as { + transcription?: unknown; + }; + + if (!Array.isArray(parsed.transcription)) { + return []; + } + + return parsed.transcription + .map((segment, index) => { + if (!segment || typeof segment !== "object") { + return null; + } + + const segmentData = segment as WhisperJsonSegment; + const startMs = isFiniteNumber(segmentData.offsets?.from) + ? 
Math.round(segmentData.offsets.from) + : null; + const endMs = isFiniteNumber(segmentData.offsets?.to) + ? Math.round(segmentData.offsets.to) + : null; + const segmentText = + typeof segmentData.text === "string" ? segmentData.text.trim() : ""; + + if (startMs == null || endMs == null || endMs <= startMs) { + return null; + } + + const words = parseWhisperJsonWords(segmentData.tokens); + const text = words.length > 0 ? buildCaptionTextFromWords(words) : segmentText; + + if (!text) { + return null; + } + + return { + id: `caption-${index + 1}`, + startMs, + endMs, + text, + ...(words.length > 0 ? { words } : {}), + }; + }) + .filter((cue): cue is CaptionCuePayload => cue != null); + } catch (error) { + console.warn("[auto-captions] Failed to parse Whisper JSON output:", error); + return []; + } +} + +export function parseSrtTimestamp(value: string): number | null { + const match = value.trim().match(/^(\d{2}):(\d{2}):(\d{2}),(\d{3})$/); + if (!match) { + return null; + } + + const [, hours, minutes, seconds, milliseconds] = match; + return ( + Number(hours) * 60 * 60 * 1000 + + Number(minutes) * 60 * 1000 + + Number(seconds) * 1000 + + Number(milliseconds) + ); +} + +export function parseSrtCues(content: string): CaptionCuePayload[] { + return content + .split(/\r?\n\r?\n/) + .map((block, index) => { + const lines = block.split(/\r?\n/).map((line) => line.trim()); + const timingLine = lines.find((line) => line.includes("-->")); + if (!timingLine) { + return null; + } + + const [rawStart, rawEnd] = timingLine.split("-->").map((part) => part.trim()); + const startMs = parseSrtTimestamp(rawStart); + const endMs = parseSrtTimestamp(rawEnd); + if (startMs == null || endMs == null || endMs <= startMs) { + return null; + } + + const text = lines + .slice(lines.indexOf(timingLine) + 1) + .filter((line) => line.length > 0) + .join("\n") + .trim(); + + if (!text) { + return null; + } + + return { + id: `caption-${index + 1}`, + startMs, + endMs, + text, + }; + }) + 
.filter((cue): cue is CaptionCuePayload => cue != null); +} + +export function shouldRetryWhisperWithoutJson(error: unknown): boolean { + const message = error instanceof Error ? error.message : String(error); + return /unknown argument|output-json-full|output-json|ojf|\boj\b/i.test(message); +} diff --git a/electron/ipc/captions/whisper.ts b/electron/ipc/captions/whisper.ts new file mode 100644 index 00000000..70a97ede --- /dev/null +++ b/electron/ipc/captions/whisper.ts @@ -0,0 +1,146 @@ +import { createWriteStream } from "node:fs"; +import { constants as fsConstants } from "node:fs"; +import fs from "node:fs/promises"; +import { get as httpsGet } from "node:https"; +import type Electron from "electron"; +import { WHISPER_MODEL_DIR, WHISPER_MODEL_DOWNLOAD_URL, WHISPER_SMALL_MODEL_PATH } from "../constants"; + +export function sendWhisperModelDownloadProgress( + webContents: Electron.WebContents, + payload: { + status: "idle" | "downloading" | "downloaded" | "error"; + progress: number; + path?: string | null; + error?: string; + }, +) { + webContents.send("whisper-small-model-download-progress", payload); +} + +export async function getWhisperSmallModelStatus() { + try { + await fs.access(WHISPER_SMALL_MODEL_PATH, fsConstants.R_OK); + return { + success: true, + exists: true, + path: WHISPER_SMALL_MODEL_PATH, + }; + } catch { + return { + success: true, + exists: false, + path: null, + }; + } +} + +export function downloadFileWithProgress( + url: string, + destinationPath: string, + onProgress: (progress: number) => void, +): Promise { + const request = (currentUrl: string, redirectCount = 0): Promise => { + return new Promise((resolve, reject) => { + const req = httpsGet(currentUrl, (response) => { + const statusCode = response.statusCode ?? 
0; + const location = response.headers.location; + + if (statusCode >= 300 && statusCode < 400 && location) { + response.resume(); + if (redirectCount >= 5) { + reject(new Error("Too many redirects while downloading Whisper model.")); + return; + } + + const nextUrl = new URL(location, currentUrl).toString(); + void request(nextUrl, redirectCount + 1) + .then(resolve) + .catch(reject); + return; + } + + if (statusCode < 200 || statusCode >= 300) { + response.resume(); + reject(new Error(`Whisper model download failed with status ${statusCode}.`)); + return; + } + + const totalBytes = Number.parseInt( + String(response.headers["content-length"] ?? "0"), + 10, + ); + let downloadedBytes = 0; + const fileStream = createWriteStream(destinationPath); + + response.on("data", (chunk: Buffer) => { + downloadedBytes += chunk.length; + if (Number.isFinite(totalBytes) && totalBytes > 0) { + onProgress(Math.min(100, Math.round((downloadedBytes / totalBytes) * 100))); + } + }); + + response.on("error", (error) => { + fileStream.destroy(error); + }); + + fileStream.on("error", (error) => { + response.destroy(error); + reject(error); + }); + + fileStream.on("finish", () => { + onProgress(100); + resolve(); + }); + + response.pipe(fileStream); + }); + + req.on("error", reject); + }); + }; + + return request(url); +} + +export async function downloadWhisperSmallModel(webContents: Electron.WebContents): Promise { + await fs.mkdir(WHISPER_MODEL_DIR, { recursive: true }); + const tempPath = `${WHISPER_SMALL_MODEL_PATH}.download`; + + sendWhisperModelDownloadProgress(webContents, { + status: "downloading", + progress: 0, + path: null, + }); + + try { + await fs.rm(tempPath, { force: true }); + await downloadFileWithProgress(WHISPER_MODEL_DOWNLOAD_URL, tempPath, (progress) => { + sendWhisperModelDownloadProgress(webContents, { + status: "downloading", + progress, + path: null, + }); + }); + await fs.rename(tempPath, WHISPER_SMALL_MODEL_PATH); + 
sendWhisperModelDownloadProgress(webContents, { + status: "downloaded", + progress: 100, + path: WHISPER_SMALL_MODEL_PATH, + }); + return WHISPER_SMALL_MODEL_PATH; + } catch (error) { + await fs.rm(tempPath, { force: true }).catch(() => undefined); + sendWhisperModelDownloadProgress(webContents, { + status: "error", + progress: 0, + path: null, + error: String(error), + }); + throw error; + } +} + +export async function deleteWhisperSmallModel(): Promise { + await fs.rm(WHISPER_SMALL_MODEL_PATH, { force: true }); +} diff --git a/electron/ipc/constants.ts b/electron/ipc/constants.ts new file mode 100644 index 00000000..e3c34590 --- /dev/null +++ b/electron/ipc/constants.ts @@ -0,0 +1,30 @@ +import path from "node:path"; +import { USER_DATA_PATH } from "../appPaths"; + +export const PROJECT_FILE_EXTENSION = "recordly"; +export const LEGACY_PROJECT_FILE_EXTENSIONS = ["openscreen"]; +export const PROJECTS_DIRECTORY_NAME = "Projects"; +export const PROJECT_THUMBNAIL_SUFFIX = ".preview.png"; +export const RECENT_PROJECTS_FILE = path.join(USER_DATA_PATH, "recent-projects.json"); +export const MAX_RECENT_PROJECTS = 16; +export const SHORTCUTS_FILE = path.join(USER_DATA_PATH, "shortcuts.json"); +export const RECORDINGS_SETTINGS_FILE = path.join(USER_DATA_PATH, "recordings-settings.json"); +export const COUNTDOWN_SETTINGS_FILE = path.join(USER_DATA_PATH, "countdown-settings.json"); +export const AUTO_RECORDING_PREFIX = "recording-"; +export const AUTO_RECORDING_RETENTION_COUNT = 20; +export const AUTO_RECORDING_MAX_AGE_MS = 14 * 24 * 60 * 60 * 1000; +export const ALLOW_RECORDLY_WINDOW_CAPTURE = Boolean(process.env["VITE_DEV_SERVER_URL"]); +export const RECORDING_SESSION_MANIFEST_SUFFIX = ".recordly-session.json"; +export const WHISPER_MODEL_DOWNLOAD_URL = + "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.bin"; +export const WHISPER_MODEL_DIR = path.join(USER_DATA_PATH, "whisper"); +export const WHISPER_SMALL_MODEL_PATH = path.join(WHISPER_MODEL_DIR, 
"ggml-small.bin"); +export const COMPANION_AUDIO_LAYOUTS = [ + { platform: "mac" as const, systemSuffix: ".system.m4a", micSuffix: ".mic.m4a" }, + { platform: "win" as const, systemSuffix: ".system.wav", micSuffix: ".mic.wav" }, + { platform: "mac" as const, systemSuffix: ".system.webm", micSuffix: ".mic.webm" }, +]; + +export const CURSOR_TELEMETRY_VERSION = 2; +export const CURSOR_SAMPLE_INTERVAL_MS = 33; +export const MAX_CURSOR_SAMPLES = 60 * 60 * 30; // 1 hour @ 30Hz diff --git a/electron/ipc/cursor/bounds.ts b/electron/ipc/cursor/bounds.ts new file mode 100644 index 00000000..6cf638db --- /dev/null +++ b/electron/ipc/cursor/bounds.ts @@ -0,0 +1,260 @@ +import { execFile } from "node:child_process"; +import { promisify } from "node:util"; +import type { NativeMacWindowSource, WindowBounds, SelectedSource } from "../types"; +import { + selectedSource, + setSelectedWindowBounds, + interactionCaptureCleanup, + setInteractionCaptureCleanup, + windowBoundsCaptureInterval, + setWindowBoundsCaptureInterval, + cachedNativeMacWindowSources, + setCachedNativeMacWindowSources, + cachedNativeMacWindowSourcesAtMs, + setCachedNativeMacWindowSourcesAtMs, +} from "../state"; +import { parseWindowId } from "../utils"; +import { ensureNativeWindowListBinary } from "../paths/binaries"; + +const execFileAsync = promisify(execFile); + +export async function getNativeMacWindowSources(options?: { maxAgeMs?: number }) { + if (process.platform !== "darwin") { + return [] as NativeMacWindowSource[]; + } + + const maxAgeMs = options?.maxAgeMs ?? 
5000; + const now = Date.now(); + if (cachedNativeMacWindowSources && now - cachedNativeMacWindowSourcesAtMs < maxAgeMs) { + return cachedNativeMacWindowSources; + } + + const binaryPath = await ensureNativeWindowListBinary(); + const { stdout } = await execFileAsync(binaryPath, [], { + timeout: 30000, + maxBuffer: 10 * 1024 * 1024, + }); + + const parsed = JSON.parse(stdout); + if (!Array.isArray(parsed)) { + return [] as NativeMacWindowSource[]; + } + + const entries = parsed.filter((entry: unknown): entry is NativeMacWindowSource => { + if (!entry || typeof entry !== "object") { + return false; + } + + const candidate = entry as Partial; + return typeof candidate.id === "string" && typeof candidate.name === "string"; + }); + + setCachedNativeMacWindowSources(entries); + setCachedNativeMacWindowSourcesAtMs(now); + return entries; +} + +export function getWindowBoundsFromNativeSource( + source?: NativeMacWindowSource | null, +): WindowBounds | null { + if (!source) { + return null; + } + + const { x, y, width, height } = source; + if ( + typeof x !== "number" || + !Number.isFinite(x) || + typeof y !== "number" || + !Number.isFinite(y) || + typeof width !== "number" || + !Number.isFinite(width) || + typeof height !== "number" || + !Number.isFinite(height) + ) { + return null; + } + + if (width <= 0 || height <= 0) { + return null; + } + + return { x, y, width, height }; +} + +export async function resolveMacWindowBounds(source: SelectedSource): Promise { + const windowId = parseWindowId(source.id); + if (!windowId) { + return null; + } + + try { + const nativeSources = await getNativeMacWindowSources({ maxAgeMs: 250 }); + const matchedSource = nativeSources.find((entry) => parseWindowId(entry.id) === windowId); + return getWindowBoundsFromNativeSource(matchedSource); + } catch { + return null; + } +} + +export function parseXwininfoBounds(stdout: string): WindowBounds | null { + const absX = stdout.match(/Absolute upper-left X:\s+(-?\d+)/); + const absY = 
stdout.match(/Absolute upper-left Y:\s+(-?\d+)/); + const width = stdout.match(/Width:\s+(\d+)/); + const height = stdout.match(/Height:\s+(\d+)/); + + if (!absX || !absY || !width || !height) { + return null; + } + + return { + x: Number.parseInt(absX[1], 10), + y: Number.parseInt(absY[1], 10), + width: Number.parseInt(width[1], 10), + height: Number.parseInt(height[1], 10), + }; +} + +export async function resolveLinuxWindowBounds(source: SelectedSource): Promise { + const windowId = parseWindowId(source?.id); + + if (windowId) { + try { + const { stdout } = await execFileAsync("xwininfo", ["-id", String(windowId)], { + timeout: 1500, + }); + const bounds = parseXwininfoBounds(stdout); + if (bounds && bounds.width > 0 && bounds.height > 0) { + return bounds; + } + } catch { + // fall back to title lookup below + } + } + + const windowTitle = + typeof source.windowTitle === "string" ? source.windowTitle.trim() : source.name.trim(); + if (!windowTitle) { + return null; + } + + try { + const { stdout } = await execFileAsync("xwininfo", ["-name", windowTitle], { + timeout: 1500, + }); + const bounds = parseXwininfoBounds(stdout); + return bounds && bounds.width > 0 && bounds.height > 0 ? bounds : null; + } catch { + return null; + } +} + +export async function resolveWindowsWindowBounds(source: SelectedSource): Promise { + const windowId = parseWindowId(source?.id); + const windowTitle = + typeof source.windowTitle === "string" ? 
source.windowTitle.trim() : source.name.trim(); + + if (!windowId && !windowTitle) { + return null; + } + + const script = [ + "param([string]$windowId, [string]$windowTitle)", + 'Add-Type -TypeDefinition @"', + "using System;", + "using System.Runtime.InteropServices;", + "public static class RecordlyWindowBounds {", + " [StructLayout(LayoutKind.Sequential)]", + " public struct RECT {", + " public int Left;", + " public int Top;", + " public int Right;", + " public int Bottom;", + " }", + ' [DllImport("user32.dll")]', + " [return: MarshalAs(UnmanagedType.Bool)]", + " public static extern bool GetWindowRect(IntPtr hWnd, out RECT rect);", + "}", + '"@', + "$handle = [Int64]0", + "if ($windowId) {", + " $handle = [Int64]$windowId", + "}", + "if ($handle -le 0 -and $windowTitle) {", + ' $matchingProcess = Get-Process | Where-Object { $_.MainWindowTitle -eq $windowTitle -or $_.MainWindowTitle -like "*$windowTitle*" } | Select-Object -First 1', + " if ($matchingProcess) {", + " $handle = $matchingProcess.MainWindowHandle.ToInt64()", + " }", + "}", + "if ($handle -le 0) {", + " exit 1", + "}", + "$rect = New-Object RecordlyWindowBounds+RECT", + "if (-not [RecordlyWindowBounds]::GetWindowRect([IntPtr]$handle, [ref]$rect)) {", + " exit 1", + "}", + "@{ x = $rect.Left; y = $rect.Top; width = $rect.Right - $rect.Left; height = $rect.Bottom - $rect.Top } | ConvertTo-Json -Compress", + ].join("\n"); + + try { + const { stdout } = await execFileAsync( + "powershell.exe", + ["-NoProfile", "-Command", script, String(windowId ?? ""), windowTitle], + { timeout: 1500 }, + ); + const bounds = JSON.parse(stdout) as WindowBounds; + return bounds && bounds.width > 0 && bounds.height > 0 ? 
bounds : null; + } catch { + return null; + } +} + +export function stopInteractionCapture() { + if (interactionCaptureCleanup) { + interactionCaptureCleanup(); + setInteractionCaptureCleanup(null); + } +} + +export function stopWindowBoundsCapture() { + if (windowBoundsCaptureInterval) { + clearInterval(windowBoundsCaptureInterval); + setWindowBoundsCaptureInterval(null); + } + setSelectedWindowBounds(null); +} + +async function refreshSelectedWindowBounds() { + if (!selectedSource?.id?.startsWith("window:")) { + setSelectedWindowBounds(null); + return; + } + + let bounds: WindowBounds | null = null; + + if (process.platform === "darwin") { + bounds = await resolveMacWindowBounds(selectedSource); + } else if (process.platform === "win32") { + bounds = await resolveWindowsWindowBounds(selectedSource); + } else if (process.platform === "linux") { + bounds = await resolveLinuxWindowBounds(selectedSource); + } + + setSelectedWindowBounds(bounds); +} + +export function startWindowBoundsCapture() { + stopWindowBoundsCapture(); + + if ( + !["darwin", "win32", "linux"].includes(process.platform) || + !selectedSource?.id?.startsWith("window:") + ) { + return; + } + + void refreshSelectedWindowBounds(); + setWindowBoundsCaptureInterval(setInterval(() => { + void refreshSelectedWindowBounds(); + }, 250)); +} diff --git a/electron/ipc/cursor/interaction.ts b/electron/ipc/cursor/interaction.ts new file mode 100644 index 00000000..81b8bbdb --- /dev/null +++ b/electron/ipc/cursor/interaction.ts @@ -0,0 +1,219 @@ +import { createRequire } from "node:module"; +import type { HookMouseEvent, UiohookLike, UiohookModuleNamespace, CursorInteractionType } from "../types"; +import { + isCursorCaptureActive, + cursorCaptureStartTimeMs, + interactionCaptureCleanup, + setInteractionCaptureCleanup, + hasLoggedInteractionHookFailure, + setHasLoggedInteractionHookFailure, + lastLeftClick, + setLastLeftClick, + setLinuxCursorScreenPoint, +} from "../state"; +import { + getNormalizedCursorPoint, 
+ getHookCursorScreenPoint, + pushCursorSample, +} from "./telemetry"; + +const nodeRequire = createRequire(import.meta.url); + +export function normalizeHookMouseButton(rawButton: unknown): 1 | 2 | 3 { + if (typeof rawButton !== "number" || !Number.isFinite(rawButton)) { + return 1; + } + + if (rawButton === 2 || rawButton === 39) { + return 2; + } + + if (rawButton === 3 || rawButton === 38) { + return 3; + } + + return 1; +} + +export function getHookMouseButton(event: HookMouseEvent | null | undefined): 1 | 2 | 3 { + return normalizeHookMouseButton( + event?.button ?? event?.mouseButton ?? event?.data?.button ?? event?.data?.mouseButton, + ); +} + +export function stopInteractionCapture() { + if (interactionCaptureCleanup) { + interactionCaptureCleanup(); + setInteractionCaptureCleanup(null); + } +} + +function isUiohookLike(value: unknown): value is UiohookLike { + const candidate = value as Partial | null; + return typeof candidate?.on === "function" && typeof candidate?.start === "function"; +} + +function loadUiohookModule() { + const moduleExports = nodeRequire("uiohook-napi") as UiohookModuleNamespace; + const defaultExport = moduleExports.default; + + if (moduleExports.uIOhook) { + return moduleExports.uIOhook; + } + + if (moduleExports.uiohook) { + return moduleExports.uiohook; + } + + if (moduleExports.Uiohook) { + return moduleExports.Uiohook; + } + + if (isUiohookLike(defaultExport)) { + return defaultExport; + } + + if (defaultExport?.uIOhook) { + return defaultExport.uIOhook; + } + + if (defaultExport?.uiohook) { + return defaultExport.uiohook; + } + + if (defaultExport?.Uiohook) { + return defaultExport.Uiohook; + } + + return null; +} + +export async function startInteractionCapture() { + if (!isCursorCaptureActive) { + return; + } + + if (!["darwin", "win32", "linux"].includes(process.platform)) { + return; + } + + try { + const hook = loadUiohookModule(); + console.log( + "[CursorTelemetry] hook loaded:", + !!hook, + "has.on:", + typeof 
hook?.on, + "has.start:", + typeof hook?.start, + ); + if (!isCursorCaptureActive) { + return; + } + + if (!hook || typeof hook.on !== "function" || typeof hook.start !== "function") { + console.log("[CursorTelemetry] hook unusable — aborting interaction capture"); + return; + } + + const onMouseDown = (event: HookMouseEvent) => { + if (!isCursorCaptureActive) { + return; + } + + const point = getNormalizedCursorPoint(); + if (!point) { + return; + } + + const timeMs = Date.now() - cursorCaptureStartTimeMs; + const button = getHookMouseButton(event); + let interactionType: CursorInteractionType = "click"; + + if (button === 2) { + interactionType = "right-click"; + } else if (button === 3) { + interactionType = "middle-click"; + } else { + const thresholdMs = 350; + const distance = lastLeftClick + ? Math.hypot(point.cx - lastLeftClick.cx, point.cy - lastLeftClick.cy) + : Number.POSITIVE_INFINITY; + + if ( + lastLeftClick && + timeMs - lastLeftClick.timeMs <= thresholdMs && + distance <= 0.04 + ) { + interactionType = "double-click"; + } + + setLastLeftClick({ timeMs, cx: point.cx, cy: point.cy }); + } + + pushCursorSample(point.cx, point.cy, timeMs, interactionType); + }; + + const onMouseUp = () => { + if (!isCursorCaptureActive) { + return; + } + + const point = getNormalizedCursorPoint(); + if (!point) { + return; + } + + const timeMs = Date.now() - cursorCaptureStartTimeMs; + pushCursorSample(point.cx, point.cy, timeMs, "mouseup"); + }; + + const onMouseMove = (event: HookMouseEvent) => { + if (process.platform !== "linux" || !isCursorCaptureActive) { + return; + } + + const point = getHookCursorScreenPoint(event); + if (!point) { + return; + } + + setLinuxCursorScreenPoint({ x: point.x, y: point.y, updatedAt: Date.now() }); + }; + + hook.on("mousedown", onMouseDown); + hook.on("mouseup", onMouseUp); + hook.on("mousemove", onMouseMove); + + hook.start(); + + setInteractionCaptureCleanup(() => { + try { + if (typeof hook.off === "function") { + 
hook.off("mousedown", onMouseDown); + hook.off("mouseup", onMouseUp); + hook.off("mousemove", onMouseMove); + } else if (typeof hook.removeListener === "function") { + hook.removeListener("mousedown", onMouseDown); + hook.removeListener("mouseup", onMouseUp); + hook.removeListener("mousemove", onMouseMove); + } + } catch { + // ignore listener cleanup errors + } + + try { + if (typeof hook.stop === "function") { + hook.stop(); + } + } catch { + // ignore hook shutdown errors + } + }); + } catch (error) { + if (!hasLoggedInteractionHookFailure) { + setHasLoggedInteractionHookFailure(true); + console.warn("[CursorTelemetry] Global interaction capture unavailable:", error); + } + } +} diff --git a/electron/ipc/cursor/monitor.ts b/electron/ipc/cursor/monitor.ts new file mode 100644 index 00000000..4943e523 --- /dev/null +++ b/electron/ipc/cursor/monitor.ts @@ -0,0 +1,136 @@ +import { spawn } from "node:child_process"; +import { constants as fsConstants } from "node:fs"; +import fs from "node:fs/promises"; +import type { CursorVisualType } from "../types"; +import { + currentCursorVisualType, + nativeCursorMonitorOutputBuffer, + nativeCursorMonitorProcess, + setCurrentCursorVisualType, + setNativeCursorMonitorOutputBuffer, + setNativeCursorMonitorProcess, +} from "../state"; +import { getCursorMonitorExePath, ensureNativeCursorMonitorBinary } from "../paths/binaries"; + +export function emitCursorStateChanged(cursorType: CursorVisualType) { + const { BrowserWindow } = require("electron") as typeof import("electron"); + BrowserWindow.getAllWindows().forEach((window) => { + if (!window.isDestroyed()) { + window.webContents.send("cursor-state-changed", { cursorType }); + } + }); +} + +export function handleCursorMonitorStdout(chunk: Buffer) { + setNativeCursorMonitorOutputBuffer(nativeCursorMonitorOutputBuffer + chunk.toString()); + const lines = nativeCursorMonitorOutputBuffer.split(/\r?\n/); + setNativeCursorMonitorOutputBuffer(lines.pop() ?? 
""); + + for (const line of lines) { + const match = line.match(/^STATE:(.+)$/); + if (!match) continue; + const next = match[1].trim() as CursorVisualType; + if ( + next === "arrow" || + next === "text" || + next === "pointer" || + next === "crosshair" || + next === "open-hand" || + next === "closed-hand" || + next === "resize-ew" || + next === "resize-ns" || + next === "not-allowed" + ) { + if (currentCursorVisualType !== next) { + setCurrentCursorVisualType(next); + // sampleCursorStateChange is called from cursor/telemetry.ts via the handler + emitCursorStateChanged(next); + } + } + } +} + +export function stopNativeCursorMonitor() { + setCurrentCursorVisualType("arrow"); + + if (!nativeCursorMonitorProcess) { + return; + } + + try { + nativeCursorMonitorProcess.stdin.write("stop\n"); + } catch { + // ignore stop signal issues + } + try { + nativeCursorMonitorProcess.kill(); + } catch { + // ignore kill issues + } + + setNativeCursorMonitorProcess(null); + setNativeCursorMonitorOutputBuffer(""); +} + +export async function startNativeCursorMonitor() { + stopNativeCursorMonitor(); + + if (process.platform !== "darwin" && process.platform !== "win32") { + setCurrentCursorVisualType("arrow"); + return; + } + + try { + let helperPath: string; + if (process.platform === "win32") { + helperPath = getCursorMonitorExePath(); + try { + // Use F_OK on Windows — X_OK is meaningless and can give false positives + await fs.access(helperPath, fsConstants.F_OK); + } catch { + console.warn("Windows cursor monitor helper missing:", helperPath); + setCurrentCursorVisualType("arrow"); + return; + } + } else { + helperPath = await ensureNativeCursorMonitorBinary(); + } + + setNativeCursorMonitorOutputBuffer(""); + setCurrentCursorVisualType("arrow"); + + let proc: ReturnType<typeof spawn> | null; + try { + proc = spawn(helperPath, [], { + stdio: ["pipe", "pipe", "pipe"], + }); + } catch (spawnError) { + console.warn("Failed to spawn cursor monitor:", spawnError); + 
setNativeCursorMonitorProcess(null); + setCurrentCursorVisualType("arrow"); + return; + } + + setNativeCursorMonitorProcess(proc as Parameters<typeof setNativeCursorMonitorProcess>[0]); + + proc.once("error", (error) => { + console.warn("Native cursor monitor process error:", error); + setNativeCursorMonitorProcess(null); + setNativeCursorMonitorOutputBuffer(""); + setCurrentCursorVisualType("arrow"); + }); + + if (proc.stdout) proc.stdout.on("data", handleCursorMonitorStdout); + + proc.once("close", () => { + setNativeCursorMonitorProcess(null); + setNativeCursorMonitorOutputBuffer(""); + setCurrentCursorVisualType("arrow"); + }); + } catch (error) { + console.warn("Failed to start native cursor monitor:", error); + setNativeCursorMonitorProcess(null); + setNativeCursorMonitorOutputBuffer(""); + setCurrentCursorVisualType("arrow"); + } +} diff --git a/electron/ipc/cursor/telemetry.ts b/electron/ipc/cursor/telemetry.ts new file mode 100644 index 00000000..f7eb5eaf --- /dev/null +++ b/electron/ipc/cursor/telemetry.ts @@ -0,0 +1,173 @@ +import fs from "node:fs/promises"; +import { getTelemetryPathForVideo, getScreen } from "../utils"; +import { + CURSOR_TELEMETRY_VERSION, + MAX_CURSOR_SAMPLES, + CURSOR_SAMPLE_INTERVAL_MS, +} from "../constants"; +import type { CursorVisualType, CursorInteractionType, CursorTelemetryPoint } from "../types"; +import { + cursorCaptureInterval, + setCursorCaptureInterval, + cursorCaptureStartTimeMs, + activeCursorSamples, + pendingCursorSamples, + setPendingCursorSamples, + isCursorCaptureActive, + currentCursorVisualType, + linuxCursorScreenPoint, + selectedSource, + selectedWindowBounds, +} from "../state"; + +export function clamp(value: number, min: number, max: number) { + return Math.min(max, Math.max(min, value)); +} + +export function stopCursorCapture() { + if (cursorCaptureInterval) { + clearInterval(cursorCaptureInterval); + setCursorCaptureInterval(null); + } +} + +export function getNormalizedCursorPoint() { + const fallbackCursor = 
getScreen().getCursorScreenPoint(); + const linuxCursorCache = process.platform === "linux" ? linuxCursorScreenPoint : null; + const isLinuxCacheFresh = !!linuxCursorCache && Date.now() - linuxCursorCache.updatedAt <= 1000; + + const primarySf = + process.platform !== "darwin" ? getScreen().getPrimaryDisplay().scaleFactor || 1 : 1; + + const cursor = isLinuxCacheFresh + ? { x: linuxCursorCache.x / primarySf, y: linuxCursorCache.y / primarySf } + : fallbackCursor; + + const windowBounds = selectedSource?.id?.startsWith("window:") ? selectedWindowBounds : null; + if (windowBounds) { + const sf = + process.platform !== "darwin" + ? getScreen().getDisplayNearestPoint({ + x: windowBounds.x / primarySf, + y: windowBounds.y / primarySf, + }).scaleFactor || 1 + : 1; + const width = Math.max(1, windowBounds.width / sf); + const height = Math.max(1, windowBounds.height / sf); + + return { + cx: clamp((cursor.x - windowBounds.x / sf) / width, 0, 1), + cy: clamp((cursor.y - windowBounds.y / sf) / height, 0, 1), + }; + } + + const sourceDisplayId = Number(selectedSource?.display_id); + const sourceDisplay = Number.isFinite(sourceDisplayId) + ? (getScreen() + .getAllDisplays() + .find((display) => display.id === sourceDisplayId) ?? null) + : null; + const display = sourceDisplay ?? getScreen().getDisplayNearestPoint(cursor); + const bounds = display.bounds; + const width = Math.max(1, bounds.width); + const height = Math.max(1, bounds.height); + + const cx = clamp((cursor.x - bounds.x) / width, 0, 1); + const cy = clamp((cursor.y - bounds.y) / height, 0, 1); + return { cx, cy }; +} + +export function getHookCursorScreenPoint( + event: { x?: number; y?: number; data?: { x?: number; y?: number; screenX?: number; screenY?: number }; screenX?: number; screenY?: number } | null | undefined, +): { x: number; y: number } | null { + const rawX = event?.x ?? event?.data?.x ?? event?.screenX ?? event?.data?.screenX; + const rawY = event?.y ?? event?.data?.y ?? event?.screenY ?? 
event?.data?.screenY; + + if ( + typeof rawX !== "number" || + !Number.isFinite(rawX) || + typeof rawY !== "number" || + !Number.isFinite(rawY) + ) { + return null; + } + + return { x: rawX, y: rawY }; +} + +export function pushCursorSample( + cx: number, + cy: number, + timeMs: number, + interactionType: CursorInteractionType = "move", + cursorType?: CursorVisualType, +) { + activeCursorSamples.push({ + timeMs: Math.max(0, timeMs), + cx, + cy, + interactionType, + cursorType: cursorType ?? currentCursorVisualType, + } as CursorTelemetryPoint); + + if (activeCursorSamples.length > MAX_CURSOR_SAMPLES) { + activeCursorSamples.shift(); + } +} + +export function sampleCursorPoint() { + const point = getNormalizedCursorPoint(); + if (!point) { + return; + } + + pushCursorSample(point.cx, point.cy, Date.now() - cursorCaptureStartTimeMs, "move"); +} + +export async function persistPendingCursorTelemetry(videoPath: string) { + const telemetryPath = getTelemetryPathForVideo(videoPath); + if (pendingCursorSamples.length > 0) { + await fs.writeFile( + telemetryPath, + JSON.stringify( + { version: CURSOR_TELEMETRY_VERSION, samples: pendingCursorSamples }, + null, + 2, + ), + "utf-8", + ); + } + setPendingCursorSamples([]); +} + +export function snapshotCursorTelemetryForPersistence() { + if (activeCursorSamples.length === 0) { + return; + } + + if (pendingCursorSamples.length === 0) { + setPendingCursorSamples([...activeCursorSamples]); + return; + } + + const lastPendingTimeMs = pendingCursorSamples[pendingCursorSamples.length - 1]?.timeMs ?? 
-1; + setPendingCursorSamples([ + ...pendingCursorSamples, + ...activeCursorSamples.filter((sample) => sample.timeMs > lastPendingTimeMs), + ]); +} + +export function startCursorSampling() { + stopCursorCapture(); + setCursorCaptureInterval( + setInterval(() => { + if (isCursorCaptureActive) { + sampleCursorPoint(); + } + }, CURSOR_SAMPLE_INTERVAL_MS), + ); +} + +// Re-export for consumers that use it from this module +export { getTelemetryPathForVideo } from "../utils"; +export { CURSOR_SAMPLE_INTERVAL_MS } from "../constants"; diff --git a/electron/ipc/export/native-video.ts b/electron/ipc/export/native-video.ts new file mode 100644 index 00000000..ccfe1486 --- /dev/null +++ b/electron/ipc/export/native-video.ts @@ -0,0 +1,463 @@ +import type { ChildProcessByStdio } from "node:child_process"; +import { execFile, spawn } from "node:child_process"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { promisify } from "node:util"; +import type { Readable, Writable } from "node:stream"; +import { app } from "electron"; +import type { WebContents } from "electron"; +import { getFfmpegBinaryPath } from "../ffmpeg/binary"; +import { buildTrimmedSourceAudioFilter, getEditedAudioExtension, getNativeVideoInputByteSize, getPreferredNativeVideoEncoders, buildNativeVideoExportArgs, parseAvailableFfmpegEncoders } from "../nativeVideoExport"; +import type { NativeExportEncodingMode, NativeVideoExportFinishOptions } from "../nativeVideoExport"; +import { cachedNativeVideoEncoder, setCachedNativeVideoEncoder } from "../state"; + +const execFileAsync = promisify(execFile); + +export type NativeVideoExportSession = { + ffmpegProcess: ChildProcessByStdio<Writable, Readable, Readable>; + outputPath: string; + inputByteSize: number; + inputMode: "rawvideo" | "h264-stream"; + maxQueuedWriteBytes: number; + stderrOutput: string; + encoderName: string; + processError: Error | null; + stdinError: Error | null; + terminating: boolean; + writeSequence: Promise<void>; + completionPromise: Promise<void>; + 
sender: WebContents | null; + pendingWriteRequestIds: Set<number>; +}; + +export const nativeVideoExportSessions = new Map<string, NativeVideoExportSession>(); + +export function cleanupNativeVideoExportSessions() { + for (const [sessionId, session] of nativeVideoExportSessions) { + session.terminating = true; + try { + if (!session.ffmpegProcess.stdin.destroyed) { + session.ffmpegProcess.stdin.destroy(); + } + } catch { + /* stream may already be closed */ + } + try { + session.ffmpegProcess.kill("SIGKILL"); + } catch { + /* process may already be exited */ + } + nativeVideoExportSessions.delete(sessionId); + } +} + +export function getNativeVideoExportMaxQueuedWriteBytes(inputByteSize: number) { + if (inputByteSize === 0) return 8 * 1024 * 1024; + return Math.min(64 * 1024 * 1024, Math.max(16 * 1024 * 1024, inputByteSize * 4)); +} + +export function isHardwareAcceleratedVideoEncoder(encoderName: string) { + return /(videotoolbox|nvenc|qsv|amf|mf)/i.test(encoderName); +} + +export async function removeTemporaryExportFile(filePath: string | null | undefined) { + if (!filePath) { + return; + } + + try { + await fs.rm(filePath, { force: true }); + } catch { + // Ignore cleanup failures for temp export artifacts. 
+ } +} + +export function getNativeVideoExportSessionError(session: NativeVideoExportSession, fallback: string) { + return ( + session.stdinError?.message || + session.processError?.message || + session.stderrOutput.trim() || + fallback + ); +} + +export function sendNativeVideoExportWriteFrameResult( + sender: WebContents | null | undefined, + sessionId: string, + requestId: number, + result: { success: boolean; error?: string }, +) { + if (!sender || sender.isDestroyed()) { + return; + } + + sender.send("native-video-export-write-frame-result", { + sessionId, + requestId, + ...result, + }); +} + +export function settleNativeVideoExportWriteFrameRequest( + sessionId: string, + session: NativeVideoExportSession, + requestId: number, + result: { success: boolean; error?: string }, +) { + session.pendingWriteRequestIds.delete(requestId); + sendNativeVideoExportWriteFrameResult(session.sender, sessionId, requestId, result); +} + +export function flushNativeVideoExportPendingWriteRequests( + sessionId: string, + session: NativeVideoExportSession, + error: string, +) { + for (const requestId of session.pendingWriteRequestIds) { + sendNativeVideoExportWriteFrameResult(session.sender, sessionId, requestId, { + success: false, + error, + }); + } + + session.pendingWriteRequestIds.clear(); +} + +export function isIgnorableNativeVideoExportStreamError(error: Error | null | undefined): boolean { + if (!error) { + return false; + } + + const errno = error as NodeJS.ErrnoException; + return ( + errno.code === "EPIPE" || + errno.code === "ERR_STREAM_DESTROYED" || + /broken pipe|stream destroyed|eof/i.test(error.message) + ); +} + +export async function waitForNativeVideoExportDrain(session: NativeVideoExportSession) { + if ( + session.stdinError || + session.processError || + session.ffmpegProcess.stdin.destroyed || + session.ffmpegProcess.stdin.writableEnded || + !session.ffmpegProcess.stdin.writable || + session.ffmpegProcess.stdin.writableLength <= 0 + ) { + return; + } + + 
await new Promise<void>((resolve, reject) => { + const timeout = setTimeout(() => { + cleanup(); + reject( + new Error("Timed out while waiting for native export writer backpressure to clear"), + ); + }, 15000); + + const cleanup = () => { + clearTimeout(timeout); + session.ffmpegProcess.stdin.off("drain", handleDrain); + session.ffmpegProcess.stdin.off("error", handleError); + session.ffmpegProcess.off("close", handleClose); + }; + + const handleDrain = () => { + cleanup(); + resolve(); + }; + + const handleError = (error: Error) => { + cleanup(); + reject(error); + }; + + const handleClose = () => { + cleanup(); + reject( + new Error( + getNativeVideoExportSessionError( + session, + "Native video export writer closed before draining", + ), + ), + ); + }; + + session.ffmpegProcess.stdin.once("drain", handleDrain); + session.ffmpegProcess.stdin.once("error", handleError); + session.ffmpegProcess.once("close", handleClose); + }); +} + +export function getNativeVideoExportFrameLength(frameData: Uint8Array | ArrayBuffer) { + return frameData.byteLength; +} + +export async function writeNativeVideoExportFrame( + session: NativeVideoExportSession, + frameData: Uint8Array | ArrayBuffer, +) { + if (session.inputMode !== "h264-stream" && getNativeVideoExportFrameLength(frameData) !== session.inputByteSize) { + throw new Error( + `Native video export expected ${session.inputByteSize} bytes per frame but received ${getNativeVideoExportFrameLength(frameData)}`, + ); + } + + if ( + session.stdinError || + session.processError || + session.ffmpegProcess.stdin.destroyed || + session.ffmpegProcess.stdin.writableEnded || + !session.ffmpegProcess.stdin.writable + ) { + throw new Error( + getNativeVideoExportSessionError( + session, + "Native video export encoder is not accepting frames", + ), + ); + } + + const frameBuffer = + frameData instanceof ArrayBuffer + ? 
Buffer.from(frameData) + : Buffer.from(frameData.buffer, frameData.byteOffset, frameData.byteLength); + + try { + session.ffmpegProcess.stdin.write(frameBuffer); + } catch (error) { + session.stdinError = error instanceof Error ? error : new Error(String(error)); + throw session.stdinError; + } + + if (session.ffmpegProcess.stdin.writableLength >= session.maxQueuedWriteBytes) { + try { + await waitForNativeVideoExportDrain(session); + } catch (error) { + session.stdinError = error instanceof Error ? error : new Error(String(error)); + throw session.stdinError; + } + } +} + +export async function enqueueNativeVideoExportFrameWrite( + session: NativeVideoExportSession, + frameData: Uint8Array | ArrayBuffer, +) { + const writePromise = session.writeSequence.then(async () => { + if (session.terminating) { + throw new Error("Native video export session was cancelled"); + } + + await writeNativeVideoExportFrame(session, frameData); + }); + + session.writeSequence = writePromise.catch(() => undefined); + await writePromise; +} + +export async function getAvailableNativeVideoEncoders(ffmpegPath: string) { + const { stdout } = await execFileAsync(ffmpegPath, ["-hide_banner", "-encoders"], { + timeout: 15000, + maxBuffer: 20 * 1024 * 1024, + }); + + return parseAvailableFfmpegEncoders(stdout); +} + +export async function probeNativeVideoEncoder( + ffmpegPath: string, + encoderName: string, + encodingMode: NativeExportEncodingMode, +) { + const outputPath = path.join( + app.getPath("temp"), + `recordly-export-probe-${Date.now()}-${Math.random().toString(36).slice(2, 8)}.mp4`, + ); + const args = buildNativeVideoExportArgs( + encoderName, + { + width: 64, + height: 64, + frameRate: 1, + bitrate: 1_500_000, + encodingMode, + }, + outputPath, + ); + + return new Promise((resolve) => { + const process = spawn(ffmpegPath, args, { + stdio: ["pipe", "ignore", "pipe"], + }); + let stderrOutput = ""; + const timeout = setTimeout(() => { + try { + process.kill("SIGKILL"); + } catch { + 
// ignore + } + resolve(false); + }, 15000); + + process.stderr.on("data", (chunk: Buffer) => { + stderrOutput += chunk.toString(); + }); + + process.on("close", (code) => { + clearTimeout(timeout); + void removeTemporaryExportFile(outputPath); + if (code !== 0 && stderrOutput.trim().length > 0) { + console.warn( + `[native-export] Encoder probe failed for ${encoderName}:`, + stderrOutput.trim(), + ); + } + resolve(code === 0); + }); + + process.stdin.end(Buffer.alloc(getNativeVideoInputByteSize(64, 64), 0)); + }); +} + +export async function resolveNativeVideoEncoder( + ffmpegPath: string, + encodingMode: NativeExportEncodingMode, +) { + if (cachedNativeVideoEncoder?.ffmpegPath === ffmpegPath) { + return cachedNativeVideoEncoder.encoderName; + } + + const availableEncoders = await getAvailableNativeVideoEncoders(ffmpegPath); + const candidates = [ + ...new Set([...getPreferredNativeVideoEncoders(process.platform), "libx264"]), + ]; + + for (const encoderName of candidates) { + if (!availableEncoders.has(encoderName)) { + continue; + } + + if (await probeNativeVideoEncoder(ffmpegPath, encoderName, encodingMode)) { + setCachedNativeVideoEncoder({ ffmpegPath, encoderName }); + return encoderName; + } + } + + throw new Error("No usable FFmpeg encoder was available for native export"); +} + +export async function muxNativeVideoExportAudio( + videoPath: string, + options: NativeVideoExportFinishOptions, +) { + const audioMode = options.audioMode ?? "none"; + if (audioMode === "none") { + return videoPath; + } + + const ffmpegPath = getFfmpegBinaryPath(); + const tempArtifacts: string[] = []; + let audioInputPath = options.audioSourcePath ?? 
null; + + if (audioMode === "edited-track") { + if (!options.editedAudioData) { + throw new Error("Edited audio data is missing for native export"); + } + + const extension = getEditedAudioExtension(options.editedAudioMimeType); + audioInputPath = path.join( + app.getPath("temp"), + `recordly-export-audio-${Date.now()}-${Math.random().toString(36).slice(2, 8)}${extension}`, + ); + await fs.writeFile(audioInputPath, Buffer.from(options.editedAudioData)); + tempArtifacts.push(audioInputPath); + } + + if (!audioInputPath) { + return videoPath; + } + + const outputPath = path.join( + path.dirname(videoPath), + `${path.basename(videoPath, path.extname(videoPath))}-final.mp4`, + ); + + const args = [ + "-y", + "-hide_banner", + "-loglevel", + "error", + "-i", + videoPath, + "-i", + audioInputPath, + ]; + + if (audioMode === "trim-source") { + const filter = buildTrimmedSourceAudioFilter(options.trimSegments ?? []); + if (filter) { + args.push("-filter_complex", filter, "-map", "0:v:0", "-map", "[aout]"); + } else { + args.push("-map", "0:v:0", "-map", "1:a:0"); + } + } else { + args.push("-map", "0:v:0", "-map", "1:a:0"); + } + + args.push( + "-c:v", + "copy", + "-c:a", + "aac", + "-b:a", + "192k", + "-shortest", + "-movflags", + "+faststart", + outputPath, + ); + + try { + await execFileAsync(ffmpegPath, args, { + timeout: 15 * 60 * 1000, + maxBuffer: 20 * 1024 * 1024, + }); + await removeTemporaryExportFile(videoPath); + return outputPath; + } finally { + await Promise.allSettled( + tempArtifacts.map((artifactPath) => removeTemporaryExportFile(artifactPath)), + ); + } +} + +export async function muxExportedVideoAudioBuffer( + videoData: ArrayBuffer, + options: NativeVideoExportFinishOptions, +) { + const tempVideoPath = path.join( + app.getPath("temp"), + `recordly-export-video-${Date.now()}-${Math.random().toString(36).slice(2, 8)}.mp4`, + ); + + try { + await fs.writeFile(tempVideoPath, Buffer.from(videoData)); + const finalizedPath = await 
muxNativeVideoExportAudio(tempVideoPath, options); + const muxedData = await fs.readFile(finalizedPath); + return new Uint8Array(muxedData); + } finally { + await Promise.allSettled([ + removeTemporaryExportFile(tempVideoPath), + removeTemporaryExportFile(`${tempVideoPath}.muxed.mp4`), + removeTemporaryExportFile( + path.join( + path.dirname(tempVideoPath), + `${path.basename(tempVideoPath, path.extname(tempVideoPath))}-final.mp4`, + ), + ), + ]); + } +} diff --git a/electron/ipc/ffmpeg/binary.ts b/electron/ipc/ffmpeg/binary.ts new file mode 100644 index 00000000..095db5a8 --- /dev/null +++ b/electron/ipc/ffmpeg/binary.ts @@ -0,0 +1,76 @@ +import { spawnSync } from "node:child_process"; +import { existsSync } from "node:fs"; +import { createRequire } from "node:module"; +import { app } from "electron"; + +const nodeRequire = createRequire(import.meta.url); + +export function loadFfmpegStatic(): string | null { + const moduleExports = nodeRequire("ffmpeg-static"); + if (typeof moduleExports === "string") { + return moduleExports; + } + + if (typeof moduleExports?.default === "string") { + return moduleExports.default as string; + } + + return null; +} + +export function resolveSystemFfmpegBinaryPath(): string | null { + const locator = process.platform === "win32" ? 
"where" : "which"; + const result = spawnSync(locator, ["ffmpeg"], { + encoding: "utf-8", + windowsHide: true, + }); + + if (result.status === 0) { + const candidate = result.stdout + .split(/\r?\n/) +.map((line: string) => line.trim()) + .find((line: string) => line.length > 0); + + if (candidate) { + return candidate; + } + } + + // Fallback: check common install paths directly (Electron's shell may lack full PATH) + if (process.platform !== "win32") { + const commonPaths = [ + "/opt/homebrew/bin/ffmpeg", + "/usr/local/bin/ffmpeg", + "/usr/bin/ffmpeg", + ]; + for (const p of commonPaths) { + if (existsSync(p)) { + return p; + } + } + } + + return null; +} + +export function getFfmpegBinaryPath(): string { + const ffmpegStatic = loadFfmpegStatic(); + if (ffmpegStatic && typeof ffmpegStatic === "string") { + const bundledPath = app.isPackaged + ? ffmpegStatic.replace(/\.asar([/\\])/, ".asar.unpacked$1") + : ffmpegStatic; + + if (existsSync(bundledPath)) { + return bundledPath; + } + } + + const systemFfmpeg = resolveSystemFfmpegBinaryPath(); + if (systemFfmpeg) { + return systemFfmpeg; + } + + throw new Error( + "FFmpeg binary is unavailable. 
Install ffmpeg-static for this platform or make ffmpeg available on PATH.", + ); +} diff --git a/electron/ipc/ffmpeg/filters.ts b/electron/ipc/ffmpeg/filters.ts new file mode 100644 index 00000000..dccbd8b4 --- /dev/null +++ b/electron/ipc/ffmpeg/filters.ts @@ -0,0 +1,194 @@ +import type { AudioSyncAdjustment, PauseSegment } from "../types"; + +export function buildAtempoFilters(tempoRatio: number): string[] { + if (!Number.isFinite(tempoRatio) || tempoRatio <= 0) { + return []; + } + + const filters: string[] = []; + let remaining = tempoRatio; + + while (remaining < 0.5) { + filters.push("atempo=0.5"); + remaining /= 0.5; + } + + while (remaining > 2) { + filters.push("atempo=2.0"); + remaining /= 2.0; + } + + if (Math.abs(remaining - 1) > 0.0005) { + filters.push(`atempo=${remaining.toFixed(6)}`); + } + + return filters; +} + +export function getAudioSyncAdjustment( + videoDuration: number, + audioDuration: number, +): AudioSyncAdjustment { + if ( + !Number.isFinite(videoDuration) || + !Number.isFinite(audioDuration) || + videoDuration <= 0 || + audioDuration <= 0 + ) { + return { mode: "none", delayMs: 0, tempoRatio: 1, durationDeltaMs: 0 }; + } + + const durationDeltaMs = Math.round((videoDuration - audioDuration) * 1000); + const absDeltaMs = Math.abs(durationDeltaMs); + if (absDeltaMs <= 50) { + return { mode: "none", delayMs: 0, tempoRatio: 1, durationDeltaMs }; + } + + const tempoRatio = Math.max(0.5, Math.min(2, audioDuration / videoDuration)); + const relativeDelta = absDeltaMs / Math.max(videoDuration * 1000, 1); + + if (relativeDelta <= 0.03 || absDeltaMs <= 1500 || durationDeltaMs < 0) { + return { mode: "tempo", delayMs: 0, tempoRatio, durationDeltaMs }; + } + + return { mode: "delay", delayMs: durationDeltaMs, tempoRatio: 1, durationDeltaMs }; +} + +export function appendSyncedAudioFilter( + filterParts: string[], + inputLabel: string, + outputLabel: string, + adjustment: AudioSyncAdjustment, +) { + const filters: string[] = []; + + if 
(adjustment.mode === "delay" && adjustment.delayMs > 0) { + filters.push(`adelay=${adjustment.delayMs}|${adjustment.delayMs}`); + } + + if (adjustment.mode === "tempo") { + filters.push(...buildAtempoFilters(adjustment.tempoRatio)); + } + + filters.push("aresample=async=1:first_pts=0", "asetpts=PTS-STARTPTS"); + filterParts.push(`${inputLabel}${filters.join(",")}[${outputLabel}]`); +} + +export function formatFfmpegSeconds(milliseconds: number): string { + return (milliseconds / 1000).toFixed(3); +} + +export function normalizePauseSegments( + pauseSegments: PauseSegment[] | undefined, +): PauseSegment[] { + if (!Array.isArray(pauseSegments) || pauseSegments.length === 0) { + return []; + } + + const normalized = pauseSegments + .map((segment) => { + const startMs = Number(segment?.startMs); + const endMs = Number(segment?.endMs); + + if (!Number.isFinite(startMs) || !Number.isFinite(endMs)) { + return null; + } + + const clampedStart = Math.max(0, Math.round(startMs)); + const clampedEnd = Math.max(0, Math.round(endMs)); + if (clampedEnd <= clampedStart) { + return null; + } + + return { startMs: clampedStart, endMs: clampedEnd }; + }) + .filter((segment): segment is PauseSegment => !!segment) + .sort((left, right) => left.startMs - right.startMs); + + if (normalized.length <= 1) { + return normalized; + } + + const merged: PauseSegment[] = [{ ...normalized[0] }]; + + for (const segment of normalized.slice(1)) { + const previous = merged[merged.length - 1]; + if (segment.startMs <= previous.endMs) { + previous.endMs = Math.max(previous.endMs, segment.endMs); + } else { + merged.push({ ...segment }); + } + } + + return merged; +} + +export function buildPausedAudioFilter( + inputLabel: string, + outputLabel: string, + pauseSegments: PauseSegment[], +): string | null { + if (pauseSegments.length === 0) { + return null; + } + + const activeSegments: Array<{ startMs: number; endMs?: number }> = []; + let cursorMs = 0; + + for (const pauseSegment of pauseSegments) { + 
if (pauseSegment.startMs > cursorMs) { + activeSegments.push({ startMs: cursorMs, endMs: pauseSegment.startMs }); + } + cursorMs = Math.max(cursorMs, pauseSegment.endMs); + } + + activeSegments.push({ startMs: cursorMs }); + + const filterParts: string[] = []; + const segmentLabels: string[] = []; + + activeSegments.forEach((segment, index) => { + if (typeof segment.endMs === "number" && segment.endMs <= segment.startMs) { + return; + } + + const segmentLabel = `${outputLabel}_part${index}`; + const trimArgs = + typeof segment.endMs === "number" + ? `start=${formatFfmpegSeconds(segment.startMs)}:end=${formatFfmpegSeconds(segment.endMs)}` + : `start=${formatFfmpegSeconds(segment.startMs)}`; + + filterParts.push(`[${inputLabel}]atrim=${trimArgs},asetpts=PTS-STARTPTS[${segmentLabel}]`); + segmentLabels.push(`[${segmentLabel}]`); + }); + + if (segmentLabels.length === 0) { + return null; + } + + if (segmentLabels.length === 1) { + filterParts.push(`${segmentLabels[0]}anull[${outputLabel}]`); + } else { + filterParts.push( + `${segmentLabels.join("")}concat=n=${segmentLabels.length}:v=0:a=1[${outputLabel}]`, + ); + } + + return filterParts.join(";"); +} + +export function parseFfmpegDurationSeconds(stderr: string): number | null { + const match = stderr.match(/Duration:\s+(\d+):(\d+):(\d+(?:\.\d+)?)/i); + if (!match) { + return null; + } + + const hours = Number(match[1]); + const minutes = Number(match[2]); + const seconds = Number(match[3]); + if (![hours, minutes, seconds].every(Number.isFinite)) { + return null; + } + + return hours * 3600 + minutes * 60 + seconds; +} diff --git a/electron/ipc/handlers.ts b/electron/ipc/handlers.ts index 9e7d56be..db91ccb7 100644 --- a/electron/ipc/handlers.ts +++ b/electron/ipc/handlers.ts @@ -1,14 +1,12 @@ import type { ChildProcessByStdio, ChildProcessWithoutNullStreams } from "node:child_process"; -import { execFile, spawn, spawnSync } from "node:child_process"; -import { createWriteStream, existsSync, constants as fsConstants } 
from "node:fs"; +import { execFile, spawn } from "node:child_process"; +import { existsSync, constants as fsConstants } from "node:fs"; import fs from "node:fs/promises"; -import { get as httpsGet } from "node:https"; -import { createRequire } from "node:module"; import path from "node:path"; import type { Readable, Writable } from "node:stream"; -import { fileURLToPath, pathToFileURL } from "node:url"; +import { pathToFileURL } from "node:url"; import { promisify } from "node:util"; -import type { SaveDialogOptions, WebContents } from "electron"; +import type { SaveDialogOptions } from "electron"; import { app, BrowserWindow, @@ -24,4333 +22,275 @@ import { closeCountdownWindow, createCountdownWindow, getCountdownWindow } from import { buildNativeH264StreamExportArgs, buildNativeVideoExportArgs, - buildTrimmedSourceAudioFilter, - getEditedAudioExtension, getNativeVideoInputByteSize, - getPreferredNativeVideoEncoders, type NativeExportEncodingMode, type NativeVideoExportFinishOptions, - parseAvailableFfmpegEncoders, } from "./nativeVideoExport"; import { resolveWindowsCaptureDisplay } from "./windowsCaptureSelection"; +import { + PROJECT_FILE_EXTENSION, + LEGACY_PROJECT_FILE_EXTENSIONS, + SHORTCUTS_FILE, + RECORDINGS_SETTINGS_FILE, + COUNTDOWN_SETTINGS_FILE, + ALLOW_RECORDLY_WINDOW_CAPTURE, + CURSOR_SAMPLE_INTERVAL_MS, +} from "./constants"; +import type { + SelectedSource, + NativeMacRecordingOptions, + PauseSegment, + SystemCursorAsset, + CursorTelemetryPoint, +} from "./types"; +import { + selectedSource, + setSelectedSource, + currentProjectPath, + setCurrentProjectPath, + nativeScreenRecordingActive, + setNativeScreenRecordingActive, + currentVideoPath, + setCurrentVideoPath, + currentRecordingSession, + setCurrentRecordingSession, + approvedLocalReadPaths, + nativeCaptureProcess, + setNativeCaptureProcess, + nativeCaptureOutputBuffer, + setNativeCaptureOutputBuffer, + nativeCaptureTargetPath, + setNativeCaptureTargetPath, + setNativeCaptureStopRequested, + 
nativeCaptureSystemAudioPath, + setNativeCaptureSystemAudioPath, + nativeCaptureMicrophonePath, + setNativeCaptureMicrophonePath, + nativeCapturePaused, + setNativeCapturePaused, + windowsCaptureProcess, + setWindowsCaptureProcess, + windowsCaptureOutputBuffer, + setWindowsCaptureOutputBuffer, + windowsCaptureTargetPath, + setWindowsCaptureTargetPath, + windowsNativeCaptureActive, + setWindowsNativeCaptureActive, + setWindowsCaptureStopRequested, + windowsCapturePaused, + setWindowsCapturePaused, + windowsSystemAudioPath, + setWindowsSystemAudioPath, + windowsMicAudioPath, + setWindowsMicAudioPath, + windowsPendingVideoPath, + setWindowsPendingVideoPath, + lastNativeCaptureDiagnostics, + ffmpegScreenRecordingActive, + setFfmpegScreenRecordingActive, + ffmpegCaptureProcess, + setFfmpegCaptureProcess, + ffmpegCaptureOutputBuffer, + setFfmpegCaptureOutputBuffer, + ffmpegCaptureTargetPath, + setFfmpegCaptureTargetPath, + cachedSystemCursorAssets, + setCachedSystemCursorAssets, + cachedSystemCursorAssetsSourceMtimeMs, + setCachedSystemCursorAssetsSourceMtimeMs, + countdownTimer, + setCountdownTimer, + countdownCancelled, + setCountdownCancelled, + countdownInProgress, + setCountdownInProgress, + countdownRemaining, + setCountdownRemaining, + setCursorCaptureInterval, + setCursorCaptureStartTimeMs, + setActiveCursorSamples, + setPendingCursorSamples, + setIsCursorCaptureActive, + setLastLeftClick, + setLinuxCursorScreenPoint, +} from "./state"; +import { getFfmpegBinaryPath } from "./ffmpeg/binary"; +import { + sendWhisperModelDownloadProgress, + getWhisperSmallModelStatus, + downloadWhisperSmallModel, + deleteWhisperSmallModel, +} from "./captions/whisper"; +import { + getNativeCaptureHelperBinaryPath, + getSystemCursorHelperSourcePath, + getSystemCursorHelperBinaryPath, + ensureSwiftHelperBinary, + getWindowsCaptureExePath, + ensureNativeCaptureHelperBinary, +} from "./paths/binaries"; +import { + stopNativeCursorMonitor, + startNativeCursorMonitor, +} from 
"./cursor/monitor"; +import { getScreen, normalizePath, normalizeVideoSourcePath, parseWindowId, getTelemetryPathForVideo, isAutoRecordingPath, moveFileWithOverwrite, getRecordingsDir } from "./utils"; +import { recordNativeCaptureDiagnostics, getFileSizeIfPresent, getCompanionAudioFallbackPaths } from "./recording/diagnostics"; +import { getProjectsDir, persistRecordingsDirectorySetting, saveProjectThumbnail, rememberRecentProject, listProjectLibraryEntries, loadProjectFromPath, isAllowedLocalReadPath, rememberApprovedLocalReadPath, replaceApprovedSessionLocalReadPaths, getAssetRootPath } from "./project/manager"; +import { persistRecordingSessionManifest, resolveRecordingSession } from "./project/session"; +import { + nativeVideoExportSessions, + getNativeVideoExportMaxQueuedWriteBytes, + isHardwareAcceleratedVideoEncoder, + removeTemporaryExportFile, + getNativeVideoExportSessionError, + sendNativeVideoExportWriteFrameResult, + settleNativeVideoExportWriteFrameRequest, + flushNativeVideoExportPendingWriteRequests, + isIgnorableNativeVideoExportStreamError, + enqueueNativeVideoExportFrameWrite, + resolveNativeVideoEncoder, + muxNativeVideoExportAudio, + muxExportedVideoAudioBuffer, + type NativeVideoExportSession, +} from "./export/native-video"; +import { generateAutoCaptionsFromVideo } from "./captions/generate"; +import { buildFfmpegCaptureArgs, waitForFfmpegCaptureStart, waitForFfmpegCaptureStop, getDisplayBoundsForSource } from "./recording/ffmpeg"; +import { isNativeWindowsCaptureAvailable, waitForWindowsCaptureStart, waitForWindowsCaptureStop, attachWindowsCaptureLifecycle, muxNativeWindowsVideoWithAudio } from "./recording/windows"; +import { waitForNativeCaptureStart, waitForNativeCaptureStop, muxNativeMacRecordingWithAudio, attachNativeCaptureLifecycle, finalizeStoredVideo, recoverNativeMacCaptureOutput } from "./recording/mac"; +import { clamp, stopCursorCapture, sampleCursorPoint, snapshotCursorTelemetryForPersistence } from "./cursor/telemetry"; 
+import { getNativeMacWindowSources, stopWindowBoundsCapture, resolveMacWindowBounds, startWindowBoundsCapture, resolveLinuxWindowBounds, resolveWindowsWindowBounds } from "./cursor/bounds"; +import { startInteractionCapture, stopInteractionCapture } from "./cursor/interaction"; + +export { cleanupNativeVideoExportSessions } from "./export/native-video"; -const execFileAsync = promisify(execFile); -const nodeRequire = createRequire(import.meta.url); - -const PROJECT_FILE_EXTENSION = "recordly"; -const LEGACY_PROJECT_FILE_EXTENSIONS = ["openscreen"]; -const PROJECTS_DIRECTORY_NAME = "Projects"; -const PROJECT_THUMBNAIL_SUFFIX = ".preview.png"; -const RECENT_PROJECTS_FILE = path.join(USER_DATA_PATH, "recent-projects.json"); -const MAX_RECENT_PROJECTS = 16; -const SHORTCUTS_FILE = path.join(USER_DATA_PATH, "shortcuts.json"); -const RECORDINGS_SETTINGS_FILE = path.join(USER_DATA_PATH, "recordings-settings.json"); -const COUNTDOWN_SETTINGS_FILE = path.join(USER_DATA_PATH, "countdown-settings.json"); -const AUTO_RECORDING_PREFIX = "recording-"; -const AUTO_RECORDING_RETENTION_COUNT = 20; -const AUTO_RECORDING_MAX_AGE_MS = 14 * 24 * 60 * 60 * 1000; -const ALLOW_RECORDLY_WINDOW_CAPTURE = Boolean(process.env["VITE_DEV_SERVER_URL"]); -const RECORDING_SESSION_MANIFEST_SUFFIX = ".recordly-session.json"; -const WHISPER_MODEL_DOWNLOAD_URL = - "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.bin"; -const WHISPER_MODEL_DIR = path.join(USER_DATA_PATH, "whisper"); -const WHISPER_SMALL_MODEL_PATH = path.join(WHISPER_MODEL_DIR, "ggml-small.bin"); -const COMPANION_AUDIO_LAYOUTS = [ - { platform: "mac" as const, systemSuffix: ".system.m4a", micSuffix: ".mic.m4a" }, - { platform: "win" as const, systemSuffix: ".system.wav", micSuffix: ".mic.wav" }, - { platform: "mac" as const, systemSuffix: ".system.webm", micSuffix: ".mic.webm" }, -]; - -function getAssetRootPath() { - if (app.isPackaged) { - return path.join(process.resourcesPath, "assets"); - } - - return 
path.join(app.getAppPath(), "public"); -} - -function getScreen() { - if (!app.isReady()) { - throw new Error( - "getScreen() called before app is ready. Ensure all screen access happens after app.whenReady().", - ); - } - return nodeRequire("electron").screen as typeof import("electron").screen; -} - -function normalizeRecordingTimeOffsetMs(value: unknown): number { - return typeof value === "number" && Number.isFinite(value) ? Math.round(value) : 0; -} - -function broadcastSelectedSourceChange() { - for (const window of BrowserWindow.getAllWindows()) { - if (!window.isDestroyed()) { - window.webContents.send("selected-source-changed", selectedSource); - } - } -} - -type SelectedSource = { - id?: string; - name: string; - display_id?: string; - sourceType?: "screen" | "window"; - appName?: string; - windowTitle?: string; - [key: string]: unknown; -}; - -type NativeMacRecordingOptions = { - capturesSystemAudio?: boolean; - capturesMicrophone?: boolean; - microphoneDeviceId?: string; - microphoneLabel?: string; -}; - -type WindowBounds = { - x: number; - y: number; - width: number; - height: number; -}; - -type NativeCaptureDiagnostics = { - backend: "windows-wgc" | "mac-screencapturekit" | "browser-store" | "ffmpeg"; - phase: "availability" | "start" | "stop" | "mux"; - timestamp: string; - sourceId?: string | null; - sourceType?: SelectedSource["sourceType"] | "unknown"; - displayId?: number | null; - displayBounds?: WindowBounds | null; - windowHandle?: number | null; - helperPath?: string | null; - outputPath?: string | null; - systemAudioPath?: string | null; - microphonePath?: string | null; - osRelease?: string; - supported?: boolean; - helperExists?: boolean; - fileSizeBytes?: number | null; - processOutput?: string; - error?: string; -}; - -type RecordingSessionData = { - videoPath: string; - webcamPath?: string | null; - timeOffsetMs?: number; -}; - -type PauseSegment = { - startMs: number; - endMs: number; -}; - -type RecordingSessionManifest = { - 
version: 1 | 2; - videoFileName: string; - webcamFileName?: string | null; - timeOffsetMs?: number; -}; - -type ProjectLibraryEntry = { - path: string; - name: string; - updatedAt: number; - thumbnailPath: string | null; - isCurrent: boolean; - isInProjectsDirectory: boolean; -}; - -let selectedSource: SelectedSource | null = null; -let currentProjectPath: string | null = null; -let nativeScreenRecordingActive = false; -let currentVideoPath: string | null = null; -let currentRecordingSession: RecordingSessionData | null = null; -const approvedLocalReadPaths = new Set(); -function approveUserPath(filePath: string | null | undefined) { - if (!filePath) { - return; - } - - try { - approvedLocalReadPaths.add(path.resolve(filePath)); - } catch { - // Ignore invalid paths; later reads will surface the underlying error. - } -} -let nativeCaptureProcess: ChildProcessWithoutNullStreams | null = null; -let nativeCaptureOutputBuffer = ""; -let nativeCaptureTargetPath: string | null = null; -let nativeCaptureStopRequested = false; -let nativeCaptureSystemAudioPath: string | null = null; -let nativeCaptureMicrophonePath: string | null = null; -let nativeCapturePaused = false; -let nativeCursorMonitorProcess: ChildProcessWithoutNullStreams | null = null; -let nativeCursorMonitorOutputBuffer = ""; -let windowsCaptureProcess: ChildProcessWithoutNullStreams | null = null; -let windowsCaptureOutputBuffer = ""; -let windowsCaptureTargetPath: string | null = null; -let windowsNativeCaptureActive = false; -let windowsCaptureStopRequested = false; -let windowsCapturePaused = false; -let windowsSystemAudioPath: string | null = null; -let windowsMicAudioPath: string | null = null; -let windowsPendingVideoPath: string | null = null; -let lastNativeCaptureDiagnostics: NativeCaptureDiagnostics | null = null; -let ffmpegScreenRecordingActive = false; -let ffmpegCaptureProcess: ChildProcessWithoutNullStreams | null = null; -let ffmpegCaptureOutputBuffer = ""; -let ffmpegCaptureTargetPath: 
string | null = null; -let customRecordingsDir: string | null = null; -let recordingsDirLoaded = false; -let cachedSystemCursorAssets: Record | null = null; -let cachedSystemCursorAssetsSourceMtimeMs: number | null = null; -let countdownTimer: ReturnType | null = null; -let countdownCancelled = false; -let countdownInProgress = false; -let countdownRemaining: number | null = null; - -type SystemCursorAsset = { - dataUrl: string; - hotspotX: number; - hotspotY: number; - width: number; - height: number; -}; - -type CursorVisualType = - | "arrow" - | "text" - | "pointer" - | "crosshair" - | "open-hand" - | "closed-hand" - | "resize-ew" - | "resize-ns" - | "not-allowed"; - -let currentCursorVisualType: CursorVisualType | undefined = undefined; - -/** Returns the currently selected source ID for setDisplayMediaRequestHandler */ -export function getSelectedSourceId(): string | null { - return (selectedSource?.id as string | null) ?? null; -} - -export function killWindowsCaptureProcess() { - if (windowsCaptureProcess) { - try { - windowsCaptureProcess.kill(); - } catch { - /* ignore */ - } - windowsCaptureProcess = null; - windowsCaptureTargetPath = null; - windowsNativeCaptureActive = false; - nativeScreenRecordingActive = false; - windowsCaptureStopRequested = false; - windowsCapturePaused = false; - windowsSystemAudioPath = null; - windowsMicAudioPath = null; - windowsPendingVideoPath = null; - } -} - -function normalizePath(filePath: string) { - return path.resolve(filePath); -} - -function normalizeDesktopSourceName(value: string) { - return value.trim().replace(/\s+/g, " ").toLowerCase(); -} - -function hasUsableSourceThumbnail( - thumbnail: - | { - isEmpty: () => boolean; - getSize: () => { width: number; height: number }; - } - | null - | undefined, -) { - if (!thumbnail || thumbnail.isEmpty()) { - return false; - } - - const size = thumbnail.getSize(); - return size.width > 1 && size.height > 1; -} - -function getMacPrivacySettingsUrl(pane: "screen" | 
"accessibility" | "microphone") { - if (pane === "screen") - return "x-apple.systempreferences:com.apple.preference.security?Privacy_ScreenCapture"; - if (pane === "microphone") - return "x-apple.systempreferences:com.apple.preference.security?Privacy_Microphone"; - return "x-apple.systempreferences:com.apple.preference.security?Privacy_Accessibility"; -} - -function isAutoRecordingPath(filePath: string) { - return path.basename(filePath).startsWith(AUTO_RECORDING_PREFIX); -} - -function getTelemetryPathForVideo(videoPath: string) { - return `${videoPath}.cursor.json`; -} - -async function loadRecordingsDirectorySetting() { - if (recordingsDirLoaded) { - return; - } - - recordingsDirLoaded = true; - - try { - const content = await fs.readFile(RECORDINGS_SETTINGS_FILE, "utf-8"); - const parsed = JSON.parse(content) as { recordingsDir?: unknown }; - if (typeof parsed.recordingsDir === "string" && parsed.recordingsDir.trim()) { - customRecordingsDir = path.resolve(parsed.recordingsDir); - } - } catch { - customRecordingsDir = null; - } -} - -async function getRecordingsDir() { - await loadRecordingsDirectorySetting(); - const targetDir = customRecordingsDir ?? 
RECORDINGS_DIR; - await fs.mkdir(targetDir, { recursive: true }); - return targetDir; -} - -function recordNativeCaptureDiagnostics(diagnostics: Omit) { - lastNativeCaptureDiagnostics = { - timestamp: new Date().toISOString(), - ...diagnostics, - }; - - return lastNativeCaptureDiagnostics; -} - -async function getFileSizeIfPresent(filePath: string | null | undefined) { - if (!filePath) { - return null; - } - - try { - const stat = await fs.stat(filePath); - return stat.size; - } catch { - return null; - } -} - -function parseFfmpegDurationSeconds(stderr: string) { - const match = stderr.match(/Duration:\s+(\d+):(\d+):(\d+(?:\.\d+)?)/i); - if (!match) { - return null; - } - - const hours = Number(match[1]); - const minutes = Number(match[2]); - const seconds = Number(match[3]); - if (![hours, minutes, seconds].every(Number.isFinite)) { - return null; - } - - return hours * 3600 + minutes * 60 + seconds; -} - -type CompanionAudioCandidate = { - platform: (typeof COMPANION_AUDIO_LAYOUTS)[number]["platform"]; - systemPath: string; - micPath: string; - usablePaths: string[]; -}; - -async function getUsableCompanionAudioCandidates( - videoPath: string, -): Promise { - const basePath = videoPath.replace(/\.[^.]+$/u, ""); - const candidates: CompanionAudioCandidate[] = []; - - for (const layout of COMPANION_AUDIO_LAYOUTS) { - const systemPath = `${basePath}${layout.systemSuffix}`; - const micPath = `${basePath}${layout.micSuffix}`; - const usablePaths: string[] = []; - - for (const companionPath of [systemPath, micPath]) { - try { - const stat = await fs.stat(companionPath); - if (stat.size > 0) { - usablePaths.push(companionPath); - } - } catch { - // Missing companion audio is expected for many recordings. 
- } - } - - if (usablePaths.length > 0) { - candidates.push({ - platform: layout.platform, - systemPath, - micPath, - usablePaths, - }); - } - } - - return candidates; -} - -async function hasEmbeddedAudioStream(videoPath: string) { - const ffmpegPath = getFfmpegBinaryPath(); - let stderr = ""; - - try { - const result = await execFileAsync( - ffmpegPath, - ["-hide_banner", "-i", videoPath, "-map", "0:a:0", "-frames:a", "1", "-f", "null", "-"], - { timeout: 20000, maxBuffer: 10 * 1024 * 1024 }, - ); - stderr = result.stderr; - } catch (error) { - stderr = (error as NodeJS.ErrnoException & { stderr?: string }).stderr ?? ""; - } - - return /Stream #.*Audio:/i.test(stderr); -} - -async function getCompanionAudioFallbackPaths(videoPath: string) { - const companionCandidates = await getUsableCompanionAudioCandidates(videoPath); - if (companionCandidates.length === 0) { - return []; - } - - if (await hasEmbeddedAudioStream(videoPath)) { - return []; - } - - return companionCandidates.flatMap((candidate) => candidate.usablePaths); -} - -async function validateRecordedVideo(videoPath: string) { - const stat = await fs.stat(videoPath); - if (!stat.isFile()) { - throw new Error(`Recorded output is not a file: ${videoPath}`); - } - - if (stat.size <= 0) { - throw new Error(`Recorded output is empty: ${videoPath}`); - } - - const ffmpegPath = getFfmpegBinaryPath(); - let stderr = ""; - - try { - const result = await execFileAsync( - ffmpegPath, - ["-hide_banner", "-i", videoPath, "-map", "0:v:0", "-frames:v", "1", "-f", "null", "-"], - { timeout: 20000, maxBuffer: 10 * 1024 * 1024 }, - ); - stderr = result.stderr; - } catch (error) { - const execError = error as NodeJS.ErrnoException & { stderr?: string }; - const output = execError.stderr?.trim(); - throw new Error(output || `Recorded output could not be decoded: ${videoPath}`); - } - - if (!/Stream #.*Video:/i.test(stderr)) { - throw new Error(`Recorded output does not contain a readable video stream: ${videoPath}`); - } - - 
const durationSeconds = parseFfmpegDurationSeconds(stderr); - if (durationSeconds !== null && durationSeconds <= 0) { - throw new Error(`Recorded output has an invalid duration: ${videoPath}`); - } - - return { - fileSizeBytes: stat.size, - durationSeconds, - }; -} - -async function getProjectsDir() { - const projectsDir = path.join(await getRecordingsDir(), PROJECTS_DIRECTORY_NAME); - await fs.mkdir(projectsDir, { recursive: true }); - return projectsDir; -} - -async function persistRecordingsDirectorySetting(nextDir: string) { - customRecordingsDir = path.resolve(nextDir); - recordingsDirLoaded = true; - await fs.writeFile( - RECORDINGS_SETTINGS_FILE, - JSON.stringify({ recordingsDir: customRecordingsDir }, null, 2), - "utf-8", - ); -} - -function hasProjectFileExtension(filePath: string) { - const extension = path.extname(filePath).replace(/^\./, "").toLowerCase(); - return [PROJECT_FILE_EXTENSION, ...LEGACY_PROJECT_FILE_EXTENSIONS].includes(extension); -} - -function getProjectThumbnailPath(projectPath: string) { - return `${projectPath}${PROJECT_THUMBNAIL_SUFFIX}`; -} - -async function saveProjectThumbnail(projectPath: string, thumbnailDataUrl?: string | null) { - const thumbnailPath = getProjectThumbnailPath(projectPath); - if (!thumbnailDataUrl) { - await fs.rm(thumbnailPath, { force: true }).catch(() => undefined); - return null; - } - - const match = thumbnailDataUrl.match(/^data:image\/png;base64,(.+)$/); - if (!match) { - throw new Error("Project thumbnail must be a PNG data URL."); - } - - await fs.writeFile(thumbnailPath, Buffer.from(match[1], "base64")); - return thumbnailPath; -} - -async function loadRecentProjectPaths() { - try { - const content = await fs.readFile(RECENT_PROJECTS_FILE, "utf-8"); - const parsed = JSON.parse(content) as { paths?: unknown }; - return Array.isArray(parsed.paths) - ? 
parsed.paths.filter( - (value): value is string => - typeof value === "string" && value.trim().length > 0, - ) - : []; - } catch { - return []; - } -} - -async function saveRecentProjectPaths(paths: string[]) { - const normalizedPaths = Array.from(new Set(paths.map((value) => normalizePath(value)))).slice( - 0, - MAX_RECENT_PROJECTS, - ); - await fs.writeFile( - RECENT_PROJECTS_FILE, - JSON.stringify({ paths: normalizedPaths }, null, 2), - "utf-8", - ); -} - -async function rememberRecentProject(projectPath: string) { - if (!hasProjectFileExtension(projectPath)) { - return; - } - - const existingPaths = await loadRecentProjectPaths(); - await saveRecentProjectPaths([projectPath, ...existingPaths]); -} - -async function buildProjectLibraryEntry( - projectPath: string, - projectsDir: string, -): Promise { - try { - const normalizedPath = normalizePath(projectPath); - if (!hasProjectFileExtension(normalizedPath)) { - return null; - } - - const stats = await fs.stat(normalizedPath); - if (!stats.isFile()) { - return null; - } - - const thumbnailPath = getProjectThumbnailPath(normalizedPath); - const thumbnailExists = await fs - .access(thumbnailPath, fsConstants.R_OK) - .then(() => true) - .catch(() => false); - - return { - path: normalizedPath, - name: path.basename(normalizedPath).replace(/\.(recordly|openscreen)$/i, ""), - updatedAt: stats.mtimeMs, - thumbnailPath: thumbnailExists ? 
thumbnailPath : null, - isCurrent: Boolean( - currentProjectPath && normalizePath(currentProjectPath) === normalizedPath, - ), - isInProjectsDirectory: path.dirname(normalizedPath) === normalizePath(projectsDir), - }; - } catch { - return null; - } -} - -async function listProjectLibraryEntries() { - const projectsDir = await getProjectsDir(); - const projectPaths: string[] = []; - - try { - const entries = await fs.readdir(projectsDir, { withFileTypes: true }); - for (const entry of entries) { - if (!entry.isFile()) { - continue; - } - - const entryPath = path.join(projectsDir, entry.name); - if (hasProjectFileExtension(entryPath)) { - projectPaths.push(entryPath); - } - } - } catch { - // Ignore directory read failures and fall back to recent files. - } - - const recentProjectPaths = await loadRecentProjectPaths(); - const candidatePaths = Array.from(new Set([...projectPaths, ...recentProjectPaths])); - const entries = ( - await Promise.all( - candidatePaths.map((candidatePath) => - buildProjectLibraryEntry(candidatePath, projectsDir), - ), - ) - ) - .filter((entry): entry is ProjectLibraryEntry => entry != null) - .sort((left, right) => right.updatedAt - left.updatedAt); - - await saveRecentProjectPaths(entries.map((entry) => entry.path)); - - return { - projectsDir, - entries, - }; -} - -async function loadProjectFromPath(projectPath: string) { - const normalizedPath = normalizePath(projectPath); - const content = await fs.readFile(normalizedPath, "utf-8"); - const project = JSON.parse(content); - const mediaSources = await resolveProjectMediaSources(project); - - if (!mediaSources.success) { - return { - success: false, - canceled: false, - message: mediaSources.message, - }; - } - - currentProjectPath = normalizedPath; - currentVideoPath = mediaSources.videoPath; - const projectObj = project as Record; - const editorObj = projectObj?.editor as Record | undefined; - const audioTracks = editorObj?.audioTracks as { sourcePath?: unknown }[] | undefined; - const 
approvedProjectPaths: Array = [ - mediaSources.videoPath, - mediaSources.webcamPath, - ]; - if (Array.isArray(audioTracks)) { - for (const track of audioTracks) { - if (typeof track?.sourcePath === "string") { - approvedProjectPaths.push(track.sourcePath); - } - } - } - await replaceApprovedSessionLocalReadPaths(approvedProjectPaths); - currentRecordingSession = { - videoPath: mediaSources.videoPath, - webcamPath: mediaSources.webcamPath, - timeOffsetMs: 0, - }; - await rememberRecentProject(normalizedPath); - - return { - success: true, - path: normalizedPath, - project, - }; -} - -function normalizeVideoSourcePath(videoPath?: string | null): string | null { - if (typeof videoPath !== "string") { - return null; - } - - const trimmed = videoPath.trim(); - if (!trimmed) { - return null; - } - - if (/^file:\/\//i.test(trimmed)) { - try { - return fileURLToPath(trimmed); - } catch { - // Fall through and keep best-effort string path below. - } - } - - return trimmed; -} - -function isPathInsideDirectory(candidatePath: string, directoryPath: string) { - const normalizedDirectoryPath = normalizePath(directoryPath); - return ( - candidatePath === normalizedDirectoryPath || - candidatePath.startsWith(`${normalizedDirectoryPath}${path.sep}`) - ); -} - -function isAllowedLocalReadPath(candidatePath: string) { - const allowedPrefixes = [RECORDINGS_DIR, USER_DATA_PATH, getAssetRootPath(), app.getPath("temp")]; - - return ( - existsSync(candidatePath) || - allowedPrefixes.some((prefix) => isPathInsideDirectory(candidatePath, prefix)) || - approvedLocalReadPaths.has(candidatePath) - ); -} - -async function rememberApprovedLocalReadPath(filePath?: string | null) { - const normalizedPath = normalizeVideoSourcePath(filePath); - if (!normalizedPath) { - return; - } - - const resolvedPath = normalizePath(normalizedPath); - approvedLocalReadPaths.add(resolvedPath); - - try { - approvedLocalReadPaths.add(await fs.realpath(resolvedPath)); - } catch { - // Ignore missing files; the 
eventual read will surface the real error. - } -} - -async function replaceApprovedSessionLocalReadPaths(filePaths: Array) { - approvedLocalReadPaths.clear(); - await Promise.all(filePaths.map((filePath) => rememberApprovedLocalReadPath(filePath))); -} - -async function resolveProjectMediaSources(project: unknown): Promise< - | { - success: true; - videoPath: string; - webcamPath: string | null; - } - | { - success: false; - message: string; - } -> { - if (!project || typeof project !== "object") { - return { success: false, message: "Invalid project file format" }; - } - - const rawVideoPath = (project as { videoPath?: unknown }).videoPath; - if (typeof rawVideoPath !== "string") { - return { success: false, message: "Project file is missing a video path" }; - } - - const normalizedVideoPath = normalizeVideoSourcePath(rawVideoPath); - if (!normalizedVideoPath) { - return { success: false, message: "Project file is missing a valid video path" }; - } - - try { - await fs.access(normalizedVideoPath, fsConstants.F_OK); - } catch { - return { - success: false, - message: `Project video file not found: ${normalizedVideoPath}`, - }; - } - - const rawWebcamPath = - typeof (project as { editor?: { webcam?: { sourcePath?: unknown } } }).editor?.webcam - ?.sourcePath === "string" - ? ((project as { editor?: { webcam?: { sourcePath?: string } } }).editor?.webcam - ?.sourcePath ?? 
null) - : null; - const normalizedWebcamPath = normalizeVideoSourcePath(rawWebcamPath); - - if (!normalizedWebcamPath) { - return { - success: true, - videoPath: normalizedVideoPath, - webcamPath: null, - }; - } - - try { - await fs.access(normalizedWebcamPath, fsConstants.F_OK); - return { - success: true, - videoPath: normalizedVideoPath, - webcamPath: normalizedWebcamPath, - }; - } catch { - return { - success: true, - videoPath: normalizedVideoPath, - webcamPath: null, - }; - } -} - -function getRecordingSessionManifestPath(videoPath: string) { - const extension = path.extname(videoPath); - const baseName = path.basename(videoPath, extension); - return path.join(path.dirname(videoPath), `${baseName}${RECORDING_SESSION_MANIFEST_SUFFIX}`); -} - -async function persistRecordingSessionManifest(session: RecordingSessionData): Promise { - const normalizedVideoPath = normalizeVideoSourcePath(session.videoPath); - if (!normalizedVideoPath) { - return; - } - - const normalizedWebcamPath = normalizeVideoSourcePath(session.webcamPath ?? 
null); - const manifestPath = getRecordingSessionManifestPath(normalizedVideoPath); - - if (!normalizedWebcamPath) { - await fs.rm(manifestPath, { force: true }); - return; - } - - const manifest: RecordingSessionManifest = { - version: 2, - videoFileName: path.basename(normalizedVideoPath), - webcamFileName: path.basename(normalizedWebcamPath), - timeOffsetMs: normalizeRecordingTimeOffsetMs(session.timeOffsetMs), - }; - - await fs.writeFile(manifestPath, JSON.stringify(manifest, null, 2), "utf-8"); -} - -async function resolveRecordingSessionManifest( - videoPath?: string | null, -): Promise { - const normalizedVideoPath = normalizeVideoSourcePath(videoPath); - if (!normalizedVideoPath) { - return null; - } - - const manifestPath = getRecordingSessionManifestPath(normalizedVideoPath); - - try { - const content = await fs.readFile(manifestPath, "utf-8"); - const parsed = JSON.parse(content) as Partial; - if (parsed.version !== 1 && parsed.version !== 2) { - return null; - } - - const webcamFileName = - typeof parsed.webcamFileName === "string" && parsed.webcamFileName.trim() - ? 
parsed.webcamFileName.trim() - : null; - - if (!webcamFileName) { - return { - videoPath: normalizedVideoPath, - webcamPath: null, - timeOffsetMs: 0, - }; - } - - const webcamPath = path.join(path.dirname(normalizedVideoPath), webcamFileName); - await fs.access(webcamPath, fsConstants.F_OK); - - return { - videoPath: normalizedVideoPath, - webcamPath, - timeOffsetMs: normalizeRecordingTimeOffsetMs(parsed.timeOffsetMs), - }; - } catch { - return null; - } -} - -async function resolveLinkedWebcamPath(videoPath?: string | null): Promise { - const normalizedVideoPath = normalizeVideoSourcePath(videoPath); - if (!normalizedVideoPath) { - return null; - } - - const extension = path.extname(normalizedVideoPath); - const baseName = path.basename(normalizedVideoPath, extension); - if (!baseName || baseName.endsWith("-webcam")) { - return null; - } - - const candidateExtensions = Array.from( - new Set([extension, ".webm", ".mp4", ".mov", ".mkv", ".avi"].filter(Boolean)), - ); - - for (const candidateExtension of candidateExtensions) { - const candidatePath = path.join( - path.dirname(normalizedVideoPath), - `${baseName}-webcam${candidateExtension}`, - ); - - try { - await fs.access(candidatePath, fsConstants.F_OK); - return candidatePath; - } catch { - continue; - } - } - - return null; -} - -async function resolveRecordingSession( - videoPath?: string | null, -): Promise { - const manifestSession = await resolveRecordingSessionManifest(videoPath); - if (manifestSession) { - return manifestSession; - } - - const normalizedVideoPath = normalizeVideoSourcePath(videoPath); - if (!normalizedVideoPath) { - return null; - } - - const linkedWebcamPath = await resolveLinkedWebcamPath(normalizedVideoPath); - return { - videoPath: normalizedVideoPath, - webcamPath: linkedWebcamPath, - }; -} - -async function hasSiblingProjectFile(videoPath: string) { - const baseName = path.basename(videoPath, path.extname(videoPath)); - const candidateExtensions = [PROJECT_FILE_EXTENSION, 
...LEGACY_PROJECT_FILE_EXTENSIONS];

  // Look for a sibling project file next to the video (same base name, any known extension).
  for (const extension of candidateExtensions) {
    const projectPath = path.join(path.dirname(videoPath), `${baseName}.${extension}`);

    try {
      await fs.access(projectPath);
      return true;
    } catch {
      // File does not exist (or is unreadable) — try the next extension.
      continue;
    }
  }

  return false;
}

/**
 * Delete stale auto-named recordings ("recording-*.mp4|mov|webm") from the recordings dir.
 * A file survives pruning when it is the currently open video, explicitly exempted,
 * has a sibling project file, or is both recent enough and within the retention count.
 * Companion telemetry and audio side-files are removed together with the video.
 */
async function pruneAutoRecordings(exemptPaths: string[] = []) {
  const recordingsDir = await getRecordingsDir();
  // Normalize every exempt path so comparisons are platform-consistent.
  const exempt = new Set(
    [currentVideoPath, ...exemptPaths]
      .filter((value): value is string => Boolean(value))
      .map((value) => normalizePath(value)),
  );

  const entries = await fs.readdir(recordingsDir, { withFileTypes: true });
  const autoRecordingStats = await Promise.all(
    entries
      .filter((entry) => entry.isFile() && /^recording-.*\.(mp4|mov|webm)$/i.test(entry.name))
      .map(async (entry) => {
        const filePath = path.join(recordingsDir, entry.name);
        const stats = await fs.stat(filePath);
        return { filePath, stats };
      }),
  );

  // Newest first, so the retention-count cutoff below keeps the most recent files.
  const sorted = autoRecordingStats.sort(
    (left, right) => right.stats.mtimeMs - left.stats.mtimeMs,
  );
  const now = Date.now();

  for (const [index, entry] of sorted.entries()) {
    const normalizedFilePath = normalizePath(entry.filePath);
    if (exempt.has(normalizedFilePath)) {
      continue;
    }

    // Recordings the user saved into a project are kept regardless of age.
    if (await hasSiblingProjectFile(entry.filePath)) {
      continue;
    }

    const tooOld = now - entry.stats.mtimeMs > AUTO_RECORDING_MAX_AGE_MS;
    const overLimit = index >= AUTO_RECORDING_RETENTION_COUNT;
    if (!tooOld && !overLimit) {
      continue;
    }

    try {
      await fs.rm(entry.filePath, { force: true });
      await fs.rm(getTelemetryPathForVideo(entry.filePath), { force: true });
      // Clean up companion audio files left from recording (macOS .m4a, Windows .wav)
      const base = entry.filePath.replace(/\.(mp4|mov|webm)$/i, "");
      for (const suffix of [
        ".system.m4a",
        ".mic.m4a",
        ".system.wav",
        ".mic.wav",
        ".mic.webm",
        ".system.webm",
      ]) {
        // Best effort: a missing companion file is not an error.
        await fs.rm(base + suffix, { force: true }).catch(() => undefined);
      }
    }
    catch (error) {
      console.warn("Failed to prune old auto recording:", entry.filePath, error);
    }
  }
}

/**
 * Resolve a path within the app bundle, handling asar unpacking in production.
 * Files listed in asarUnpack are extracted to app.asar.unpacked/ and must be
 * accessed via that path instead of the asar virtual filesystem.
 */
function resolveUnpackedAppPath(...segments: string[]) {
  const base = app.getAppPath();
  const resolved = path.join(base, ...segments);
  if (app.isPackaged) {
    // Rewrite ".asar/" to ".asar.unpacked/" (first occurrence only).
    return resolved.replace(/\.asar([/\\])/, ".asar.unpacked$1");
  }
  return resolved;
}

// Swift source for the ScreenCaptureKit capture helper inside the (unpacked) bundle.
function getNativeCaptureHelperSourcePath() {
  return resolveUnpackedAppPath("electron", "native", "ScreenCaptureKitRecorder.swift");
}

// Platform/arch tag used to select the matching prebundled native binary directory.
function getNativeArchTag() {
  if (process.platform === "darwin") {
    return process.arch === "arm64" ? "darwin-arm64" : "darwin-x64";
  }

  if (process.platform === "win32") {
    return process.arch === "arm64" ? "win32-arm64" : "win32-x64";
  }

  if (process.platform === "linux") {
    return process.arch === "arm64" ? "linux-arm64" : "linux-x64";
  }

  // Fallback for uncommon platform/arch combinations.
  return `${process.platform}-${process.arch}`;
}

// Path of a helper binary shipped with the app under electron/native/bin/<arch-tag>/.
function getPrebundledNativeHelperPath(binaryName: string) {
  return resolveUnpackedAppPath("electron", "native", "bin", getNativeArchTag(), binaryName);
}

/**
 * Prefer a locally built Windows helper (node-gyp style build/Release output),
 * then the prebundled binary. When neither exists, the build-output path is
 * returned so the caller's error message points at the expected build location.
 */
function resolvePreferredWindowsNativeHelperPath(helperDirectory: string, binaryName: string) {
  const buildOutputPath = resolveUnpackedAppPath(
    "electron",
    "native",
    helperDirectory,
    "build",
    "Release",
    binaryName,
  );
  const prebundledPath = getPrebundledNativeHelperPath(binaryName);

  if (existsSync(buildOutputPath)) {
    return buildOutputPath;
  }

  if (existsSync(prebundledPath)) {
    return prebundledPath;
  }

  return buildOutputPath;
}

// Candidate whisper.cpp executable names, most specific first (list continues below).
function getBundledWhisperExecutableCandidates() {
  const binaryNames =
    process.platform === "win32"
      ?
["whisper-cli.exe", "whisper-cpp.exe", "whisper.exe", "main.exe"]
      : ["whisper-cli", "whisper-cpp", "whisper", "main"];

  return binaryNames.map((binaryName) => getPrebundledNativeHelperPath(binaryName));
}

// Compiled-helper locations under userData/native-tools (written by ensureSwiftHelperBinary).
function getNativeCaptureHelperBinaryPath() {
  return path.join(app.getPath("userData"), "native-tools", "recordly-screencapturekit-helper");
}

function getSystemCursorHelperSourcePath() {
  return resolveUnpackedAppPath("electron", "native", "SystemCursorAssets.swift");
}

function getSystemCursorHelperBinaryPath() {
  return path.join(app.getPath("userData"), "native-tools", "recordly-system-cursors");
}

function getNativeCursorMonitorSourcePath() {
  return resolveUnpackedAppPath("electron", "native", "NativeCursorMonitor.swift");
}

function getNativeCursorMonitorBinaryPath() {
  return path.join(app.getPath("userData"), "native-tools", "recordly-native-cursor-monitor");
}

function getNativeWindowListSourcePath() {
  return resolveUnpackedAppPath("electron", "native", "ScreenCaptureKitWindowList.swift");
}

function getNativeWindowListBinaryPath() {
  return path.join(app.getPath("userData"), "native-tools", "recordly-window-list");
}

// Memoized migration run. `Promise<void>` restores the type argument that was
// lost in this file (the migration resolves with no value).
let nativeHelperMigrationPromise: Promise<void> | null = null;

/**
 * Move helper binaries written under the old "openscreen-*" names to their
 * current "recordly-*" names. Best effort: a failed rename is logged, not fatal.
 */
async function migrateLegacyNativeHelperBinaries() {
  const legacyToCurrentPaths: Array<[string, string]> = [
    [
      path.join(app.getPath("userData"), "native-tools", "openscreen-screencapturekit-helper"),
      getNativeCaptureHelperBinaryPath(),
    ],
    [
      path.join(app.getPath("userData"), "native-tools", "openscreen-window-list"),
      getNativeWindowListBinaryPath(),
    ],
    [
      path.join(app.getPath("userData"), "native-tools", "openscreen-system-cursors"),
      getSystemCursorHelperBinaryPath(),
    ],
    [
      path.join(app.getPath("userData"), "native-tools", "openscreen-native-cursor-monitor"),
      getNativeCursorMonitorBinaryPath(),
    ],
  ];

  for (const [legacyPath, currentPath] of legacyToCurrentPaths) {
    // Skip when already migrated, or when there is nothing to migrate.
    if (legacyPath === currentPath || existsSync(currentPath) || !existsSync(legacyPath)) {
      continue;
    }

    try {
      await fs.mkdir(path.dirname(currentPath), { recursive: true });
      await fs.rename(legacyPath, currentPath);
    } catch (error) {
      console.warn("[native-tools] Failed to migrate helper binary", {
        legacyPath,
        currentPath,
        error,
      });
    }
  }
}

// Run the migration at most once per process; the memo is reset on failure so a
// later call can retry.
async function ensureNativeHelperMigration() {
  if (!nativeHelperMigrationPromise) {
    nativeHelperMigrationPromise = migrateLegacyNativeHelperBinaries().catch((error) => {
      nativeHelperMigrationPromise = null;
      throw error;
    });
  }

  return nativeHelperMigrationPromise;
}

// Shape of one entry emitted by the native macOS window-list helper (JSON).
type NativeMacWindowSource = {
  id: string;
  name: string;
  display_id?: string;
  appName?: string;
  windowTitle?: string;
  bundleId?: string;
  appIcon?: string | null;
  x?: number;
  y?: number;
  width?: number;
  height?: number;
};

// Short-lived cache for the (expensive) native window enumeration.
let cachedNativeMacWindowSources: NativeMacWindowSource[] | null = null;
let cachedNativeMacWindowSourcesAtMs = 0;

/**
 * Return a runnable helper binary: prefer the prebundled one, otherwise compile
 * the Swift source with swiftc into userData, recompiling when the source is
 * newer than the existing binary. Packaged builds cannot compile from source.
 */
async function ensureSwiftHelperBinary(
  sourcePath: string,
  binaryPath: string,
  label: string,
  prebundledBinaryName?: string,
) {
  if (prebundledBinaryName) {
    const prebundledPath = getPrebundledNativeHelperPath(prebundledBinaryName);
    try {
      await fs.access(prebundledPath, fsConstants.X_OK);
      return prebundledPath;
    } catch {
      // A packaged app has no compiler fallback, so a missing prebundled binary is fatal.
      if (app.isPackaged) {
        throw new Error(
          `${label} is missing from this app build (${prebundledPath}). 
Reinstall or update the app.`, - ); - } - } - } - - const helperDir = path.dirname(binaryPath); - - await fs.mkdir(helperDir, { recursive: true }); - - let shouldCompile = false; - try { - const [sourceStat, binaryStat] = await Promise.all([ - fs.stat(sourcePath), - fs.stat(binaryPath).catch(() => null), - ]); - shouldCompile = !binaryStat || sourceStat.mtimeMs > binaryStat.mtimeMs; - } catch (error) { - throw new Error(`${label} source is unavailable: ${String(error)}`); - } - - if (!shouldCompile) { - return binaryPath; - } - - const result = spawnSync("swiftc", ["-O", sourcePath, "-o", binaryPath], { - encoding: "utf8", - timeout: 120000, - }); - - if (result.status !== 0) { - const details = [result.stderr, result.stdout].filter(Boolean).join("\n").trim(); - throw new Error(details || `Failed to compile ${label}`); - } - - return binaryPath; -} - -async function ensureNativeCaptureHelperBinary() { - await ensureNativeHelperMigration(); - return ensureSwiftHelperBinary( - getNativeCaptureHelperSourcePath(), - getNativeCaptureHelperBinaryPath(), - "native ScreenCaptureKit helper", - "recordly-screencapturekit-helper", - ); -} - -async function ensureNativeWindowListBinary() { - await ensureNativeHelperMigration(); - return ensureSwiftHelperBinary( - getNativeWindowListSourcePath(), - getNativeWindowListBinaryPath(), - "native ScreenCaptureKit window list helper", - "recordly-window-list", - ); -} - -async function getNativeMacWindowSources(options?: { maxAgeMs?: number }) { - if (process.platform !== "darwin") { - return [] as NativeMacWindowSource[]; - } - - const maxAgeMs = options?.maxAgeMs ?? 
5000; - const now = Date.now(); - if (cachedNativeMacWindowSources && now - cachedNativeMacWindowSourcesAtMs < maxAgeMs) { - return cachedNativeMacWindowSources; - } - - const binaryPath = await ensureNativeWindowListBinary(); - const { stdout } = await execFileAsync(binaryPath, [], { - timeout: 30000, - maxBuffer: 10 * 1024 * 1024, - }); - - const parsed = JSON.parse(stdout); - if (!Array.isArray(parsed)) { - return [] as NativeMacWindowSource[]; - } - - const entries = parsed.filter((entry: unknown): entry is NativeMacWindowSource => { - if (!entry || typeof entry !== "object") { - return false; - } - - const candidate = entry as Partial; - return typeof candidate.id === "string" && typeof candidate.name === "string"; - }); - - cachedNativeMacWindowSources = entries; - cachedNativeMacWindowSourcesAtMs = now; - return entries; -} - -async function getSystemCursorAssets() { - if (process.platform !== "darwin") { - cachedSystemCursorAssets = {}; - cachedSystemCursorAssetsSourceMtimeMs = null; - return cachedSystemCursorAssets; - } - - await ensureNativeHelperMigration(); - - const sourcePath = getSystemCursorHelperSourcePath(); - const sourceStat = await fs.stat(sourcePath); - if (cachedSystemCursorAssets && cachedSystemCursorAssetsSourceMtimeMs === sourceStat.mtimeMs) { - return cachedSystemCursorAssets; - } - - const binaryPath = await ensureSwiftHelperBinary( - sourcePath, - getSystemCursorHelperBinaryPath(), - "system cursor helper", - "recordly-system-cursors", - ); - - const { stdout } = await execFileAsync(binaryPath, [], { - timeout: 15000, - maxBuffer: 20 * 1024 * 1024, - }); - const parsed = JSON.parse(stdout) as Record>; - cachedSystemCursorAssets = Object.fromEntries( - Object.entries(parsed).filter( - ([, asset]) => - typeof asset?.dataUrl === "string" && - typeof asset?.hotspotX === "number" && - typeof asset?.hotspotY === "number" && - typeof asset?.width === "number" && - typeof asset?.height === "number", - ), - ) as Record; - 
cachedSystemCursorAssetsSourceMtimeMs = sourceStat.mtimeMs;

  return cachedSystemCursorAssets;
}

// Extract the numeric window id from a "window:<id>..." desktopCapturer source id.
function parseWindowId(sourceId?: string) {
  if (!sourceId) return null;
  const match = sourceId.match(/^window:(\d+)/);
  return match ? Number.parseInt(match[1], 10) : null;
}

// ffmpeg-static exports its binary path either directly or under `default`.
function loadFfmpegStatic() {
  const moduleExports = nodeRequire("ffmpeg-static");
  if (typeof moduleExports === "string") {
    return moduleExports;
  }

  if (typeof moduleExports?.default === "string") {
    return moduleExports.default as string;
  }

  return null;
}

type HookEventName = "mousedown" | "mouseup" | "mousemove";

// uiohook event payloads vary across versions; coordinates/button may be nested in `data`.
type HookMouseEvent = {
  button?: number;
  mouseButton?: number;
  x?: number;
  y?: number;
  screenX?: number;
  screenY?: number;
  data?: {
    button?: number;
    mouseButton?: number;
    x?: number;
    y?: number;
    screenX?: number;
    screenY?: number;
  };
};

type HookEventListener = (event: HookMouseEvent) => void;

// Minimal surface required from any uiohook-like implementation.
type UiohookLike = {
  on: (eventName: HookEventName, listener: HookEventListener) => void;
  off?: (eventName: HookEventName, listener: HookEventListener) => void;
  removeListener?: (eventName: HookEventName, listener: HookEventListener) => void;
  start: () => void;
  stop?: () => void;
};

// uiohook-napi has shipped its instance under several export names over time.
type UiohookModuleNamespace = {
  uIOhook?: UiohookLike;
  uiohook?: UiohookLike;
  Uiohook?: UiohookLike;
  default?: UiohookLike | UiohookModuleNamespace;
};

function isUiohookLike(value: unknown): value is UiohookLike {
  // Restored type argument (this file had a bare `Partial` after the generics were lost).
  const candidate = value as Partial<UiohookLike> | null;
  return typeof candidate?.on === "function" && typeof candidate?.start === "function";
}

// Find an ffmpeg on PATH using the platform's lookup command.
function resolveSystemFfmpegBinaryPath() {
  const locator = process.platform === 'win32' ? 'where' : 'which'
  const result = spawnSync(locator, ['ffmpeg'], {
    encoding: 'utf-8',
    windowsHide: true,
  })

  if (result.status !== 0) {
    return null
  }

  // `where` can print several matches; take the first non-empty line.
  const candidate = result.stdout
    .split(/\r?\n/)
    .map((line) => line.trim())
    .find((line) => line.length > 0)

  return candidate || null
}

// Try every export shape uiohook-napi has used (named, lowercase, class, default).
function loadUiohookModule() {
  const moduleExports = nodeRequire("uiohook-napi") as UiohookModuleNamespace;
  const defaultExport = moduleExports.default;

  if (moduleExports.uIOhook) {
    return moduleExports.uIOhook;
  }

  if (moduleExports.uiohook) {
    return moduleExports.uiohook;
  }

  if (moduleExports.Uiohook) {
    return moduleExports.Uiohook;
  }

  if (isUiohookLike(defaultExport)) {
    return defaultExport;
  }

  if (defaultExport?.uIOhook) {
    return defaultExport.uIOhook;
  }

  if (defaultExport?.uiohook) {
    return defaultExport.uiohook;
  }

  if (defaultExport?.Uiohook) {
    return defaultExport.Uiohook;
  }

  return null;
}

// Bundled ffmpeg-static first (asar-unpacked in production), then system PATH.
function getFfmpegBinaryPath() {
  const ffmpegStatic = loadFfmpegStatic()
  if (ffmpegStatic && typeof ffmpegStatic === 'string') {
    const bundledPath = app.isPackaged
      ? ffmpegStatic.replace(/\.asar([\/\\])/, '.asar.unpacked$1')
      : ffmpegStatic

    if (existsSync(bundledPath)) {
      return bundledPath
    }
  }

  const systemFfmpeg = resolveSystemFfmpegBinaryPath()
  if (systemFfmpeg) {
    return systemFfmpeg
  }

  throw new Error('FFmpeg binary is unavailable. 
Install ffmpeg-static for this platform or make ffmpeg available on PATH.')
}

/**
 * One in-flight native export: an ffmpeg child process encoding frames streamed
 * over stdin.
 * NOTE(review): the generic arguments on `ffmpegProcess` were lost when this
 * file was mangled; `ChildProcessByStdio` needs its stdio type arguments
 * restored from the spawn() call site (stdin piped, stderr piped).
 */
type NativeVideoExportSession = {
  ffmpegProcess: ChildProcessByStdio;
  outputPath: string;
  inputByteSize: number;            // bytes per raw frame; 0 in h264-stream mode
  inputMode: 'rawvideo' | 'h264-stream';
  maxQueuedWriteBytes: number;      // backpressure threshold for stdin writes
  stderrOutput: string;             // accumulated ffmpeg stderr for error reporting
  encoderName: string;
  processError: Error | null;
  stdinError: Error | null;
  terminating: boolean;
  writeSequence: Promise<void>;     // serializes frame writes; type arg restored
  completionPromise: Promise<void>; // NOTE(review): element type reconstructed — confirm
  sender: WebContents | null;
  pendingWriteRequestIds: Set<number>; // type arg restored (requestId is a number)
};

// Restored key/value type arguments (the mangled file had a bare `new Map()`).
const nativeVideoExportSessions = new Map<string, NativeVideoExportSession>();
let cachedNativeVideoEncoder: { ffmpegPath: string; encoderName: string } | null = null;

/** Force-kill every live export session (used on app shutdown). */
export function cleanupNativeVideoExportSessions() {
  for (const [sessionId, session] of nativeVideoExportSessions) {
    session.terminating = true;
    try {
      if (!session.ffmpegProcess.stdin.destroyed) {
        session.ffmpegProcess.stdin.destroy();
      }
    } catch {
      /* stream may already be closed */
    }
    try {
      session.ffmpegProcess.kill("SIGKILL");
    } catch {
      /* process may already be exited */
    }
    nativeVideoExportSessions.delete(sessionId);
  }
}

// Allow roughly four raw frames of queued stdin data, clamped to 16–64 MiB.
function getNativeVideoExportMaxQueuedWriteBytes(inputByteSize: number) {
  if (inputByteSize === 0) return 8 * 1024 * 1024; // H264 stream: variable-size chunks
  return Math.min(64 * 1024 * 1024, Math.max(16 * 1024 * 1024, inputByteSize * 4));
}

function isHardwareAcceleratedVideoEncoder(encoderName: string) {
  return /(videotoolbox|nvenc|qsv|amf|mf)/i.test(encoderName);
}

// Best-effort deletion of temp export artifacts; failures are intentionally ignored.
async function removeTemporaryExportFile(filePath: string | null | undefined) {
  if (!filePath) {
    return;
  }

  try {
    await fs.rm(filePath, { force: true });
  } catch {
    // Ignore cleanup failures for temp export artifacts.
  }
}

// Most specific error wins: stdin failure, then process failure, then stderr text.
function getNativeVideoExportSessionError(session: NativeVideoExportSession, fallback: string) {
  return (
    session.stdinError?.message ||
    session.processError?.message ||
    session.stderrOutput.trim() ||
    fallback
  );
}

// Notify the renderer of one write-frame outcome, if the sender is still alive.
function sendNativeVideoExportWriteFrameResult(
  sender: WebContents | null | undefined,
  sessionId: string,
  requestId: number,
  result: { success: boolean; error?: string },
) {
  if (!sender || sender.isDestroyed()) {
    return;
  }

  sender.send("native-video-export-write-frame-result", {
    sessionId,
    requestId,
    ...result,
  });
}

// Resolve one pending renderer write request and report its outcome.
function settleNativeVideoExportWriteFrameRequest(
  sessionId: string,
  session: NativeVideoExportSession,
  requestId: number,
  result: { success: boolean; error?: string },
) {
  session.pendingWriteRequestIds.delete(requestId);
  sendNativeVideoExportWriteFrameResult(session.sender, sessionId, requestId, result);
}

// Fail every still-pending write request (e.g. when the encoder dies mid-export).
function flushNativeVideoExportPendingWriteRequests(
  sessionId: string,
  session: NativeVideoExportSession,
  error: string,
) {
  for (const requestId of session.pendingWriteRequestIds) {
    sendNativeVideoExportWriteFrameResult(session.sender, sessionId, requestId, {
      success: false,
      error,
    });
  }

  session.pendingWriteRequestIds.clear();
}

// Errors expected during normal teardown (pipe closes, stream destroyed) are not failures.
function isIgnorableNativeVideoExportStreamError(error: Error | null | undefined): boolean {
  if (!error) {
    return false;
  }

  const errno = error as NodeJS.ErrnoException;
  return (
    errno.code === "EPIPE" ||
    errno.code === "ERR_STREAM_DESTROYED" ||
    /broken pipe|stream destroyed|eof/i.test(error.message)
  );
}

/**
 * Wait for ffmpeg's stdin to drain, bounded by a timeout; rejects if the
 * process closes or the stream errors first. No-op when nothing is queued or
 * the session has already failed.
 */
async function waitForNativeVideoExportDrain(session: NativeVideoExportSession) {
  if (
    session.stdinError ||
    session.processError ||
    session.ffmpegProcess.stdin.destroyed ||
    session.ffmpegProcess.stdin.writableEnded ||
    !session.ffmpegProcess.stdin.writable ||
    session.ffmpegProcess.stdin.writableLength <= 0
  ) {
    return;
  }

  // `<void>` restores the type argument lost in this file (resolve() takes no value).
  await new Promise<void>((resolve, reject) => {
const timeout = setTimeout(() => {
      cleanup();
      reject(
        new Error("Timed out while waiting for native export writer backpressure to clear"),
      );
    }, 15000);

    // Remove every listener and the timer, whichever event settles the promise first.
    const cleanup = () => {
      clearTimeout(timeout);
      session.ffmpegProcess.stdin.off("drain", handleDrain);
      session.ffmpegProcess.stdin.off("error", handleError);
      session.ffmpegProcess.off("close", handleClose);
    };

    const handleDrain = () => {
      cleanup();
      resolve();
    };

    const handleError = (error: Error) => {
      cleanup();
      reject(error);
    };

    const handleClose = () => {
      cleanup();
      reject(
        new Error(
          getNativeVideoExportSessionError(
            session,
            "Native video export writer closed before draining",
          ),
        ),
      );
    };

    session.ffmpegProcess.stdin.once("drain", handleDrain);
    session.ffmpegProcess.stdin.once("error", handleError);
    session.ffmpegProcess.once("close", handleClose);
  });
}

// Byte length of one incoming frame payload (Uint8Array view or raw ArrayBuffer).
function getNativeVideoExportFrameLength(frameData: Uint8Array | ArrayBuffer) {
  return frameData.byteLength;
}

/**
 * Write one frame to ffmpeg's stdin. Raw-video mode requires an exact frame
 * size; any stream/process failure is recorded on the session and rethrown.
 * When the queued stdin bytes exceed the session threshold, waits for drain.
 */
async function writeNativeVideoExportFrame(
  session: NativeVideoExportSession,
  frameData: Uint8Array | ArrayBuffer,
) {
  if (session.inputMode !== 'h264-stream' && getNativeVideoExportFrameLength(frameData) !== session.inputByteSize) {
    throw new Error(
      `Native video export expected ${session.inputByteSize} bytes per frame but received ${getNativeVideoExportFrameLength(frameData)}`,
    );
  }

  if (
    session.stdinError ||
    session.processError ||
    session.ffmpegProcess.stdin.destroyed ||
    session.ffmpegProcess.stdin.writableEnded ||
    !session.ffmpegProcess.stdin.writable
  ) {
    throw new Error(
      getNativeVideoExportSessionError(
        session,
        "Native video export encoder is not accepting frames",
      ),
    );
  }

  // Wrap without copying when given a typed-array view; copy an ArrayBuffer.
  const frameBuffer =
    frameData instanceof ArrayBuffer
      ? Buffer.from(frameData)
      : Buffer.from(frameData.buffer, frameData.byteOffset, frameData.byteLength);

  try {
    session.ffmpegProcess.stdin.write(frameBuffer);
  } catch (error) {
    session.stdinError = error instanceof Error ? error : new Error(String(error));
    throw session.stdinError;
  }

  if (session.ffmpegProcess.stdin.writableLength >= session.maxQueuedWriteBytes) {
    try {
      await waitForNativeVideoExportDrain(session);
    } catch (error) {
      session.stdinError = error instanceof Error ? error : new Error(String(error));
      throw session.stdinError;
    }
  }
}

/**
 * Chain a frame write onto the session's write sequence so writes stay ordered.
 * The stored sequence swallows failures so one bad frame does not poison later
 * writes; the caller still receives the rejection via the returned await.
 */
async function enqueueNativeVideoExportFrameWrite(
  session: NativeVideoExportSession,
  frameData: Uint8Array | ArrayBuffer,
) {
  const writePromise = session.writeSequence.then(async () => {
    if (session.terminating) {
      throw new Error("Native video export session was cancelled");
    }

    await writeNativeVideoExportFrame(session, frameData);
  });

  session.writeSequence = writePromise.catch(() => undefined);
  await writePromise;
}

// List encoder names this ffmpeg build advertises (`ffmpeg -encoders`).
async function getAvailableNativeVideoEncoders(ffmpegPath: string) {
  const { stdout } = await execFileAsync(ffmpegPath, ["-hide_banner", "-encoders"], {
    timeout: 15000,
    maxBuffer: 20 * 1024 * 1024,
  });

  return parseAvailableFfmpegEncoders(stdout);
}

/**
 * Verify an encoder actually works by encoding a tiny 64x64 test clip to a
 * temp file; resolves false on timeout or non-zero exit (continued below).
 */
async function probeNativeVideoEncoder(
  ffmpegPath: string,
  encoderName: string,
  encodingMode: NativeExportEncodingMode,
) {
  const outputPath = path.join(
    app.getPath("temp"),
    `recordly-export-probe-${Date.now()}-${Math.random().toString(36).slice(2, 8)}.mp4`,
  );
  const args = buildNativeVideoExportArgs(
    encoderName,
    {
      width: 64,
      height: 64,
      frameRate: 1,
      bitrate: 1_500_000,
      encodingMode,
    },
    outputPath,
  );

  return new Promise((resolve) => {
    // Note: this local `process` intentionally shadows the Node global inside
    // the executor; only child-process members are accessed below.
    const process = spawn(ffmpegPath, args, {
      stdio: ["pipe", "ignore", "pipe"],
    });
    let stderrOutput = "";
    const timeout = setTimeout(() => {
      try {
        process.kill("SIGKILL");
      } catch {
        // ignore
      }
resolve(false);
    }, 15000);

    process.stderr.on("data", (chunk: Buffer) => {
      stderrOutput += chunk.toString();
    });

    process.on("close", (code) => {
      clearTimeout(timeout);
      void removeTemporaryExportFile(outputPath);
      if (code !== 0 && stderrOutput.trim().length > 0) {
        console.warn(
          `[native-export] Encoder probe failed for ${encoderName}:`,
          stderrOutput.trim(),
        );
      }
      resolve(code === 0);
    });

    // Feed one zeroed frame and close stdin so ffmpeg can finish the probe encode.
    process.stdin.end(Buffer.alloc(getNativeVideoInputByteSize(64, 64), 0));
  });
}

/**
 * Pick the first preferred encoder that is both listed by this ffmpeg build and
 * passes a tiny probe encode, falling back to libx264.
 * NOTE(review): the cache is keyed only by ffmpegPath — a later call with a
 * different encodingMode reuses the cached encoder. Confirm that is intended.
 */
async function resolveNativeVideoEncoder(
  ffmpegPath: string,
  encodingMode: NativeExportEncodingMode,
) {
  if (cachedNativeVideoEncoder?.ffmpegPath === ffmpegPath) {
    return cachedNativeVideoEncoder.encoderName;
  }

  const availableEncoders = await getAvailableNativeVideoEncoders(ffmpegPath);
  const candidates = [
    ...new Set([...getPreferredNativeVideoEncoders(process.platform), "libx264"]),
  ];

  for (const encoderName of candidates) {
    if (!availableEncoders.has(encoderName)) {
      continue;
    }

    if (await probeNativeVideoEncoder(ffmpegPath, encoderName, encodingMode)) {
      cachedNativeVideoEncoder = { ffmpegPath, encoderName };
      return encoderName;
    }
  }

  throw new Error("No usable FFmpeg encoder was available for native export");
}

/**
 * Attach an audio track to an exported video (stream-copying the video):
 * - "none": return the input path untouched;
 * - "edited-track": write the renderer-supplied audio to a temp file and mux it;
 * - "trim-source": mux the source audio, through a trim filter when segments exist.
 * On success the input video is deleted and the "-final.mp4" path is returned.
 */
async function muxNativeVideoExportAudio(
  videoPath: string,
  options: NativeVideoExportFinishOptions,
) {
  const audioMode = options.audioMode ?? "none";
  if (audioMode === "none") {
    return videoPath;
  }

  const ffmpegPath = getFfmpegBinaryPath();
  const tempArtifacts: string[] = [];
  let audioInputPath = options.audioSourcePath ?? null;

  if (audioMode === "edited-track") {
    if (!options.editedAudioData) {
      throw new Error("Edited audio data is missing for native export");
    }

    const extension = getEditedAudioExtension(options.editedAudioMimeType);
    audioInputPath = path.join(
      app.getPath("temp"),
      `recordly-export-audio-${Date.now()}-${Math.random().toString(36).slice(2, 8)}${extension}`,
    );
    await fs.writeFile(audioInputPath, Buffer.from(options.editedAudioData));
    tempArtifacts.push(audioInputPath);
  }

  if (!audioInputPath) {
    return videoPath;
  }

  const outputPath = path.join(
    path.dirname(videoPath),
    `${path.basename(videoPath, path.extname(videoPath))}-final.mp4`,
  );

  const args = [
    "-y",
    "-hide_banner",
    "-loglevel",
    "error",
    "-i",
    videoPath,
    "-i",
    audioInputPath,
  ];

  if (audioMode === "trim-source") {
    const filter = buildTrimmedSourceAudioFilter(options.trimSegments ?? []);
    if (filter) {
      args.push("-filter_complex", filter, "-map", "0:v:0", "-map", "[aout]");
    } else {
      args.push("-map", "0:v:0", "-map", "1:a:0");
    }
  } else {
    args.push("-map", "0:v:0", "-map", "1:a:0");
  }

  args.push(
    "-c:v",
    "copy",
    "-c:a",
    "aac",
    "-b:a",
    "192k",
    "-shortest",
    "-movflags",
    "+faststart",
    outputPath,
  );

  try {
    await execFileAsync(ffmpegPath, args, {
      timeout: 15 * 60 * 1000,
      maxBuffer: 20 * 1024 * 1024,
    });
    await removeTemporaryExportFile(videoPath);
    return outputPath;
  } finally {
    await Promise.allSettled(
      tempArtifacts.map((artifactPath) => removeTemporaryExportFile(artifactPath)),
    );
  }
}

/**
 * Buffer-in/buffer-out wrapper around muxNativeVideoExportAudio for callers
 * that never touch disk themselves. All temp artifacts are cleaned up.
 * NOTE(review): the ".muxed.mp4" path removed below is never produced by
 * muxNativeVideoExportAudio (it writes "-final.mp4"); kept as a safety sweep.
 */
async function muxExportedVideoAudioBuffer(
  videoData: ArrayBuffer,
  options: NativeVideoExportFinishOptions,
) {
  const tempVideoPath = path.join(
    app.getPath('temp'),
    `recordly-export-video-${Date.now()}-${Math.random().toString(36).slice(2, 8)}.mp4`,
  )

  try {
    await fs.writeFile(tempVideoPath, Buffer.from(videoData))
    const finalizedPath = await muxNativeVideoExportAudio(tempVideoPath, options)
    const muxedData = await fs.readFile(finalizedPath)
    return new Uint8Array(muxedData)
  } finally {
    await Promise.allSettled([
      removeTemporaryExportFile(tempVideoPath),
      removeTemporaryExportFile(`${tempVideoPath}.muxed.mp4`),
      removeTemporaryExportFile(
        path.join(
          path.dirname(tempVideoPath),
          `${path.basename(tempVideoPath, path.extname(tempVideoPath))}-final.mp4`,
        ),
      ),
    ])
  }
}

/** Probe the duration of a media file (in seconds) using the container header. */
// `Promise<number>` restores the return-type argument lost in this file.
async function probeMediaDurationSeconds(filePath: string): Promise<number> {
  const ffmpegPath = getFfmpegBinaryPath();
  try {
    // `ffmpeg -i` with no output always exits non-zero; the duration is on stderr.
    await execFileAsync(ffmpegPath, ["-i", filePath, "-hide_banner"], { timeout: 5000 });
  } catch (error) {
    const stderr = (error as NodeJS.ErrnoException & { stderr?: string })?.stderr ?? "";
    const match = stderr.match(/Duration:\s*(\d{2}):(\d{2}):(\d{2})\.(\d{2,3})/);
    if (match) {
      const h = Number(match[1]);
      const m = Number(match[2]);
      const s = Number(match[3]);
      const frac = Number(match[4]) / (match[4].length === 3 ? 
1000 : 100);
      return h * 3600 + m * 60 + s + frac;
    }
  }
  return 0;
}

// How exported audio is realigned to the exported video's duration.
type AudioSyncAdjustment = {
  mode: "none" | "tempo" | "delay";
  delayMs: number;
  tempoRatio: number;
  durationDeltaMs: number;
};

/**
 * Decompose a tempo ratio into a chain of ffmpeg `atempo` filters, keeping each
 * step within the filter's supported 0.5–2.0 range; ratios ≈1 yield no filters.
 */
function buildAtempoFilters(tempoRatio: number) {
  if (!Number.isFinite(tempoRatio) || tempoRatio <= 0) {
    return [];
  }

  const filters: string[] = [];
  let remaining = tempoRatio;

  while (remaining < 0.5) {
    filters.push("atempo=0.5");
    remaining /= 0.5;
  }

  while (remaining > 2) {
    filters.push("atempo=2.0");
    remaining /= 2.0;
  }

  if (Math.abs(remaining - 1) > 0.0005) {
    filters.push(`atempo=${remaining.toFixed(6)}`);
  }

  return filters;
}

/**
 * Decide how to reconcile a video/audio duration mismatch: within 50 ms → none;
 * small or negative drift → tempo-stretch the audio; large positive drift →
 * delay the audio start instead.
 */
function getAudioSyncAdjustment(videoDuration: number, audioDuration: number): AudioSyncAdjustment {
  if (
    !Number.isFinite(videoDuration) ||
    !Number.isFinite(audioDuration) ||
    videoDuration <= 0 ||
    audioDuration <= 0
  ) {
    return { mode: "none", delayMs: 0, tempoRatio: 1, durationDeltaMs: 0 };
  }

  const durationDeltaMs = Math.round((videoDuration - audioDuration) * 1000);
  const absDeltaMs = Math.abs(durationDeltaMs);
  if (absDeltaMs <= 50) {
    return { mode: "none", delayMs: 0, tempoRatio: 1, durationDeltaMs };
  }

  // Clamp to atempo's supported range.
  const tempoRatio = Math.max(0.5, Math.min(2, audioDuration / videoDuration));
  const relativeDelta = absDeltaMs / Math.max(videoDuration * 1000, 1);

  if (relativeDelta <= 0.03 || absDeltaMs <= 1500 || durationDeltaMs < 0) {
    return { mode: "tempo", delayMs: 0, tempoRatio, durationDeltaMs };
  }

  return { mode: "delay", delayMs: durationDeltaMs, tempoRatio: 1, durationDeltaMs };
}

// Append one "[in]…[out]" audio filter chain implementing the given adjustment.
function appendSyncedAudioFilter(
  filterParts: string[],
  inputLabel: string,
  outputLabel: string,
  adjustment: AudioSyncAdjustment,
) {
  const filters: string[] = [];

  if (adjustment.mode === "delay" && adjustment.delayMs > 0) {
    filters.push(`adelay=${adjustment.delayMs}|${adjustment.delayMs}`);
  }

  if (adjustment.mode === "tempo") {
    filters.push(...buildAtempoFilters(adjustment.tempoRatio));
  }

  filters.push("aresample=async=1:first_pts=0", "asetpts=PTS-STARTPTS");
  filterParts.push(`${inputLabel}${filters.join(",")}[${outputLabel}]`);
}

// Push one model-download progress event to the renderer.
function sendWhisperModelDownloadProgress(
  webContents: Electron.WebContents,
  payload: {
    status: "idle" | "downloading" | "downloaded" | "error";
    progress: number;
    path?: string | null;
    error?: string;
  },
) {
  webContents.send("whisper-small-model-download-progress", payload);
}

// Report whether the small Whisper model file exists and is readable.
async function getWhisperSmallModelStatus() {
  try {
    await fs.access(WHISPER_SMALL_MODEL_PATH, fsConstants.R_OK);
    return {
      success: true,
      exists: true,
      path: WHISPER_SMALL_MODEL_PATH,
    };
  } catch {
    return {
      success: true,
      exists: false,
      path: null,
    };
  }
}

/**
 * Stream a URL to disk, following up to 5 redirects and reporting percentage
 * progress when Content-Length is known.
 * The `Promise<void>` annotations restore type arguments lost in this file.
 */
function downloadFileWithProgress(
  url: string,
  destinationPath: string,
  onProgress: (progress: number) => void,
): Promise<void> {
  const request = (currentUrl: string, redirectCount = 0): Promise<void> => {
    return new Promise<void>((resolve, reject) => {
      const req = httpsGet(currentUrl, (response) => {
        const statusCode = response.statusCode ?? 0;
        const location = response.headers.location;

        if (statusCode >= 300 && statusCode < 400 && location) {
          response.resume();
          if (redirectCount >= 5) {
            reject(new Error("Too many redirects while downloading Whisper model."));
            return;
          }

          // Resolve relative redirect targets against the current URL.
          const nextUrl = new URL(location, currentUrl).toString();
          void request(nextUrl, redirectCount + 1)
            .then(resolve)
            .catch(reject);
          return;
        }

        if (statusCode < 200 || statusCode >= 300) {
          response.resume();
          reject(new Error(`Whisper model download failed with status ${statusCode}.`));
          return;
        }

        const totalBytes = Number.parseInt(
          String(response.headers["content-length"] ?? 
"0"),
          10,
        );
        let downloadedBytes = 0;
        const fileStream = createWriteStream(destinationPath);

        response.on("data", (chunk: Buffer) => {
          downloadedBytes += chunk.length;
          if (Number.isFinite(totalBytes) && totalBytes > 0) {
            onProgress(Math.min(100, Math.round((downloadedBytes / totalBytes) * 100)));
          }
        });

        // Propagate response failures into the file stream so its error handler rejects.
        response.on("error", (error) => {
          fileStream.destroy(error);
        });

        fileStream.on("error", (error) => {
          response.destroy(error);
          reject(error);
        });

        fileStream.on("finish", () => {
          onProgress(100);
          resolve();
        });

        response.pipe(fileStream);
      });

      req.on("error", reject);
    });
  };

  return request(url);
}

/**
 * Download the small Whisper model to a ".download" temp file, renaming it into
 * place only on success, while streaming progress events to the renderer.
 * On failure the temp file is removed (best effort) and the error rethrown.
 */
async function downloadWhisperSmallModel(webContents: Electron.WebContents) {
  await fs.mkdir(WHISPER_MODEL_DIR, { recursive: true });
  const tempPath = `${WHISPER_SMALL_MODEL_PATH}.download`;

  sendWhisperModelDownloadProgress(webContents, {
    status: "downloading",
    progress: 0,
    path: null,
  });

  try {
    await fs.rm(tempPath, { force: true });
    await downloadFileWithProgress(WHISPER_MODEL_DOWNLOAD_URL, tempPath, (progress) => {
      sendWhisperModelDownloadProgress(webContents, {
        status: "downloading",
        progress,
        path: null,
      });
    });
    await fs.rename(tempPath, WHISPER_SMALL_MODEL_PATH);
    sendWhisperModelDownloadProgress(webContents, {
      status: "downloaded",
      progress: 100,
      path: WHISPER_SMALL_MODEL_PATH,
    });
    return WHISPER_SMALL_MODEL_PATH;
  } catch (error) {
    await fs.rm(tempPath, { force: true }).catch(() => undefined);
    sendWhisperModelDownloadProgress(webContents, {
      status: "error",
      progress: 0,
      path: null,
      error: String(error),
    });
    throw error;
  }
}

async function deleteWhisperSmallModel() {
  await fs.rm(WHISPER_SMALL_MODEL_PATH, { force: true });
}

// Parse an SRT "HH:MM:SS,mmm" timestamp into milliseconds; null when malformed.
function parseSrtTimestamp(value: string) {
  const match = value.trim().match(/^(\d{2}):(\d{2}):(\d{2}),(\d{3})$/);
  if (!match) {
    return null;
  }

  const [, hours, minutes, seconds, milliseconds] = match;
  return (
    Number(hours) * 60 * 60 * 1000 +
    Number(minutes) * 60 * 1000 +
    Number(seconds) * 1000 +
    Number(milliseconds)
  );
}

// One word of a caption cue; leadingSpace marks a word preceded by whitespace.
type CaptionWordPayload = {
  text: string;
  startMs: number;
  endMs: number;
  leadingSpace?: boolean;
};

type CaptionCuePayload = {
  id: string;
  startMs: number;
  endMs: number;
  text: string;
  words?: CaptionWordPayload[];
};

// Raw whisper.cpp JSON shapes; fields stay `unknown` until validated.
type WhisperJsonToken = {
  text?: unknown;
  offsets?: {
    from?: unknown;
    to?: unknown;
  };
};

type WhisperJsonSegment = {
  text?: unknown;
  offsets?: {
    from?: unknown;
    to?: unknown;
  };
  tokens?: unknown;
};

function isFiniteNumber(value: unknown): value is number {
  return typeof value === "number" && Number.isFinite(value);
}

// Reassemble cue text from word payloads, restoring inter-word spaces.
function buildCaptionTextFromWords(words: CaptionWordPayload[]) {
  return words
    .map((word, index) => `${index > 0 && word.leadingSpace ? " " : ""}${word.text}`)
    .join("")
    .trim();
}

/**
 * Convert whisper.cpp token arrays into word-level timings. Sub-word tokens
 * merge into the preceding word unless separated by whitespace. Returns [] as
 * soon as any word-bearing token lacks valid offsets (timing is all-or-nothing).
 */
function parseWhisperJsonWords(tokens: unknown) {
  if (!Array.isArray(tokens)) {
    return [];
  }

  const words: CaptionWordPayload[] = [];
  let nextLeadingSpace = false;

  for (const token of tokens) {
    if (!token || typeof token !== "object") {
      continue;
    }

    const tokenData = token as WhisperJsonToken;
    const tokenText = typeof tokenData.text === "string" ? tokenData.text : "";
    if (!tokenText) {
      continue;
    }

    const tokenStartMs = isFiniteNumber(tokenData.offsets?.from)
      ? Math.round(tokenData.offsets.from)
      : null;
    const tokenEndMs = isFiniteNumber(tokenData.offsets?.to)
      ? Math.round(tokenData.offsets.to)
      : null;
    // Split into alternating whitespace / non-whitespace runs.
    const parts = tokenText.match(/\s+|[^\s]+/g) ?? [];

    for (const part of parts) {
      if (/^\s+$/.test(part)) {
        // Whitespace only matters once a first word exists.
        nextLeadingSpace = words.length > 0;
        continue;
      }

      if (tokenStartMs == null || tokenEndMs == null || tokenEndMs <= tokenStartMs) {
        return [];
      }

      const previousWord = words.length > 0 ? 
words[words.length - 1] : null; - if (!previousWord || nextLeadingSpace) { - words.push({ - text: part, - startMs: tokenStartMs, - endMs: tokenEndMs, - ...(words.length > 0 && nextLeadingSpace ? { leadingSpace: true } : {}), - }); - } else { - previousWord.text += part; - previousWord.endMs = Math.max(previousWord.endMs, tokenEndMs); - } - - nextLeadingSpace = false; - } - } - - return words.filter((word) => word.text.trim().length > 0); -} - -function parseWhisperJsonCues(content: string) { - try { - const parsed = JSON.parse(content) as { - transcription?: unknown; - }; - - if (!Array.isArray(parsed.transcription)) { - return []; - } - - return parsed.transcription - .map((segment, index) => { - if (!segment || typeof segment !== "object") { - return null; - } - - const segmentData = segment as WhisperJsonSegment; - const startMs = isFiniteNumber(segmentData.offsets?.from) - ? Math.round(segmentData.offsets.from) - : null; - const endMs = isFiniteNumber(segmentData.offsets?.to) - ? Math.round(segmentData.offsets.to) - : null; - const segmentText = - typeof segmentData.text === "string" ? segmentData.text.trim() : ""; - - if (startMs == null || endMs == null || endMs <= startMs) { - return null; - } - - const words = parseWhisperJsonWords(segmentData.tokens); - const text = words.length > 0 ? buildCaptionTextFromWords(words) : segmentText; - - if (!text) { - return null; - } - - return { - id: `caption-${index + 1}`, - startMs, - endMs, - text, - ...(words.length > 0 ? 
{ words } : {}), - }; - }) - .filter((cue): cue is CaptionCuePayload => cue != null); - } catch (error) { - console.warn("[auto-captions] Failed to parse Whisper JSON output:", error); - return []; - } -} - -function parseSrtCues(content: string) { - return content - .split(/\r?\n\r?\n/) - .map((block, index) => { - const lines = block.split(/\r?\n/).map((line) => line.trim()); - const timingLine = lines.find((line) => line.includes("-->")); - if (!timingLine) { - return null; - } - - const [rawStart, rawEnd] = timingLine.split("-->").map((part) => part.trim()); - const startMs = parseSrtTimestamp(rawStart); - const endMs = parseSrtTimestamp(rawEnd); - if (startMs == null || endMs == null || endMs <= startMs) { - return null; - } - - const text = lines - .slice(lines.indexOf(timingLine) + 1) - .filter((line) => line.length > 0) - .join("\n") - .trim(); - - if (!text) { - return null; - } - - return { - id: `caption-${index + 1}`, - startMs, - endMs, - text, - }; - }) - .filter((cue): cue is CaptionCuePayload => cue != null); -} - -function shouldRetryWhisperWithoutJson(error: unknown) { - const message = error instanceof Error ? 
error.message : String(error); - return /unknown argument|output-json-full|output-json|ojf|\boj\b/i.test(message); -} - -async function ensureReadableFile(filePath: string, description: string) { - await fs.access(filePath, fsConstants.R_OK); - if (description === "whisper executable") { - try { - await fs.access(filePath, fsConstants.X_OK); - } catch { - throw new Error("The selected Whisper executable is not marked as executable."); - } - } -} - -async function isExecutableFile(filePath: string) { - try { - await fs.access(filePath, fsConstants.R_OK | fsConstants.X_OK); - return true; - } catch { - return false; - } -} - -async function resolveWhisperExecutablePath(preferredPath?: string | null) { - const candidatePaths = [ - preferredPath?.trim() || null, - ...getBundledWhisperExecutableCandidates(), - process.env["WHISPER_CPP_PATH"]?.trim() || null, - process.platform === "darwin" ? "/opt/homebrew/bin/whisper-cli" : null, - process.platform === "darwin" ? "/usr/local/bin/whisper-cli" : null, - process.platform === "darwin" ? "/opt/homebrew/bin/whisper-cpp" : null, - process.platform === "darwin" ? "/usr/local/bin/whisper-cpp" : null, - ].filter((value): value is string => Boolean(value)); - - for (const candidate of candidatePaths) { - const normalized = path.resolve(candidate); - if (await isExecutableFile(normalized)) { - return normalized; - } - } - - const pathCommand = process.platform === "win32" ? "where" : "which"; - const binaryNames = - process.platform === "win32" - ? 
["whisper-cli.exe", "whisper.exe", "main.exe"] - : ["whisper-cli", "whisper-cpp", "whisper", "main"]; - - for (const binaryName of binaryNames) { - const result = spawnSync(pathCommand, [binaryName], { encoding: "utf-8" }); - if (result.status === 0) { - const resolvedPath = result.stdout - .split(/\r?\n/) - .map((line) => line.trim()) - .find(Boolean); - - if (resolvedPath && (await isExecutableFile(resolvedPath))) { - return resolvedPath; - } - } - } - - throw new Error( - "No Whisper runtime was found. Recordly looked for a bundled binary first, then checked common system install locations.", - ); -} - -async function resolveCaptionAudioCandidates(videoPath: string) { - const candidates: Array<{ path: string; label: string }> = []; - const seenPaths = new Set(); - - const pushCandidate = (candidatePath: string | null | undefined, label: string) => { - const normalizedCandidatePath = normalizeVideoSourcePath(candidatePath); - if (!normalizedCandidatePath || seenPaths.has(normalizedCandidatePath)) { - return; - } - - seenPaths.add(normalizedCandidatePath); - candidates.push({ path: normalizedCandidatePath, label }); - }; - - pushCandidate(videoPath, "recording"); - - const requestedRecordingSession = await resolveRecordingSession(videoPath); - pushCandidate(requestedRecordingSession?.webcamPath, "linked webcam recording"); - - return candidates; -} - -async function extractCaptionAudioSource(options: { - videoPath: string; - ffmpegPath: string; - wavPath: string; -}) { - const candidates = await resolveCaptionAudioCandidates(options.videoPath); - const attemptedCandidates: Array<{ - path: string; - label: string; - readable: boolean; - extractedAudio: boolean; - error?: string; - }> = []; - - for (const candidate of candidates) { - try { - await ensureReadableFile(candidate.path, "video file"); - await execFileAsync( - options.ffmpegPath, - [ - "-y", - "-i", - candidate.path, - "-map", - "0:a:0", - "-vn", - "-ac", - "1", - "-ar", - "16000", - "-c:a", - 
"pcm_s16le", - options.wavPath, - ], - { timeout: 5 * 60 * 1000, maxBuffer: 20 * 1024 * 1024 }, - ); - attemptedCandidates.push({ ...candidate, readable: true, extractedAudio: true }); - return candidate; - } catch (error) { - attemptedCandidates.push({ - ...candidate, - readable: true, - extractedAudio: false, - error: error instanceof Error ? error.message : String(error), - }); - // Try the next candidate instead of failing on stale editor state. - } - } - - console.warn( - "[auto-captions] No audio source candidate could be extracted:", - attemptedCandidates, - ); - - throw new Error( - "No audio was found to transcribe in the saved recording file. Captions need an audio track. If this recording should have contained sound, the recording was saved without an audio stream.", - ); -} - -async function generateAutoCaptionsFromVideo(options: { - videoPath: string; - whisperExecutablePath?: string; - whisperModelPath: string; - language?: string; -}) { - const ffmpegPath = getFfmpegBinaryPath(); - const normalizedVideoPath = normalizeVideoSourcePath(options.videoPath); - if (!normalizedVideoPath) { - throw new Error("Missing source video path."); - } - - const whisperExecutablePath = await resolveWhisperExecutablePath(options.whisperExecutablePath); - const whisperModelPath = path.resolve(options.whisperModelPath); - await ensureReadableFile(whisperExecutablePath, "whisper executable"); - await ensureReadableFile(whisperModelPath, "whisper model"); - - const tempBase = path.join( - app.getPath("temp"), - `recordly-captions-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`, - ); - const wavPath = `${tempBase}.wav`; - const outputBase = `${tempBase}-whisper`; - const srtPath = `${outputBase}.srt`; - const jsonPath = `${outputBase}.json`; - - try { - const audioSource = await extractCaptionAudioSource({ - videoPath: normalizedVideoPath, - ffmpegPath, - wavPath, - }); - - const language = - options.language && options.language.trim() ? 
options.language.trim() : "auto"; - const whisperBaseArgs = [ - "-m", - whisperModelPath, - "-f", - wavPath, - "-osrt", - "-of", - outputBase, - "-l", - language, - "-np", - ]; - - let jsonEnabled = true; - try { - await execFileAsync(whisperExecutablePath, [...whisperBaseArgs, "-ojf"], { - timeout: 30 * 60 * 1000, - maxBuffer: 20 * 1024 * 1024, - }); - } catch (error) { - if (!shouldRetryWhisperWithoutJson(error)) { - throw error; - } - - jsonEnabled = false; - console.warn( - "[auto-captions] Whisper runtime does not support JSON full output, retrying with SRT only:", - error, - ); - await execFileAsync(whisperExecutablePath, whisperBaseArgs, { - timeout: 30 * 60 * 1000, - maxBuffer: 20 * 1024 * 1024, - }); - } - - const timedCues = jsonEnabled - ? parseWhisperJsonCues(await fs.readFile(jsonPath, "utf-8")) - : []; - const cues = - timedCues.length > 0 ? timedCues : parseSrtCues(await fs.readFile(srtPath, "utf-8")); - if (cues.length === 0) { - throw new Error("Whisper completed, but no caption cues were produced."); - } - - return { - cues, - audioSourceLabel: audioSource.label, - }; - } finally { - await Promise.allSettled([ - fs.rm(wavPath, { force: true }), - fs.rm(srtPath, { force: true }), - fs.rm(jsonPath, { force: true }), - ]); - } -} - -function waitForFfmpegCaptureStart(process: ChildProcessWithoutNullStreams) { - return new Promise((resolve, reject) => { - const onError = (error: Error) => { - cleanup(); - reject(error); - }; - - const onExit = (code: number | null) => { - cleanup(); - reject( - new Error( - ffmpegCaptureOutputBuffer.trim() || - `FFmpeg exited before recording started (code ${code ?? 
"unknown"})`, - ), - ); - }; - - const timer = setTimeout(() => { - cleanup(); - resolve(); - }, 900); - - const cleanup = () => { - clearTimeout(timer); - process.off("error", onError); - process.off("exit", onExit); - }; - - process.once("error", onError); - process.once("exit", onExit); - }); -} - -function waitForFfmpegCaptureStop(process: ChildProcessWithoutNullStreams, outputPath: string) { - return new Promise((resolve, reject) => { - const onClose = async (code: number | null) => { - cleanup(); - - try { - await fs.access(outputPath); - if (code === 0 || code === null) { - resolve(outputPath); - return; - } - - if (ffmpegCaptureOutputBuffer.includes("Exiting normally")) { - resolve(outputPath); - return; - } - } catch { - // handled below - } - - reject( - new Error( - ffmpegCaptureOutputBuffer.trim() || - `FFmpeg exited with code ${code ?? "unknown"}`, - ), - ); - }; - - const onError = (error: Error) => { - cleanup(); - reject(error); - }; - - const cleanup = () => { - process.off("close", onClose); - process.off("error", onError); - }; - - process.once("close", onClose); - process.once("error", onError); - }); -} - -function getDisplayBoundsForSource(source: SelectedSource) { - return resolveWindowsCaptureDisplay( - source, - getScreen().getAllDisplays(), - getScreen().getPrimaryDisplay(), - ).bounds; -} - -function parseXwininfoBounds(stdout: string): WindowBounds | null { - const absX = stdout.match(/Absolute upper-left X:\s+(-?\d+)/); - const absY = stdout.match(/Absolute upper-left Y:\s+(-?\d+)/); - const width = stdout.match(/Width:\s+(\d+)/); - const height = stdout.match(/Height:\s+(\d+)/); - - if (!absX || !absY || !width || !height) { - return null; - } - - return { - x: Number.parseInt(absX[1], 10), - y: Number.parseInt(absY[1], 10), - width: Number.parseInt(width[1], 10), - height: Number.parseInt(height[1], 10), - }; -} - -async function resolveLinuxWindowBounds(source: SelectedSource): Promise { - const windowId = parseWindowId(source?.id); 
- - if (windowId) { - try { - const { stdout } = await execFileAsync("xwininfo", ["-id", String(windowId)], { - timeout: 1500, - }); - const bounds = parseXwininfoBounds(stdout); - if (bounds && bounds.width > 0 && bounds.height > 0) { - return bounds; - } - } catch { - // fall back to title lookup below - } - } - - const windowTitle = - typeof source.windowTitle === "string" ? source.windowTitle.trim() : source.name.trim(); - if (!windowTitle) { - return null; - } - - try { - const { stdout } = await execFileAsync("xwininfo", ["-name", windowTitle], { - timeout: 1500, - }); - const bounds = parseXwininfoBounds(stdout); - return bounds && bounds.width > 0 && bounds.height > 0 ? bounds : null; - } catch { - return null; - } -} - -async function resolveWindowsWindowBounds(source: SelectedSource): Promise { - const windowId = parseWindowId(source?.id); - const windowTitle = - typeof source.windowTitle === "string" ? source.windowTitle.trim() : source.name.trim(); - - if (!windowId && !windowTitle) { - return null; - } - - const script = [ - "param([string]$windowId, [string]$windowTitle)", - 'Add-Type -TypeDefinition @"', - "using System;", - "using System.Runtime.InteropServices;", - "public static class RecordlyWindowBounds {", - " [StructLayout(LayoutKind.Sequential)]", - " public struct RECT {", - " public int Left;", - " public int Top;", - " public int Right;", - " public int Bottom;", - " }", - ' [DllImport("user32.dll")]', - " [return: MarshalAs(UnmanagedType.Bool)]", - " public static extern bool GetWindowRect(IntPtr hWnd, out RECT rect);", - "}", - '"@', - "$handle = [Int64]0", - "if ($windowId) {", - " $handle = [Int64]$windowId", - "}", - "if ($handle -le 0 -and $windowTitle) {", - ' $matchingProcess = Get-Process | Where-Object { $_.MainWindowTitle -eq $windowTitle -or $_.MainWindowTitle -like "*$windowTitle*" } | Select-Object -First 1', - " if ($matchingProcess) {", - " $handle = $matchingProcess.MainWindowHandle.ToInt64()", - " }", - "}", - "if 
($handle -le 0) {", - " exit 1", - "}", - "$rect = New-Object RecordlyWindowBounds+RECT", - "if (-not [RecordlyWindowBounds]::GetWindowRect([IntPtr]$handle, [ref]$rect)) {", - " exit 1", - "}", - "@{ x = $rect.Left; y = $rect.Top; width = $rect.Right - $rect.Left; height = $rect.Bottom - $rect.Top } | ConvertTo-Json -Compress", - ].join("\n"); - - try { - const { stdout } = await execFileAsync( - "powershell.exe", - ["-NoProfile", "-Command", script, String(windowId ?? ""), windowTitle], - { timeout: 1500 }, - ); - const bounds = JSON.parse(stdout) as WindowBounds; - return bounds && bounds.width > 0 && bounds.height > 0 ? bounds : null; - } catch { - return null; - } -} - -async function buildFfmpegCaptureArgs(source: SelectedSource, outputPath: string) { - const commonOutputArgs = [ - "-an", - "-c:v", - "libx264", - "-preset", - "veryfast", - "-pix_fmt", - "yuv420p", - "-movflags", - "+faststart", - outputPath, - ]; - - if (process.platform === "win32") { - if (source?.id?.startsWith("window:")) { - const windowTitle = - typeof source.windowTitle === "string" - ? 
source.windowTitle.trim() - : source.name.trim(); - if (!windowTitle) { - throw new Error("Missing window title for FFmpeg window capture"); - } - - return [ - "-y", - "-f", - "gdigrab", - "-framerate", - "60", - "-draw_mouse", - "0", - "-i", - `title=${windowTitle}`, - ...commonOutputArgs, - ]; - } - - return [ - "-y", - "-f", - "gdigrab", - "-framerate", - "60", - "-draw_mouse", - "0", - "-i", - "desktop", - ...commonOutputArgs, - ]; - } - - if (process.platform === "linux") { - const displayEnv = process.env.DISPLAY || ":0.0"; - if (source?.id?.startsWith("window:")) { - const bounds = await resolveLinuxWindowBounds(source); - if (!bounds) { - throw new Error("Unable to resolve Linux window bounds for FFmpeg capture"); - } - - return [ - "-y", - "-f", - "x11grab", - "-framerate", - "60", - "-draw_mouse", - "0", - "-video_size", - `${Math.max(2, bounds.width)}x${Math.max(2, bounds.height)}`, - "-i", - `${displayEnv}+${Math.round(bounds.x)},${Math.round(bounds.y)}`, - ...commonOutputArgs, - ]; - } - - const bounds = getDisplayBoundsForSource(source); - return [ - "-y", - "-f", - "x11grab", - "-framerate", - "60", - "-draw_mouse", - "0", - "-video_size", - `${Math.max(2, bounds.width)}x${Math.max(2, bounds.height)}`, - "-i", - `${displayEnv}+${Math.round(bounds.x)},${Math.round(bounds.y)}`, - ...commonOutputArgs, - ]; - } - - if (process.platform === "darwin") { - return [ - "-y", - "-f", - "avfoundation", - "-capture_cursor", - "0", - "-framerate", - "60", - "-i", - "1:none", - ...commonOutputArgs, - ]; - } - - throw new Error(`FFmpeg capture is not supported on ${process.platform}`); -} - -function getWindowsCaptureExePath() { - return resolvePreferredWindowsNativeHelperPath("wgc-capture", "wgc-capture.exe"); -} - -function getCursorMonitorExePath() { - return resolvePreferredWindowsNativeHelperPath("cursor-monitor", "cursor-monitor.exe"); -} - -async function isNativeWindowsCaptureAvailable(): Promise { - if (process.platform !== "win32") return false; - - const 
helperPath = getWindowsCaptureExePath(); - const os = await import("node:os"); - const [major, , build] = os.release().split(".").map(Number); - const supported = major >= 10 && build >= 19041; - let helperExists = false; - - try { - await fs.access(helperPath, fsConstants.X_OK); - helperExists = true; - } catch { - recordNativeCaptureDiagnostics({ - backend: "windows-wgc", - phase: "availability", - helperPath, - helperExists, - osRelease: os.release(), - supported, - error: "Native Windows capture helper is missing or not executable.", - }); - return false; - } - - recordNativeCaptureDiagnostics({ - backend: "windows-wgc", - phase: "availability", - helperPath, - helperExists, - osRelease: os.release(), - supported, - }); - - return supported; -} - -function waitForWindowsCaptureStart(proc: ChildProcessWithoutNullStreams) { - return new Promise((resolve, reject) => { - const timer = setTimeout(() => { - cleanup(); - reject(new Error("Timed out waiting for native Windows capture to start")); - }, 12000); - - const onStdout = (chunk: Buffer) => { - const text = chunk.toString(); - if (text.includes("Recording started")) { - cleanup(); - resolve(); - } - }; - - const onError = (error: Error) => { - cleanup(); - reject(error); - }; - - const onExit = (code: number | null) => { - cleanup(); - reject( - new Error( - windowsCaptureOutputBuffer.trim() || - `Native Windows capture exited before recording started (code ${code ?? "unknown"})`, - ), - ); - }; - - const cleanup = () => { - clearTimeout(timer); - proc.stdout.off("data", onStdout); - proc.off("error", onError); - proc.off("exit", onExit); - }; - - proc.stdout.on("data", onStdout); - proc.once("error", onError); - proc.once("exit", onExit); - }); -} - -function waitForWindowsCaptureStop(proc: ChildProcessWithoutNullStreams) { - return new Promise((resolve, reject) => { - const onClose = (code: number | null) => { - cleanup(); - const match = windowsCaptureOutputBuffer.match(/Recording stopped\. 
Output path: (.+)/); - if (match?.[1]) { - resolve(match[1].trim()); - return; - } - if (code === 0 && windowsCaptureTargetPath) { - resolve(windowsCaptureTargetPath); - return; - } - reject( - new Error( - windowsCaptureOutputBuffer.trim() || - `Native Windows capture exited with code ${code ?? "unknown"}`, - ), - ); - }; - - const onError = (error: Error) => { - cleanup(); - reject(error); - }; - - const cleanup = () => { - proc.off("close", onClose); - proc.off("error", onError); - }; - - proc.once("close", onClose); - proc.once("error", onError); - }); -} - -function attachWindowsCaptureLifecycle(proc: ChildProcessWithoutNullStreams) { - proc.once("close", () => { - const wasActive = windowsNativeCaptureActive; - windowsCaptureProcess = null; - - if (!wasActive || windowsCaptureStopRequested) { - return; - } - - windowsNativeCaptureActive = false; - windowsCaptureTargetPath = null; - windowsCaptureStopRequested = false; - - const sourceName = selectedSource?.name ?? "Screen"; - BrowserWindow.getAllWindows().forEach((window) => { - if (!window.isDestroyed()) { - window.webContents.send("recording-state-changed", { - recording: false, - sourceName, - }); - } - }); - - emitRecordingInterrupted("capture-stopped", "Recording stopped unexpectedly."); - }); -} - -async function muxNativeWindowsVideoWithAudio( - videoPath: string, - systemAudioPath: string | null, - micAudioPath: string | null, - pauseSegments: PauseSegment[] = [], -) { - const ffmpegPath = getFfmpegBinaryPath(); - const inputs: string[] = ["-i", videoPath]; - const audioInputs: string[] = []; - const audioFilePaths: string[] = []; - - for (const [label, audioPath] of [ - ["system", systemAudioPath], - ["mic", micAudioPath], - ] as const) { - if (!audioPath) continue; - try { - const stat = await fs.stat(audioPath); - if (stat.size <= 0) { - console.warn(`[mux-win] Skipping ${label} audio: file is empty (${audioPath})`); - await fs.rm(audioPath, { force: true }).catch(() => undefined); - continue; - } 
- inputs.push("-i", audioPath); - audioInputs.push(label); - audioFilePaths.push(audioPath); - } catch { - console.warn(`[mux-win] Skipping ${label} audio: file not accessible (${audioPath})`); - } - } - - if (audioInputs.length === 0) return; - - // Match each audio track to the captured video duration. - // Small duration deltas are more often clock drift than a true late start, - // so prefer tempo correction there and reserve leading silence for larger gaps. - const videoDuration = await probeMediaDurationSeconds(videoPath); - const audioAdjustments: Map = new Map(); - - if (videoDuration > 0) { - for (let i = 0; i < audioFilePaths.length; i++) { - const audioDuration = await probeMediaDurationSeconds(audioFilePaths[i]); - const adjustment = getAudioSyncAdjustment(videoDuration, audioDuration); - audioAdjustments.set(audioInputs[i], adjustment); - if (adjustment.mode === "tempo") { - console.log( - `[mux-win] ${audioInputs[i]} audio differs from video by ${adjustment.durationDeltaMs}ms — applying tempo ratio ${adjustment.tempoRatio.toFixed(6)}`, - ); - } else if (adjustment.mode === "delay" && adjustment.delayMs > 0) { - console.log( - `[mux-win] ${audioInputs[i]} audio appears to start late by ${adjustment.delayMs}ms — adding leading silence`, - ); - } - } - } - - const mixedOutputPath = `${videoPath}.muxed.mp4`; - const normalizedPauseSegments = normalizePauseSegments(pauseSegments); - const systemAdjustment = audioAdjustments.get("system") ?? { - mode: "none", - delayMs: 0, - tempoRatio: 1, - durationDeltaMs: 0, - }; - const micAdjustment = audioAdjustments.get("mic") ?? 
{ - mode: "none", - delayMs: 0, - tempoRatio: 1, - durationDeltaMs: 0, - }; - - if (audioInputs.length === 2) { - // Both system + mic audio: mix them - const filterParts: string[] = []; - const systemPauseFilter = buildPausedAudioFilter( - "1:a", - "system_trimmed", - normalizedPauseSegments, - ); - const micPauseFilter = buildPausedAudioFilter( - "2:a", - "mic_trimmed", - normalizedPauseSegments, - ); - - if (systemPauseFilter) { - filterParts.push(systemPauseFilter); - } - if (micPauseFilter) { - filterParts.push(micPauseFilter); - } - - const systemLabel = systemPauseFilter ? "[system_trimmed]" : "[1:a]"; - const micLabel = micPauseFilter ? "[mic_trimmed]" : "[2:a]"; - - appendSyncedAudioFilter(filterParts, systemLabel, "s", systemAdjustment); - appendSyncedAudioFilter(filterParts, micLabel, "m", micAdjustment); - filterParts.push("[s][m]amix=inputs=2:duration=longest:normalize=0[aout]"); - - await execFileAsync( - ffmpegPath, - [ - "-y", - ...inputs, - "-filter_complex", - filterParts.join(";"), - "-map", - "0:v:0", - "-map", - "[aout]", - "-c:v", - "copy", - "-c:a", - "aac", - "-b:a", - "192k", - "-shortest", - mixedOutputPath, - ], - { timeout: 120000, maxBuffer: 10 * 1024 * 1024 }, - ); - } else { - // Single audio track - const pauseFilter = buildPausedAudioFilter("1:a", "trimmed_audio", normalizedPauseSegments); - const singleAdjustment = audioAdjustments.get(audioInputs[0]) ?? { - mode: "none", - delayMs: 0, - tempoRatio: 1, - durationDeltaMs: 0, - }; - - if (pauseFilter || singleAdjustment.mode !== "none") { - const filterParts: string[] = []; - if (pauseFilter) { - filterParts.push(pauseFilter); - } - const srcLabel = pauseFilter ? 
"[trimmed_audio]" : "[1:a]"; - appendSyncedAudioFilter(filterParts, srcLabel, "aout", singleAdjustment); - - await execFileAsync( - ffmpegPath, - [ - "-y", - ...inputs, - "-filter_complex", - filterParts.join(";"), - "-map", - "0:v:0", - "-map", - "[aout]", - "-c:v", - "copy", - "-c:a", - "aac", - "-b:a", - "192k", - "-shortest", - mixedOutputPath, - ], - { timeout: 120000, maxBuffer: 10 * 1024 * 1024 }, - ); - } else { - await execFileAsync( - ffmpegPath, - [ - "-y", - ...inputs, - "-map", - "0:v:0", - "-map", - "1:a:0", - "-c:v", - "copy", - "-c:a", - "aac", - "-b:a", - "192k", - "-shortest", - mixedOutputPath, - ], - { timeout: 120000, maxBuffer: 10 * 1024 * 1024 }, - ); - } - } - - await moveFileWithOverwrite(mixedOutputPath, videoPath); - - // Clean up audio files - for (const audioPath of [systemAudioPath, micAudioPath]) { - if (audioPath) { - await fs.rm(audioPath, { force: true }).catch(() => undefined); - } - } -} - -function normalizePauseSegments(pauseSegments: PauseSegment[] | undefined): PauseSegment[] { - if (!Array.isArray(pauseSegments) || pauseSegments.length === 0) { - return []; - } - - const normalized = pauseSegments - .map((segment) => { - const startMs = Number(segment?.startMs); - const endMs = Number(segment?.endMs); - - if (!Number.isFinite(startMs) || !Number.isFinite(endMs)) { - return null; - } - - const clampedStart = Math.max(0, Math.round(startMs)); - const clampedEnd = Math.max(0, Math.round(endMs)); - if (clampedEnd <= clampedStart) { - return null; - } - - return { startMs: clampedStart, endMs: clampedEnd }; - }) - .filter((segment): segment is PauseSegment => !!segment) - .sort((left, right) => left.startMs - right.startMs); - - if (normalized.length <= 1) { - return normalized; - } - - const merged: PauseSegment[] = [{ ...normalized[0] }]; - - for (const segment of normalized.slice(1)) { - const previous = merged[merged.length - 1]; - if (segment.startMs <= previous.endMs) { - previous.endMs = Math.max(previous.endMs, 
segment.endMs); - } else { - merged.push({ ...segment }); - } - } - - return merged; -} - -function formatFfmpegSeconds(milliseconds: number) { - return (milliseconds / 1000).toFixed(3); -} - -function buildPausedAudioFilter( - inputLabel: string, - outputLabel: string, - pauseSegments: PauseSegment[], -) { - if (pauseSegments.length === 0) { - return null; - } - - const activeSegments: Array<{ startMs: number; endMs?: number }> = []; - let cursorMs = 0; - - for (const pauseSegment of pauseSegments) { - if (pauseSegment.startMs > cursorMs) { - activeSegments.push({ startMs: cursorMs, endMs: pauseSegment.startMs }); - } - cursorMs = Math.max(cursorMs, pauseSegment.endMs); - } - - activeSegments.push({ startMs: cursorMs }); - - const filterParts: string[] = []; - const segmentLabels: string[] = []; - - activeSegments.forEach((segment, index) => { - if (typeof segment.endMs === "number" && segment.endMs <= segment.startMs) { - return; - } - - const segmentLabel = `${outputLabel}_part${index}`; - const trimArgs = - typeof segment.endMs === "number" - ? 
`start=${formatFfmpegSeconds(segment.startMs)}:end=${formatFfmpegSeconds(segment.endMs)}` - : `start=${formatFfmpegSeconds(segment.startMs)}`; - - filterParts.push(`[${inputLabel}]atrim=${trimArgs},asetpts=PTS-STARTPTS[${segmentLabel}]`); - segmentLabels.push(`[${segmentLabel}]`); - }); - - if (segmentLabels.length === 0) { - return null; - } - - if (segmentLabels.length === 1) { - filterParts.push(`${segmentLabels[0]}anull[${outputLabel}]`); - } else { - filterParts.push( - `${segmentLabels.join("")}concat=n=${segmentLabels.length}:v=0:a=1[${outputLabel}]`, - ); - } - - return filterParts.join(";"); -} - -function waitForNativeCaptureStart(process: ChildProcessWithoutNullStreams) { - return new Promise((resolve, reject) => { - const timer = setTimeout(() => { - cleanup(); - reject(new Error("Timed out waiting for ScreenCaptureKit recorder to start")); - }, 12000); - - // Only check for the start pattern — the start handler already - // appends stdout/stderr to nativeCaptureOutputBuffer - const onStdout = (chunk: Buffer) => { - const text = chunk.toString(); - if (text.includes("Recording started")) { - cleanup(); - resolve(); - } - }; - - const onError = (error: Error) => { - cleanup(); - reject(error); - }; - - const onExit = (code: number | null) => { - cleanup(); - reject( - new Error( - nativeCaptureOutputBuffer.trim() || - `Native capture helper exited before recording started (code ${code ?? "unknown"})`, - ), - ); - }; - - const cleanup = () => { - clearTimeout(timer); - process.stdout.off("data", onStdout); - process.off("error", onError); - process.off("exit", onExit); - }; - - process.stdout.on("data", onStdout); - process.once("error", onError); - process.once("exit", onExit); - }); -} - -function waitForNativeCaptureStop(process: ChildProcessWithoutNullStreams) { - return new Promise((resolve, reject) => { - const onClose = (code: number | null) => { - cleanup(); - const match = nativeCaptureOutputBuffer.match(/Recording stopped\. 
Output path: (.+)/); - if (match?.[1]) { - resolve(match[1].trim()); - return; - } - // Fallback: if exit code was 0 and we know the target path, try to use it - if (code === 0 && nativeCaptureTargetPath) { - resolve(nativeCaptureTargetPath); - return; - } - reject( - new Error( - nativeCaptureOutputBuffer.trim() || - `Native capture helper exited with code ${code ?? "unknown"}`, - ), - ); - }; - - const onError = (error: Error) => { - cleanup(); - reject(error); - }; - - const cleanup = () => { - process.off("close", onClose); - process.off("error", onError); - }; - - process.once("close", onClose); - process.once("error", onError); - }); -} - -async function muxNativeMacRecordingWithAudio( - videoPath: string, - systemAudioPath?: string | null, - microphonePath?: string | null, -) { - const ffmpegPath = getFfmpegBinaryPath(); - const mixedOutputPath = `${videoPath}.mixed.mp4`; - - const inputs = ["-i", videoPath]; - const availableAudioInputs: string[] = []; - const audioFilePaths: string[] = []; - - for (const [label, audioPath] of [ - ["system", systemAudioPath], - ["microphone", microphonePath], - ] as const) { - if (!audioPath) continue; - try { - const stat = await fs.stat(audioPath); - if (stat.size <= 0) { - console.warn(`[mux] Skipping ${label} audio: file is empty (${audioPath})`); - await fs.rm(audioPath, { force: true }).catch(() => undefined); - continue; - } - inputs.push("-i", audioPath); - availableAudioInputs.push(label); - audioFilePaths.push(audioPath); - } catch { - console.warn(`[mux] Skipping ${label} audio: file not accessible (${audioPath})`); - } - } - - if (availableAudioInputs.length === 0) { - console.warn("[mux] No valid audio files to mux"); - return; - } - - // Match each audio track to the captured video duration. 
- const videoDuration = await probeMediaDurationSeconds(videoPath); - const audioAdjustments: Map = new Map(); - - if (videoDuration > 0) { - for (let i = 0; i < audioFilePaths.length; i++) { - const audioDuration = await probeMediaDurationSeconds(audioFilePaths[i]); - const adjustment = getAudioSyncAdjustment(videoDuration, audioDuration); - audioAdjustments.set(availableAudioInputs[i], adjustment); - if (adjustment.mode === "tempo") { - console.log( - `[mux] ${availableAudioInputs[i]} audio differs from video by ${adjustment.durationDeltaMs}ms — applying tempo ratio ${adjustment.tempoRatio.toFixed(6)}`, - ); - } else if (adjustment.mode === "delay" && adjustment.delayMs > 0) { - console.log( - `[mux] ${availableAudioInputs[i]} audio appears to start late by ${adjustment.delayMs}ms — adding leading silence`, - ); - } - } - } - - const systemAdjustment = audioAdjustments.get("system") ?? { - mode: "none", - delayMs: 0, - tempoRatio: 1, - durationDeltaMs: 0, - }; - const micAdjustment = audioAdjustments.get("microphone") ?? 
{ - mode: "none", - delayMs: 0, - tempoRatio: 1, - durationDeltaMs: 0, - }; - const needsFilter = systemAdjustment.mode !== "none" || micAdjustment.mode !== "none"; - - let args: string[]; - if (availableAudioInputs.length === 2) { - if (needsFilter) { - const filterParts: string[] = []; - appendSyncedAudioFilter(filterParts, "[1:a]", "s", systemAdjustment); - appendSyncedAudioFilter(filterParts, "[2:a]", "m", micAdjustment); - filterParts.push("[s][m]amix=inputs=2:duration=longest:normalize=0[aout]"); - args = [ - "-y", - ...inputs, - "-filter_complex", - filterParts.join(";"), - "-map", - "0:v:0", - "-map", - "[aout]", - "-c:v", - "copy", - "-c:a", - "aac", - "-b:a", - "192k", - "-shortest", - mixedOutputPath, - ]; - } else { - args = [ - "-y", - ...inputs, - "-filter_complex", - "[1:a][2:a]amix=inputs=2:duration=longest:normalize=0[aout]", - "-map", - "0:v:0", - "-map", - "[aout]", - "-c:v", - "copy", - "-c:a", - "aac", - "-b:a", - "192k", - "-shortest", - mixedOutputPath, - ]; - } - } else { - const singleAdjustment = audioAdjustments.get(availableAudioInputs[0]) ?? 
{ - mode: "none", - delayMs: 0, - tempoRatio: 1, - durationDeltaMs: 0, - }; - if (singleAdjustment.mode !== "none") { - const filterParts: string[] = []; - appendSyncedAudioFilter(filterParts, "[1:a]", "aout", singleAdjustment); - args = [ - "-y", - ...inputs, - "-filter_complex", - filterParts.join(";"), - "-map", - "0:v:0", - "-map", - "[aout]", - "-c:v", - "copy", - "-c:a", - "aac", - "-b:a", - "192k", - "-shortest", - mixedOutputPath, - ]; - } else { - args = [ - "-y", - ...inputs, - "-map", - "0:v:0", - "-map", - "1:a:0", - "-c:v", - "copy", - "-c:a", - "aac", - "-b:a", - "192k", - "-shortest", - mixedOutputPath, - ]; - } - } - - console.log("[mux] Running ffmpeg:", ffmpegPath, args.join(" ")); - - try { - await execFileAsync(ffmpegPath, args, { timeout: 120000, maxBuffer: 10 * 1024 * 1024 }); - } catch (error) { - const execError = error as NodeJS.ErrnoException & { stderr?: string }; - console.error("[mux] ffmpeg failed:", execError.stderr || execError.message); - throw error; - } - - await moveFileWithOverwrite(mixedOutputPath, videoPath); - console.log("[mux] Successfully muxed audio into video:", videoPath); - - for (const audioPath of [systemAudioPath, microphonePath]) { - if (audioPath) { - await fs.rm(audioPath, { force: true }).catch(() => undefined); - } - } -} - -function emitRecordingInterrupted(reason: string, message: string) { - BrowserWindow.getAllWindows().forEach((window) => { - if (!window.isDestroyed()) { - window.webContents.send("recording-interrupted", { reason, message }); - } - }); -} - -function emitCursorStateChanged(cursorType: CursorVisualType) { - BrowserWindow.getAllWindows().forEach((window) => { - if (!window.isDestroyed()) { - window.webContents.send("cursor-state-changed", { cursorType }); - } - }); -} - -function sampleCursorStateChange(cursorType: CursorVisualType) { - if (!isCursorCaptureActive) { - return; - } - - const point = getNormalizedCursorPoint(); - if (!point) { - return; - } - - pushCursorSample(point.cx, 
point.cy, Date.now() - cursorCaptureStartTimeMs, "move", cursorType); -} - -function attachNativeCaptureLifecycle(process: ChildProcessWithoutNullStreams) { - process.once("close", () => { - const wasActive = nativeScreenRecordingActive; - nativeCaptureProcess = null; - - if (!wasActive || nativeCaptureStopRequested) { - return; - } - - nativeScreenRecordingActive = false; - nativeCaptureTargetPath = null; - nativeCaptureStopRequested = false; - nativeCaptureSystemAudioPath = null; - nativeCaptureMicrophonePath = null; - - const sourceName = selectedSource?.name ?? "Screen"; - BrowserWindow.getAllWindows().forEach((window) => { - if (!window.isDestroyed()) { - window.webContents.send("recording-state-changed", { - recording: false, - sourceName, - }); - } - }); - - const reason = nativeCaptureOutputBuffer.includes("WINDOW_UNAVAILABLE") - ? "window-unavailable" - : "capture-stopped"; - const message = - reason === "window-unavailable" - ? "The selected window is no longer capturable. Please reselect a window." - : "Recording stopped unexpectedly."; - - emitRecordingInterrupted(reason, message); - }); -} +const execFileAsync = promisify(execFile); -async function ensureNativeCursorMonitorBinary() { - await ensureNativeHelperMigration(); - return ensureSwiftHelperBinary( - getNativeCursorMonitorSourcePath(), - getNativeCursorMonitorBinaryPath(), - "native cursor monitor helper", - "recordly-native-cursor-monitor", - ); +function normalizeRecordingTimeOffsetMs(value: unknown): number { + return typeof value === "number" && Number.isFinite(value) ? Math.round(value) : 0; } -function handleCursorMonitorStdout(chunk: Buffer) { - nativeCursorMonitorOutputBuffer += chunk.toString(); - const lines = nativeCursorMonitorOutputBuffer.split(/\r?\n/); - nativeCursorMonitorOutputBuffer = lines.pop() ?? 
""; - - for (const line of lines) { - const match = line.match(/^STATE:(.+)$/); - if (!match) continue; - const next = match[1].trim() as CursorVisualType; - if ( - next === "arrow" || - next === "text" || - next === "pointer" || - next === "crosshair" || - next === "open-hand" || - next === "closed-hand" || - next === "resize-ew" || - next === "resize-ns" || - next === "not-allowed" - ) { - if (currentCursorVisualType !== next) { - currentCursorVisualType = next; - sampleCursorStateChange(next); - emitCursorStateChanged(next); - } +function broadcastSelectedSourceChange() { + for (const window of BrowserWindow.getAllWindows()) { + if (!window.isDestroyed()) { + window.webContents.send("selected-source-changed", selectedSource); } } } -async function startNativeCursorMonitor() { - stopNativeCursorMonitor(); - if (process.platform !== "darwin" && process.platform !== "win32") { - currentCursorVisualType = "arrow"; - return; - } - - try { - let helperPath: string; - if (process.platform === "win32") { - helperPath = getCursorMonitorExePath(); - try { - // Use F_OK on Windows — X_OK is meaningless and can give false positives - await fs.access(helperPath, fsConstants.F_OK); - } catch { - console.warn("Windows cursor monitor helper missing:", helperPath); - currentCursorVisualType = "arrow"; - return; - } - } else { - helperPath = await ensureNativeCursorMonitorBinary(); - } +/** Returns the currently selected source ID for setDisplayMediaRequestHandler */ - nativeCursorMonitorOutputBuffer = ""; - currentCursorVisualType = "arrow"; +export function getSelectedSourceId(): string | null { + return (selectedSource?.id as string | null) ?? 
null; +} +export function killWindowsCaptureProcess() { + if (windowsCaptureProcess) { try { - nativeCursorMonitorProcess = spawn(helperPath, [], { - stdio: ["pipe", "pipe", "pipe"], - }); - } catch (spawnError) { - console.warn("Failed to spawn cursor monitor:", spawnError); - nativeCursorMonitorProcess = null; - currentCursorVisualType = "arrow"; - return; + windowsCaptureProcess.kill(); + } catch { + /* ignore */ } - - nativeCursorMonitorProcess.once("error", (error) => { - console.warn("Native cursor monitor process error:", error); - nativeCursorMonitorProcess = null; - nativeCursorMonitorOutputBuffer = ""; - currentCursorVisualType = "arrow"; - }); - - nativeCursorMonitorProcess.stdout.on("data", handleCursorMonitorStdout); - - nativeCursorMonitorProcess.once("close", () => { - nativeCursorMonitorProcess = null; - nativeCursorMonitorOutputBuffer = ""; - currentCursorVisualType = "arrow"; - }); - } catch (error) { - console.warn("Failed to start native cursor monitor:", error); - nativeCursorMonitorProcess = null; - nativeCursorMonitorOutputBuffer = ""; - currentCursorVisualType = "arrow"; - } -} - -function stopNativeCursorMonitor() { - currentCursorVisualType = "arrow"; - - if (!nativeCursorMonitorProcess) { - return; - } - - try { - nativeCursorMonitorProcess.stdin.write("stop\n"); - } catch { - // ignore stop signal issues - } - try { - nativeCursorMonitorProcess.kill(); - } catch { - // ignore kill issues + setWindowsCaptureProcess(null); + setWindowsCaptureTargetPath(null); + setWindowsNativeCaptureActive(false); + setNativeScreenRecordingActive(false); + setWindowsCaptureStopRequested(false); + setWindowsCapturePaused(false); + setWindowsSystemAudioPath(null); + setWindowsMicAudioPath(null); + setWindowsPendingVideoPath(null); } - - nativeCursorMonitorProcess = null; - nativeCursorMonitorOutputBuffer = ""; } -async function moveFileWithOverwrite(sourcePath: string, destinationPath: string) { - await fs.mkdir(path.dirname(destinationPath), { recursive: 
true }); - await fs.rm(destinationPath, { force: true }); - - try { - await fs.rename(sourcePath, destinationPath); - } catch (error) { - const nodeError = error as NodeJS.ErrnoException; - if (nodeError.code !== "EXDEV") { - throw error; - } - - await fs.copyFile(sourcePath, destinationPath); - await fs.unlink(sourcePath); - } +function normalizeDesktopSourceName(value: string) { + return value.trim().replace(/\s+/g, " ").toLowerCase(); } -function isTrustedProjectPath(filePath?: string | null) { - if (!filePath || !currentProjectPath) { +function hasUsableSourceThumbnail( + thumbnail: + | { + isEmpty: () => boolean; + getSize: () => { width: number; height: number }; + } + | null + | undefined, +) { + if (!thumbnail || thumbnail.isEmpty()) { return false; } - return normalizePath(filePath) === normalizePath(currentProjectPath); -} - -const CURSOR_TELEMETRY_VERSION = 2; -const CURSOR_SAMPLE_INTERVAL_MS = 33; -const MAX_CURSOR_SAMPLES = 60 * 60 * 30; // 1 hour @ 30Hz - -type CursorInteractionType = - | "move" - | "click" - | "double-click" - | "right-click" - | "middle-click" - | "mouseup"; - -interface CursorTelemetryPoint { - timeMs: number; - cx: number; - cy: number; - interactionType?: CursorInteractionType; - cursorType?: CursorVisualType; -} - -let cursorCaptureInterval: NodeJS.Timeout | null = null; -let cursorCaptureStartTimeMs = 0; -let activeCursorSamples: CursorTelemetryPoint[] = []; -let pendingCursorSamples: CursorTelemetryPoint[] = []; -let isCursorCaptureActive = false; -let interactionCaptureCleanup: (() => void) | null = null; -let hasLoggedInteractionHookFailure = false; -let lastLeftClick: { timeMs: number; cx: number; cy: number } | null = null; -let linuxCursorScreenPoint: { x: number; y: number; updatedAt: number } | null = null; -let selectedWindowBounds: WindowBounds | null = null; -let windowBoundsCaptureInterval: NodeJS.Timeout | null = null; - -function normalizeHookMouseButton(rawButton: unknown): 1 | 2 | 3 { - if (typeof rawButton !== 
"number" || !Number.isFinite(rawButton)) { - return 1; - } - - // uiohook/libuiohook button codes are typically 1/2/3. Some wrappers may - // expose alternate constants depending on platform/runtime. - if (rawButton === 2 || rawButton === 39) { - return 2; - } - - if (rawButton === 3 || rawButton === 38) { - return 3; - } - - return 1; -} - -function getHookMouseButton(event: HookMouseEvent | null | undefined): 1 | 2 | 3 { - return normalizeHookMouseButton( - event?.button ?? event?.mouseButton ?? event?.data?.button ?? event?.data?.mouseButton, - ); -} - -function clamp(value: number, min: number, max: number) { - return Math.min(max, Math.max(min, value)); -} - -function stopCursorCapture() { - if (cursorCaptureInterval) { - clearInterval(cursorCaptureInterval); - cursorCaptureInterval = null; - } -} -function stopInteractionCapture() { - if (interactionCaptureCleanup) { - interactionCaptureCleanup(); - interactionCaptureCleanup = null; - } + const size = thumbnail.getSize(); + return size.width > 1 && size.height > 1; } -function stopWindowBoundsCapture() { - if (windowBoundsCaptureInterval) { - clearInterval(windowBoundsCaptureInterval); - windowBoundsCaptureInterval = null; - } - selectedWindowBounds = null; +function getMacPrivacySettingsUrl(pane: "screen" | "accessibility" | "microphone") { + if (pane === "screen") + return "x-apple.systempreferences:com.apple.preference.security?Privacy_ScreenCapture"; + if (pane === "microphone") + return "x-apple.systempreferences:com.apple.preference.security?Privacy_Microphone"; + return "x-apple.systempreferences:com.apple.preference.security?Privacy_Accessibility"; } -function getWindowBoundsFromNativeSource( - source?: NativeMacWindowSource | null, -): WindowBounds | null { - if (!source) { - return null; - } - - const { x, y, width, height } = source; - if ( - typeof x !== "number" || - !Number.isFinite(x) || - typeof y !== "number" || - !Number.isFinite(y) || - typeof width !== "number" || - !Number.isFinite(width) 
|| - typeof height !== "number" || - !Number.isFinite(height) - ) { - return null; - } - - if (width <= 0 || height <= 0) { - return null; - } - - return { x, y, width, height }; -} -async function resolveMacWindowBounds(source: SelectedSource): Promise { - const windowId = parseWindowId(source.id); - if (!windowId) { - return null; +function approveUserPath(filePath: string | null | undefined) { + if (!filePath) { + return; } try { - const nativeSources = await getNativeMacWindowSources({ maxAgeMs: 250 }); - const matchedSource = nativeSources.find((entry) => parseWindowId(entry.id) === windowId); - return getWindowBoundsFromNativeSource(matchedSource); + approvedLocalReadPaths.add(path.resolve(filePath)); } catch { - return null; - } -} - -async function refreshSelectedWindowBounds() { - if (!selectedSource?.id?.startsWith("window:")) { - selectedWindowBounds = null; - return; - } - - let bounds: WindowBounds | null = null; - - if (process.platform === "darwin") { - bounds = await resolveMacWindowBounds(selectedSource); - } else if (process.platform === "win32") { - bounds = await resolveWindowsWindowBounds(selectedSource); - } else if (process.platform === "linux") { - bounds = await resolveLinuxWindowBounds(selectedSource); - } - - selectedWindowBounds = bounds; -} - -function startWindowBoundsCapture() { - stopWindowBoundsCapture(); - - if ( - !["darwin", "win32", "linux"].includes(process.platform) || - !selectedSource?.id?.startsWith("window:") - ) { - return; + // Ignore invalid paths; later reads will surface the underlying error. } - - void refreshSelectedWindowBounds(); - windowBoundsCaptureInterval = setInterval(() => { - void refreshSelectedWindowBounds(); - }, 250); } -function getNormalizedCursorPoint() { - const fallbackCursor = getScreen().getCursorScreenPoint(); - const linuxCursorCache = process.platform === "linux" ? 
linuxCursorScreenPoint : null; - const isLinuxCacheFresh = !!linuxCursorCache && Date.now() - linuxCursorCache.updatedAt <= 1000; - - // On Windows/Linux, platform APIs (iohook, GetWindowRect, xwininfo) return - // physical pixel coordinates, while Electron's getCursorScreenPoint() and - // display.bounds return DIP (logical) coordinates. Apply a DPI correction - // so all values are in the same coordinate space before normalizing. - // Use the display containing the window (or cursor) rather than the primary - // display so multi-monitor setups with different DPI scales work correctly. - const primarySf = - process.platform !== "darwin" ? getScreen().getPrimaryDisplay().scaleFactor || 1 : 1; - - const cursor = isLinuxCacheFresh - ? { x: linuxCursorCache.x / primarySf, y: linuxCursorCache.y / primarySf } - : fallbackCursor; - - const windowBounds = selectedSource?.id?.startsWith("window:") ? selectedWindowBounds : null; - if (windowBounds) { - // Resolve the scale factor for the display that contains the target window - // centre point, falling back to the primary display scale factor. - const sf = - process.platform !== "darwin" - ? getScreen().getDisplayNearestPoint({ - x: windowBounds.x / primarySf, - y: windowBounds.y / primarySf, - }).scaleFactor || 1 - : 1; - const width = Math.max(1, windowBounds.width / sf); - const height = Math.max(1, windowBounds.height / sf); - - return { - cx: clamp((cursor.x - windowBounds.x / sf) / width, 0, 1), - cy: clamp((cursor.y - windowBounds.y / sf) / height, 0, 1), - }; +async function getSystemCursorAssets() { + if (process.platform !== "darwin") { + setCachedSystemCursorAssets({}); + setCachedSystemCursorAssetsSourceMtimeMs(null); + return cachedSystemCursorAssets ?? {}; } - const sourceDisplayId = Number(selectedSource?.display_id); - const sourceDisplay = Number.isFinite(sourceDisplayId) - ? (getScreen() - .getAllDisplays() - .find((display) => display.id === sourceDisplayId) ?? 
null) - : null; - const display = sourceDisplay ?? getScreen().getDisplayNearestPoint(cursor); - const bounds = display.bounds; - const width = Math.max(1, bounds.width); - const height = Math.max(1, bounds.height); - - const cx = clamp((cursor.x - bounds.x) / width, 0, 1); - const cy = clamp((cursor.y - bounds.y) / height, 0, 1); - return { cx, cy }; -} - -function getHookCursorScreenPoint( - event: HookMouseEvent | null | undefined, -): { x: number; y: number } | null { - const rawX = event?.x ?? event?.data?.x ?? event?.screenX ?? event?.data?.screenX; - const rawY = event?.y ?? event?.data?.y ?? event?.screenY ?? event?.data?.screenY; - - if ( - typeof rawX !== "number" || - !Number.isFinite(rawX) || - typeof rawY !== "number" || - !Number.isFinite(rawY) - ) { - return null; + const sourcePath = getSystemCursorHelperSourcePath(); + const sourceStat = await fs.stat(sourcePath); + if (cachedSystemCursorAssets && cachedSystemCursorAssetsSourceMtimeMs === sourceStat.mtimeMs) { + return cachedSystemCursorAssets; } - return { x: rawX, y: rawY }; -} + const binaryPath = await ensureSwiftHelperBinary( + sourcePath, + getSystemCursorHelperBinaryPath(), + "system cursor helper", + "recordly-system-cursors", + ); -function pushCursorSample( - cx: number, - cy: number, - timeMs: number, - interactionType: CursorInteractionType = "move", - cursorType?: CursorVisualType, -) { - activeCursorSamples.push({ - timeMs: Math.max(0, timeMs), - cx, - cy, - interactionType, - cursorType: cursorType ?? 
currentCursorVisualType, + const { stdout } = await execFileAsync(binaryPath, [], { + timeout: 15000, + maxBuffer: 20 * 1024 * 1024, }); + const parsed = JSON.parse(stdout) as Record>; + const result = Object.fromEntries( + Object.entries(parsed).filter( + ([, asset]) => + typeof asset?.dataUrl === "string" && + typeof asset?.hotspotX === "number" && + typeof asset?.hotspotY === "number" && + typeof asset?.width === "number" && + typeof asset?.height === "number", + ), + ) as Record; + setCachedSystemCursorAssets(result); + setCachedSystemCursorAssetsSourceMtimeMs(sourceStat.mtimeMs); - if (activeCursorSamples.length > MAX_CURSOR_SAMPLES) { - activeCursorSamples.shift(); - } -} - -function sampleCursorPoint() { - const point = getNormalizedCursorPoint(); - if (!point) { - return; - } - - pushCursorSample(point.cx, point.cy, Date.now() - cursorCaptureStartTimeMs, "move"); -} - -async function persistPendingCursorTelemetry(videoPath: string) { - const telemetryPath = getTelemetryPathForVideo(videoPath); - if (pendingCursorSamples.length > 0) { - await fs.writeFile( - telemetryPath, - JSON.stringify( - { version: CURSOR_TELEMETRY_VERSION, samples: pendingCursorSamples }, - null, - 2, - ), - "utf-8", - ); - } - pendingCursorSamples = []; -} - -function snapshotCursorTelemetryForPersistence() { - if (activeCursorSamples.length === 0) { - return; - } - - if (pendingCursorSamples.length === 0) { - pendingCursorSamples = [...activeCursorSamples]; - return; - } - - const lastPendingTimeMs = pendingCursorSamples[pendingCursorSamples.length - 1]?.timeMs ?? 
-1; - pendingCursorSamples = [ - ...pendingCursorSamples, - ...activeCursorSamples.filter((sample) => sample.timeMs > lastPendingTimeMs), - ]; -} - -async function finalizeStoredVideo(videoPath: string) { - // Safety net: if companion audio files still exist, the mux was skipped — attempt it now - if (videoPath.endsWith(".mp4")) { - const companionCandidates = await getUsableCompanionAudioCandidates(videoPath); - for (const { systemPath, micPath, platform } of companionCandidates) { - if (platform === "mac" || platform === "win") { - console.log( - `[finalize] Detected un-muxed ${platform} audio files alongside video — attempting safety-net mux`, - ); - try { - if (platform === "win") { - await muxNativeWindowsVideoWithAudio(videoPath, systemPath, micPath); - } else { - await muxNativeMacRecordingWithAudio(videoPath, systemPath, micPath); - } - console.log("[finalize] Safety-net mux completed successfully"); - } catch (error) { - console.warn("[finalize] Safety-net mux failed:", error); - } - break; - } - } - } - - let validation: { fileSizeBytes: number; durationSeconds: number | null } | null = null; - try { - validation = await validateRecordedVideo(videoPath); - } catch (error) { - console.warn("Video validation failed (proceeding anyway):", error); - } - - snapshotCursorTelemetryForPersistence(); - currentVideoPath = videoPath; - currentProjectPath = null; - await persistPendingCursorTelemetry(videoPath); - if (isAutoRecordingPath(videoPath)) { - await pruneAutoRecordings([videoPath]); - } - - if (lastNativeCaptureDiagnostics?.backend === "mac-screencapturekit") { - recordNativeCaptureDiagnostics({ - backend: "mac-screencapturekit", - phase: "stop", - sourceId: lastNativeCaptureDiagnostics.sourceId ?? null, - sourceType: lastNativeCaptureDiagnostics.sourceType ?? "unknown", - displayId: lastNativeCaptureDiagnostics.displayId ?? null, - displayBounds: lastNativeCaptureDiagnostics.displayBounds ?? 
null, - windowHandle: lastNativeCaptureDiagnostics.windowHandle ?? null, - helperPath: lastNativeCaptureDiagnostics.helperPath ?? null, - outputPath: videoPath, - systemAudioPath: lastNativeCaptureDiagnostics.systemAudioPath ?? null, - microphonePath: lastNativeCaptureDiagnostics.microphonePath ?? null, - osRelease: lastNativeCaptureDiagnostics.osRelease, - supported: lastNativeCaptureDiagnostics.supported, - helperExists: lastNativeCaptureDiagnostics.helperExists, - processOutput: lastNativeCaptureDiagnostics.processOutput, - fileSizeBytes: validation?.fileSizeBytes ?? null, - }); - } - - return { - success: true, - path: videoPath, - message: - validation?.durationSeconds !== null && validation !== null - ? `Video stored successfully (${validation.fileSizeBytes} bytes, ${validation.durationSeconds.toFixed(2)}s)` - : `Video stored successfully`, - }; -} - -async function recoverNativeMacCaptureOutput() { - const macDiagnostics = - lastNativeCaptureDiagnostics?.backend === "mac-screencapturekit" - ? lastNativeCaptureDiagnostics - : null; - const diagnosticsPath = macDiagnostics?.outputPath ?? null; - const candidatePath = nativeCaptureTargetPath ?? diagnosticsPath; - const systemAudioPath = nativeCaptureSystemAudioPath ?? macDiagnostics?.systemAudioPath ?? null; - const microphonePath = nativeCaptureMicrophonePath ?? macDiagnostics?.microphonePath ?? 
null; - - if (!candidatePath) { - return null; - } - - try { - if (systemAudioPath || microphonePath) { - try { - await muxNativeMacRecordingWithAudio( - candidatePath, - systemAudioPath, - microphonePath, - ); - } catch (muxError) { - console.warn("Failed to mux audio during recovery:", muxError); - } - } - - return await finalizeStoredVideo(candidatePath); - } catch (error) { - recordNativeCaptureDiagnostics({ - backend: "mac-screencapturekit", - phase: "stop", - outputPath: candidatePath, - systemAudioPath, - microphonePath, - processOutput: nativeCaptureOutputBuffer.trim() || undefined, - fileSizeBytes: await getFileSizeIfPresent(candidatePath), - error: String(error), - }); - return null; - } + return result; } -async function startInteractionCapture() { - if (!isCursorCaptureActive) { - return; - } - - if (!["darwin", "win32", "linux"].includes(process.platform)) { - return; - } - - try { - const hook = loadUiohookModule(); - console.log( - "[CursorTelemetry] hook loaded:", - !!hook, - "has.on:", - typeof hook?.on, - "has.start:", - typeof hook?.start, - ); - if (!isCursorCaptureActive) { - return; - } - - if (!hook || typeof hook.on !== "function" || typeof hook.start !== "function") { - console.log("[CursorTelemetry] hook unusable — aborting interaction capture"); - return; - } - - const onMouseDown = (event: HookMouseEvent) => { - if (!isCursorCaptureActive) { - return; - } - - const point = getNormalizedCursorPoint(); - if (!point) { - return; - } - - const timeMs = Date.now() - cursorCaptureStartTimeMs; - const button = getHookMouseButton(event); - let interactionType: CursorInteractionType = "click"; - - if (button === 2) { - interactionType = "right-click"; - } else if (button === 3) { - interactionType = "middle-click"; - } else { - const thresholdMs = 350; - const distance = lastLeftClick - ? 
Math.hypot(point.cx - lastLeftClick.cx, point.cy - lastLeftClick.cy) - : Number.POSITIVE_INFINITY; - - if ( - lastLeftClick && - timeMs - lastLeftClick.timeMs <= thresholdMs && - distance <= 0.04 - ) { - interactionType = "double-click"; - } - - lastLeftClick = { timeMs, cx: point.cx, cy: point.cy }; - } - - pushCursorSample(point.cx, point.cy, timeMs, interactionType); - }; - - const onMouseUp = () => { - if (!isCursorCaptureActive) { - return; - } - - const point = getNormalizedCursorPoint(); - if (!point) { - return; - } - - const timeMs = Date.now() - cursorCaptureStartTimeMs; - pushCursorSample(point.cx, point.cy, timeMs, "mouseup"); - }; - - const onMouseMove = (event: HookMouseEvent) => { - if (process.platform !== "linux" || !isCursorCaptureActive) { - return; - } - - const point = getHookCursorScreenPoint(event); - if (!point) { - return; - } - - linuxCursorScreenPoint = { x: point.x, y: point.y, updatedAt: Date.now() }; - }; - - hook.on("mousedown", onMouseDown); - hook.on("mouseup", onMouseUp); - hook.on("mousemove", onMouseMove); - - hook.start(); - - interactionCaptureCleanup = () => { - try { - if (typeof hook.off === "function") { - hook.off("mousedown", onMouseDown); - hook.off("mouseup", onMouseUp); - hook.off("mousemove", onMouseMove); - } else if (typeof hook.removeListener === "function") { - hook.removeListener("mousedown", onMouseDown); - hook.removeListener("mouseup", onMouseUp); - hook.removeListener("mousemove", onMouseMove); - } - } catch { - // ignore listener cleanup errors - } - - try { - if (typeof hook.stop === "function") { - hook.stop(); - } - } catch { - // ignore hook shutdown errors - } - }; - } catch (error) { - if (!hasLoggedInteractionHookFailure) { - hasLoggedInteractionHookFailure = true; - console.warn("[CursorTelemetry] Global interaction capture unavailable:", error); - } +function isTrustedProjectPath(filePath?: string | null) { + if (!filePath || !currentProjectPath) { + return false; } + return normalizePath(filePath) 
=== normalizePath(currentProjectPath); } export function registerIpcHandlers( @@ -4575,7 +515,7 @@ export function registerIpcHandlers( }); ipcMain.handle("select-source", (_, source: SelectedSource) => { - selectedSource = source; + setSelectedSource(source); broadcastSelectedSourceChange(); stopWindowBoundsCapture(); const sourceSelectorWin = getSourceSelectorWindow(); @@ -4764,15 +704,16 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} if (windowsCaptureProcess && !windowsNativeCaptureActive) { try { windowsCaptureProcess.kill() } catch { /* ignore */ } - windowsCaptureProcess = null - windowsCaptureTargetPath = null - windowsCaptureStopRequested = false + setWindowsCaptureProcess(null) + setWindowsCaptureTargetPath(null) + setWindowsCaptureStopRequested(false) } if (windowsCaptureProcess) { return { success: false, message: 'A native Windows screen recording is already active.' } } + let wcProc: ChildProcessWithoutNullStreams | null = null try { const exePath = getWindowsCaptureExePath() const recordingsDir = await getRecordingsDir() @@ -4789,7 +730,7 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} const audioPath = path.join(recordingsDir, `recording-${timestamp}.system.wav`) config.captureSystemAudio = true config.audioOutputPath = audioPath - windowsSystemAudioPath = audioPath + setWindowsSystemAudioPath(audioPath) } if (options?.capturesMicrophone) { @@ -4799,7 +740,7 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} if (options.microphoneLabel) { config.micDeviceName = options.microphoneLabel } - windowsMicAudioPath = micPath + setWindowsMicAudioPath(micPath) } const windowId = parseWindowId(source?.id) @@ -4835,26 +776,27 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} microphonePath: windowsMicAudioPath, }) - windowsCaptureOutputBuffer = '' - windowsCaptureTargetPath = outputPath - windowsCaptureStopRequested = false - windowsCapturePaused = false - 
windowsCaptureProcess = spawn(exePath, [JSON.stringify(config)], { + setWindowsCaptureOutputBuffer('') + setWindowsCaptureTargetPath(outputPath) + setWindowsCaptureStopRequested(false) + setWindowsCapturePaused(false) + wcProc = spawn(exePath, [JSON.stringify(config)], { cwd: recordingsDir, stdio: ['pipe', 'pipe', 'pipe'], }) - attachWindowsCaptureLifecycle(windowsCaptureProcess) + setWindowsCaptureProcess(wcProc) + attachWindowsCaptureLifecycle(wcProc) - windowsCaptureProcess.stdout.on('data', (chunk: Buffer) => { - windowsCaptureOutputBuffer += chunk.toString() + wcProc.stdout.on('data', (chunk: Buffer) => { + setWindowsCaptureOutputBuffer(windowsCaptureOutputBuffer + chunk.toString()) }) - windowsCaptureProcess.stderr.on('data', (chunk: Buffer) => { - windowsCaptureOutputBuffer += chunk.toString() + wcProc.stderr.on('data', (chunk: Buffer) => { + setWindowsCaptureOutputBuffer(windowsCaptureOutputBuffer + chunk.toString()) }) - await waitForWindowsCaptureStart(windowsCaptureProcess) - windowsNativeCaptureActive = true - nativeScreenRecordingActive = true + await waitForWindowsCaptureStart(wcProc) + setWindowsNativeCaptureActive(true) + setNativeScreenRecordingActive(true) recordNativeCaptureDiagnostics({ backend: 'windows-wgc', phase: 'start', @@ -4884,13 +826,13 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} error: String(error), }) console.error('Failed to start native Windows capture:', error) - try { windowsCaptureProcess?.kill() } catch { /* ignore */ } - windowsNativeCaptureActive = false - nativeScreenRecordingActive = false - windowsCaptureProcess = null - windowsCaptureTargetPath = null - windowsCaptureStopRequested = false - windowsCapturePaused = false + try { if (wcProc) wcProc.kill() } catch { /* ignore */ } + setWindowsNativeCaptureActive(false) + setNativeScreenRecordingActive(false) + setWindowsCaptureProcess(null) + setWindowsCaptureTargetPath(null) + setWindowsCaptureStopRequested(false) + setWindowsCapturePaused(false) 
return { success: false, message: 'Failed to start native Windows capture', @@ -4909,15 +851,16 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} } catch { // ignore stale helper cleanup failures } - nativeCaptureProcess = null - nativeCaptureTargetPath = null - nativeCaptureStopRequested = false + setNativeCaptureProcess(null) + setNativeCaptureTargetPath(null) + setNativeCaptureStopRequested(false) } if (nativeCaptureProcess) { return { success: false, message: 'A native screen recording is already active.' } } + let captProc: ChildProcessWithoutNullStreams | null = null try { const recordingsDir = await getRecordingsDir() @@ -4996,34 +939,35 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} config.displayId = Number(getScreen().getPrimaryDisplay().id) } - nativeCaptureOutputBuffer = '' - nativeCaptureTargetPath = outputPath - nativeCaptureSystemAudioPath = systemAudioOutputPath - nativeCaptureMicrophonePath = microphoneOutputPath - nativeCaptureStopRequested = false - nativeCapturePaused = false - nativeCaptureProcess = spawn(helperPath, [JSON.stringify(config)], { + setNativeCaptureOutputBuffer('') + setNativeCaptureTargetPath(outputPath) + setNativeCaptureSystemAudioPath(systemAudioOutputPath) + setNativeCaptureMicrophonePath(microphoneOutputPath) + setNativeCaptureStopRequested(false) + setNativeCapturePaused(false) + captProc = spawn(helperPath, [JSON.stringify(config)], { cwd: recordingsDir, stdio: ['pipe', 'pipe', 'pipe'], }) - attachNativeCaptureLifecycle(nativeCaptureProcess) + setNativeCaptureProcess(captProc) + attachNativeCaptureLifecycle(captProc) - nativeCaptureProcess.stdout.on('data', (chunk: Buffer) => { - nativeCaptureOutputBuffer += chunk.toString() + captProc.stdout.on('data', (chunk: Buffer) => { + setNativeCaptureOutputBuffer(nativeCaptureOutputBuffer + chunk.toString()) }) - nativeCaptureProcess.stderr.on('data', (chunk: Buffer) => { - nativeCaptureOutputBuffer += chunk.toString() + 
captProc.stderr.on('data', (chunk: Buffer) => { + setNativeCaptureOutputBuffer(nativeCaptureOutputBuffer + chunk.toString()) }) - await waitForNativeCaptureStart(nativeCaptureProcess) - nativeScreenRecordingActive = true + await waitForNativeCaptureStart(captProc) + setNativeScreenRecordingActive(true) // If the native helper reported MICROPHONE_CAPTURE_UNAVAILABLE, it started // capture without microphone. Clear the mic path so the renderer can fall // back to a browser-side sidecar recording for the microphone track. const micUnavailableNatively = nativeCaptureOutputBuffer.includes('MICROPHONE_CAPTURE_UNAVAILABLE') if (micUnavailableNatively) { - nativeCaptureMicrophonePath = null + setNativeCaptureMicrophonePath(null) } recordNativeCaptureDiagnostics({ @@ -5057,14 +1001,14 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} if (response === 0) { await shell.openExternal(getMacPrivacySettingsUrl('screen')) } - try { nativeCaptureProcess?.kill() } catch { /* ignore */ } - nativeScreenRecordingActive = false - nativeCaptureProcess = null - nativeCaptureTargetPath = null - nativeCaptureSystemAudioPath = null - nativeCaptureMicrophonePath = null - nativeCaptureStopRequested = false - nativeCapturePaused = false + try { if (captProc) captProc.kill() } catch { /* ignore */ } + setNativeScreenRecordingActive(false) + setNativeCaptureProcess(null) + setNativeCaptureTargetPath(null) + setNativeCaptureSystemAudioPath(null) + setNativeCaptureMicrophonePath(null) + setNativeCaptureStopRequested(false) + setNativeCapturePaused(false) return { success: false, message: 'Screen recording permission not granted. 
Please allow access in System Settings and restart the app.', @@ -5085,14 +1029,14 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} if (response === 0) { await shell.openExternal(getMacPrivacySettingsUrl('microphone')) } - try { nativeCaptureProcess?.kill() } catch { /* ignore */ } - nativeScreenRecordingActive = false - nativeCaptureProcess = null - nativeCaptureTargetPath = null - nativeCaptureSystemAudioPath = null - nativeCaptureMicrophonePath = null - nativeCaptureStopRequested = false - nativeCapturePaused = false + try { if (captProc) captProc.kill() } catch { /* ignore */ } + setNativeScreenRecordingActive(false) + setNativeCaptureProcess(null) + setNativeCaptureTargetPath(null) + setNativeCaptureSystemAudioPath(null) + setNativeCaptureMicrophonePath(null) + setNativeCaptureStopRequested(false) + setNativeCapturePaused(false) return { success: false, message: 'Microphone permission not granted. Please allow access in System Settings.', @@ -5114,17 +1058,17 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} error: String(error), }) try { - nativeCaptureProcess?.kill() + if (captProc) captProc.kill() } catch { // ignore cleanup failures } - nativeScreenRecordingActive = false - nativeCaptureProcess = null - nativeCaptureTargetPath = null - nativeCaptureSystemAudioPath = null - nativeCaptureMicrophonePath = null - nativeCaptureStopRequested = false - nativeCapturePaused = false + setNativeScreenRecordingActive(false) + setNativeCaptureProcess(null) + setNativeCaptureTargetPath(null) + setNativeCaptureSystemAudioPath(null) + setNativeCaptureMicrophonePath(null) + setNativeCaptureStopRequested(false) + setNativeCapturePaused(false) return { success: false, message: 'Failed to start native ScreenCaptureKit recording', @@ -5143,22 +1087,22 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} const proc = windowsCaptureProcess const preferredVideoPath = windowsCaptureTargetPath - 
windowsCaptureStopRequested = true + setWindowsCaptureStopRequested(true) proc.stdin.write('stop\n') const tempVideoPath = await waitForWindowsCaptureStop(proc) - windowsCaptureProcess = null - windowsNativeCaptureActive = false - nativeScreenRecordingActive = false - windowsCaptureTargetPath = null - windowsCaptureStopRequested = false - windowsCapturePaused = false + setWindowsCaptureProcess(null) + setWindowsNativeCaptureActive(false) + setNativeScreenRecordingActive(false) + setWindowsCaptureTargetPath(null) + setWindowsCaptureStopRequested(false) + setWindowsCapturePaused(false) const finalVideoPath = preferredVideoPath ?? tempVideoPath if (tempVideoPath !== finalVideoPath) { await moveFileWithOverwrite(tempVideoPath, finalVideoPath) } - windowsPendingVideoPath = finalVideoPath + setWindowsPendingVideoPath(finalVideoPath) recordNativeCaptureDiagnostics({ backend: 'windows-wgc', phase: 'stop', @@ -5172,20 +1116,20 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} } catch (error) { console.error('Failed to stop native Windows capture:', error) const fallbackPath = windowsCaptureTargetPath - windowsNativeCaptureActive = false - nativeScreenRecordingActive = false - windowsCaptureProcess = null - windowsCaptureTargetPath = null - windowsCaptureStopRequested = false - windowsCapturePaused = false - windowsSystemAudioPath = null - windowsMicAudioPath = null - windowsPendingVideoPath = null + setWindowsNativeCaptureActive(false) + setNativeScreenRecordingActive(false) + setWindowsCaptureProcess(null) + setWindowsCaptureTargetPath(null) + setWindowsCaptureStopRequested(false) + setWindowsCapturePaused(false) + setWindowsSystemAudioPath(null) + setWindowsMicAudioPath(null) + setWindowsPendingVideoPath(null) if (fallbackPath) { try { await fs.access(fallbackPath) - windowsPendingVideoPath = fallbackPath + setWindowsPendingVideoPath(fallbackPath) recordNativeCaptureDiagnostics({ backend: 'windows-wgc', phase: 'stop', @@ -5243,17 +1187,17 @@ 
body{background:transparent;overflow:hidden;width:100vw;height:100vh} const preferredSystemAudioPath = nativeCaptureSystemAudioPath const preferredMicrophonePath = nativeCaptureMicrophonePath console.log('[stop-native] Audio paths — system:', preferredSystemAudioPath, 'mic:', preferredMicrophonePath) - nativeCaptureStopRequested = true + setNativeCaptureStopRequested(true) process.stdin.write('stop\n') const tempVideoPath = await waitForNativeCaptureStop(process) console.log('[stop-native] Helper stopped, tempVideoPath:', tempVideoPath) - nativeCaptureProcess = null - nativeScreenRecordingActive = false - nativeCaptureTargetPath = null - nativeCaptureSystemAudioPath = null - nativeCaptureMicrophonePath = null - nativeCaptureStopRequested = false - nativeCapturePaused = false + setNativeCaptureProcess(null) + setNativeScreenRecordingActive(false) + setNativeCaptureTargetPath(null) + setNativeCaptureSystemAudioPath(null) + setNativeCaptureMicrophonePath(null) + setNativeCaptureStopRequested(false) + setNativeCapturePaused(false) const finalVideoPath = preferredVideoPath ?? 
tempVideoPath if (tempVideoPath !== finalVideoPath) { @@ -5279,13 +1223,13 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} const fallbackSystemAudioPath = nativeCaptureSystemAudioPath const fallbackMicrophonePath = nativeCaptureMicrophonePath const fallbackFileSizeBytes = await getFileSizeIfPresent(fallbackPath) - nativeScreenRecordingActive = false - nativeCaptureProcess = null - nativeCaptureTargetPath = null - nativeCaptureSystemAudioPath = null - nativeCaptureMicrophonePath = null - nativeCaptureStopRequested = false - nativeCapturePaused = false + setNativeScreenRecordingActive(false) + setNativeCaptureProcess(null) + setNativeCaptureTargetPath(null) + setNativeCaptureSystemAudioPath(null) + setNativeCaptureMicrophonePath(null) + setNativeCaptureStopRequested(false) + setNativeCapturePaused(false) recordNativeCaptureDiagnostics({ backend: 'mac-screencapturekit', @@ -5370,7 +1314,7 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} try { windowsCaptureProcess.stdin.write('pause\n') - windowsCapturePaused = true + setWindowsCapturePaused(true) return { success: true } } catch (error) { return { success: false, message: 'Failed to pause native Windows capture', error: String(error) } @@ -5391,7 +1335,7 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} try { nativeCaptureProcess.stdin.write('pause\n') - nativeCapturePaused = true + setNativeCapturePaused(true) return { success: true } } catch (error) { return { success: false, message: 'Failed to pause native screen recording', error: String(error) } @@ -5410,7 +1354,7 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} try { windowsCaptureProcess.stdin.write('resume\n') - windowsCapturePaused = false + setWindowsCapturePaused(false) return { success: true } } catch (error) { return { success: false, message: 'Failed to resume native Windows capture', error: String(error) } @@ -5431,7 +1375,7 @@ 
body{background:transparent;overflow:hidden;width:100vw;height:100vh} try { nativeCaptureProcess.stdin.write('resume\n') - nativeCapturePaused = false + setNativeCapturePaused(false) return { success: true } } catch (error) { return { success: false, message: 'Failed to resume native screen recording', error: String(error) } @@ -5475,7 +1419,7 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} ipcMain.handle('mux-native-windows-recording', async (_event, pauseSegments?: PauseSegment[]) => { const videoPath = windowsPendingVideoPath - windowsPendingVideoPath = null + setWindowsPendingVideoPath(null) if (!videoPath) { return { success: false, message: 'No native Windows video pending for mux' } @@ -5484,8 +1428,8 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} try { if (windowsSystemAudioPath || windowsMicAudioPath) { await muxNativeWindowsVideoWithAudio(videoPath, windowsSystemAudioPath, windowsMicAudioPath, pauseSegments ?? []) - windowsSystemAudioPath = null - windowsMicAudioPath = null + setWindowsSystemAudioPath(null) + setWindowsMicAudioPath(null) } recordNativeCaptureDiagnostics({ @@ -5506,8 +1450,8 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} fileSizeBytes: await getFileSizeIfPresent(videoPath), error: String(error), }) - windowsSystemAudioPath = null - windowsMicAudioPath = null + setWindowsSystemAudioPath(null) + setWindowsMicAudioPath(null) try { return await finalizeStoredVideo(videoPath) } catch { @@ -5527,28 +1471,29 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} const outputPath = path.join(recordingsDir, `recording-${Date.now()}.mp4`) const args = await buildFfmpegCaptureArgs(source, outputPath) - ffmpegCaptureOutputBuffer = '' - ffmpegCaptureTargetPath = outputPath - ffmpegCaptureProcess = spawn(ffmpegPath, args, { + setFfmpegCaptureOutputBuffer('') + setFfmpegCaptureTargetPath(outputPath) + const ffProc = spawn(ffmpegPath, args, { cwd: recordingsDir, 
stdio: ['pipe', 'pipe', 'pipe'], }) + setFfmpegCaptureProcess(ffProc) - ffmpegCaptureProcess.stdout.on('data', (chunk: Buffer) => { - ffmpegCaptureOutputBuffer += chunk.toString() + ffProc.stdout.on('data', (chunk: Buffer) => { + setFfmpegCaptureOutputBuffer(ffmpegCaptureOutputBuffer + chunk.toString()) }) - ffmpegCaptureProcess.stderr.on('data', (chunk: Buffer) => { - ffmpegCaptureOutputBuffer += chunk.toString() + ffProc.stderr.on('data', (chunk: Buffer) => { + setFfmpegCaptureOutputBuffer(ffmpegCaptureOutputBuffer + chunk.toString()) }) - await waitForFfmpegCaptureStart(ffmpegCaptureProcess) - ffmpegScreenRecordingActive = true + await waitForFfmpegCaptureStart(ffProc) + setFfmpegScreenRecordingActive(true) return { success: true } } catch (error) { console.error('Failed to start FFmpeg recording:', error) - ffmpegScreenRecordingActive = false - ffmpegCaptureProcess = null - ffmpegCaptureTargetPath = null + setFfmpegScreenRecordingActive(false) + setFfmpegCaptureProcess(null) + setFfmpegCaptureTargetPath(null) return { success: false, message: 'Failed to start FFmpeg recording', @@ -5572,16 +1517,16 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} process.stdin.write('q\n') const finalVideoPath = await waitForFfmpegCaptureStop(process, outputPath) - ffmpegCaptureProcess = null - ffmpegCaptureTargetPath = null - ffmpegScreenRecordingActive = false + setFfmpegCaptureProcess(null) + setFfmpegCaptureTargetPath(null) + setFfmpegScreenRecordingActive(false) return await finalizeStoredVideo(finalVideoPath) } catch (error) { console.error('Failed to stop FFmpeg recording:', error) - ffmpegCaptureProcess = null - ffmpegCaptureTargetPath = null - ffmpegScreenRecordingActive = false + setFfmpegCaptureProcess(null) + setFfmpegCaptureTargetPath(null) + setFfmpegScreenRecordingActive(false) return { success: false, message: 'Failed to stop FFmpeg recording', @@ -5648,25 +1593,25 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} 
stopInteractionCapture() startWindowBoundsCapture() void startNativeCursorMonitor() - isCursorCaptureActive = true - activeCursorSamples = [] - pendingCursorSamples = [] - cursorCaptureStartTimeMs = Date.now() - linuxCursorScreenPoint = null - lastLeftClick = null + setIsCursorCaptureActive(true) + setActiveCursorSamples([]) + setPendingCursorSamples([]) + setCursorCaptureStartTimeMs(Date.now()) + setLinuxCursorScreenPoint(null) + setLastLeftClick(null) sampleCursorPoint() - cursorCaptureInterval = setInterval(sampleCursorPoint, CURSOR_SAMPLE_INTERVAL_MS) + setCursorCaptureInterval(setInterval(sampleCursorPoint, CURSOR_SAMPLE_INTERVAL_MS)) void startInteractionCapture() } else { - isCursorCaptureActive = false + setIsCursorCaptureActive(false) stopCursorCapture() stopInteractionCapture() stopWindowBoundsCapture() stopNativeCursorMonitor() showCursor() - linuxCursorScreenPoint = null + setLinuxCursorScreenPoint(null) snapshotCursorTelemetryForPersistence() - activeCursorSamples = [] + setActiveCursorSamples([]) } const source = selectedSource || { name: 'Screen' } @@ -6302,7 +2247,7 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} } approveUserPath(result.filePaths[0]) - currentProjectPath = null + setCurrentProjectPath(null) return { success: true, path: result.filePaths[0] @@ -6568,7 +2513,7 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} if (trustedExistingProjectPath) { await fs.writeFile(trustedExistingProjectPath, JSON.stringify(projectData, null, 2), 'utf-8') - currentProjectPath = trustedExistingProjectPath + setCurrentProjectPath(trustedExistingProjectPath) await saveProjectThumbnail(trustedExistingProjectPath, thumbnailDataUrl) await rememberRecentProject(trustedExistingProjectPath) return { @@ -6602,7 +2547,7 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} } await fs.writeFile(result.filePath, JSON.stringify(projectData, null, 2), 'utf-8') - currentProjectPath = result.filePath + 
setCurrentProjectPath(result.filePath) await saveProjectThumbnail(result.filePath, thumbnailDataUrl) await rememberRecentProject(result.filePath) @@ -6727,16 +2672,16 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} } }) ipcMain.handle('set-current-video-path', async (_, path: string) => { - currentVideoPath = normalizeVideoSourcePath(path) ?? path + setCurrentVideoPath(normalizeVideoSourcePath(path) ?? path) approveUserPath(currentVideoPath) const resolvedSession = await resolveRecordingSession(currentVideoPath) ?? { - videoPath: currentVideoPath, + videoPath: currentVideoPath!, webcamPath: null, timeOffsetMs: 0, } - currentRecordingSession = resolvedSession + setCurrentRecordingSession(resolvedSession) await replaceApprovedSessionLocalReadPaths([ resolvedSession.videoPath, resolvedSession.webcamPath, @@ -6746,24 +2691,24 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} await persistRecordingSessionManifest(resolvedSession) } - currentProjectPath = null + setCurrentProjectPath(null) return { success: true, webcamPath: resolvedSession.webcamPath ?? null } }) ipcMain.handle('set-current-recording-session', async (_, session: { videoPath: string; webcamPath?: string | null; timeOffsetMs?: number }) => { const normalizedVideoPath = normalizeVideoSourcePath(session.videoPath) ?? session.videoPath - currentVideoPath = normalizedVideoPath - currentRecordingSession = { + setCurrentVideoPath(normalizedVideoPath) + setCurrentRecordingSession({ videoPath: normalizedVideoPath, webcamPath: normalizeVideoSourcePath(session.webcamPath ?? 
null), timeOffsetMs: normalizeRecordingTimeOffsetMs(session.timeOffsetMs), - } + }); await replaceApprovedSessionLocalReadPaths([ - currentRecordingSession.videoPath, - currentRecordingSession.webcamPath, + currentRecordingSession!.videoPath, + currentRecordingSession!.webcamPath, ]) - currentProjectPath = null - await persistRecordingSessionManifest(currentRecordingSession) + setCurrentProjectPath(null) + await persistRecordingSessionManifest(currentRecordingSession!) return { success: true } }) @@ -6783,8 +2728,8 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} }); ipcMain.handle('clear-current-video-path', () => { - currentVideoPath = null; - currentRecordingSession = null; + setCurrentVideoPath(null); + setCurrentRecordingSession(null); return { success: true }; }); @@ -6798,8 +2743,8 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} const telemetryPath = getTelemetryPathForVideo(filePath); await fs.unlink(telemetryPath).catch(() => {}); if (currentVideoPath === filePath) { - currentVideoPath = null; - currentRecordingSession = null; + setCurrentVideoPath(null); + setCurrentRecordingSession(null); } return { success: true }; } catch (error) { @@ -6907,9 +2852,9 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} return { success: false, error: 'Countdown already in progress' } } - countdownInProgress = true - countdownCancelled = false - countdownRemaining = seconds + setCountdownInProgress(true) + setCountdownCancelled(false) + setCountdownRemaining(seconds) const countdownWin = createCountdownWindow() @@ -6923,34 +2868,34 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} return new Promise<{ success: boolean; cancelled?: boolean }>((resolve) => { let remaining = seconds - countdownRemaining = remaining + setCountdownRemaining(remaining) countdownWin.webContents.send('countdown-tick', remaining) - countdownTimer = setInterval(() => { + setCountdownTimer(setInterval(() => 
{ if (countdownCancelled) { if (countdownTimer) { clearInterval(countdownTimer) - countdownTimer = null + setCountdownTimer(null) } closeCountdownWindow() - countdownInProgress = false - countdownRemaining = null + setCountdownInProgress(false) + setCountdownRemaining(null) resolve({ success: false, cancelled: true }) return } remaining-- - countdownRemaining = remaining + setCountdownRemaining(remaining) if (remaining <= 0) { if (countdownTimer) { clearInterval(countdownTimer) - countdownTimer = null + setCountdownTimer(null) } closeCountdownWindow() - countdownInProgress = false - countdownRemaining = null + setCountdownInProgress(false) + setCountdownRemaining(null) resolve({ success: true }) } else { const win = getCountdownWindow() @@ -6958,17 +2903,17 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh} win.webContents.send('countdown-tick', remaining) } } - }, 1000) + }, 1000)) }) }) ipcMain.handle('cancel-countdown', () => { - countdownCancelled = true - countdownInProgress = false - countdownRemaining = null + setCountdownCancelled(true) + setCountdownInProgress(false) + setCountdownRemaining(null) if (countdownTimer) { clearInterval(countdownTimer) - countdownTimer = null + setCountdownTimer(null) } closeCountdownWindow() return { success: true } diff --git a/electron/ipc/paths/binaries.ts b/electron/ipc/paths/binaries.ts new file mode 100644 index 00000000..c43cb97c --- /dev/null +++ b/electron/ipc/paths/binaries.ts @@ -0,0 +1,249 @@ +import { spawnSync } from "node:child_process"; +import { existsSync, constants as fsConstants } from "node:fs"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { app } from "electron"; +import { + nativeHelperMigrationPromise, + setNativeHelperMigrationPromise, +} from "../state"; + +/** + * Resolve a path within the app bundle, handling asar unpacking in production. 
+ * Files listed in asarUnpack are extracted to app.asar.unpacked/ and must be + * accessed via that path instead of the asar virtual filesystem. + */ +export function resolveUnpackedAppPath(...segments: string[]): string { + const base = app.getAppPath(); + const resolved = path.join(base, ...segments); + if (app.isPackaged) { + return resolved.replace(/\.asar([/\\])/, ".asar.unpacked$1"); + } + return resolved; +} + +export function getNativeCaptureHelperSourcePath(): string { + return resolveUnpackedAppPath("electron", "native", "ScreenCaptureKitRecorder.swift"); +} + +export function getNativeArchTag(): string { + if (process.platform === "darwin") { + return process.arch === "arm64" ? "darwin-arm64" : "darwin-x64"; + } + + if (process.platform === "win32") { + return process.arch === "arm64" ? "win32-arm64" : "win32-x64"; + } + + if (process.platform === "linux") { + return process.arch === "arm64" ? "linux-arm64" : "linux-x64"; + } + + return `${process.platform}-${process.arch}`; +} + +export function getPrebundledNativeHelperPath(binaryName: string): string { + return resolveUnpackedAppPath("electron", "native", "bin", getNativeArchTag(), binaryName); +} + +export function resolvePreferredWindowsNativeHelperPath( + helperDirectory: string, + binaryName: string, +): string { + const buildOutputPath = resolveUnpackedAppPath( + "electron", + "native", + helperDirectory, + "build", + "Release", + binaryName, + ); + const prebundledPath = getPrebundledNativeHelperPath(binaryName); + + if (existsSync(buildOutputPath)) { + return buildOutputPath; + } + + if (existsSync(prebundledPath)) { + return prebundledPath; + } + + return buildOutputPath; +} + +export function getBundledWhisperExecutableCandidates(): string[] { + const binaryNames = + process.platform === "win32" + ? 
["whisper-cli.exe", "whisper-cpp.exe", "whisper.exe", "main.exe"] + : ["whisper-cli", "whisper-cpp", "whisper", "main"]; + + return binaryNames.map((binaryName) => getPrebundledNativeHelperPath(binaryName)); +} + +export function getNativeCaptureHelperBinaryPath(): string { + return path.join(app.getPath("userData"), "native-tools", "recordly-screencapturekit-helper"); +} + +export function getSystemCursorHelperSourcePath(): string { + return resolveUnpackedAppPath("electron", "native", "SystemCursorAssets.swift"); +} + +export function getSystemCursorHelperBinaryPath(): string { + return path.join(app.getPath("userData"), "native-tools", "recordly-system-cursors"); +} + +export function getNativeCursorMonitorSourcePath(): string { + return resolveUnpackedAppPath("electron", "native", "NativeCursorMonitor.swift"); +} + +export function getNativeCursorMonitorBinaryPath(): string { + return path.join(app.getPath("userData"), "native-tools", "recordly-native-cursor-monitor"); +} + +export function getNativeWindowListSourcePath(): string { + return resolveUnpackedAppPath("electron", "native", "ScreenCaptureKitWindowList.swift"); +} + +export function getNativeWindowListBinaryPath(): string { + return path.join(app.getPath("userData"), "native-tools", "recordly-window-list"); +} + +export function getWindowsCaptureExePath(): string { + return resolvePreferredWindowsNativeHelperPath("wgc-capture", "wgc-capture.exe"); +} + +export function getCursorMonitorExePath(): string { + return resolvePreferredWindowsNativeHelperPath("cursor-monitor", "cursor-monitor.exe"); +} + +async function migrateLegacyNativeHelperBinaries(): Promise { + const legacyToCurrentPaths: Array<[string, string]> = [ + [ + path.join(app.getPath("userData"), "native-tools", "openscreen-screencapturekit-helper"), + getNativeCaptureHelperBinaryPath(), + ], + [ + path.join(app.getPath("userData"), "native-tools", "openscreen-window-list"), + getNativeWindowListBinaryPath(), + ], + [ + 
path.join(app.getPath("userData"), "native-tools", "openscreen-system-cursors"), + getSystemCursorHelperBinaryPath(), + ], + [ + path.join(app.getPath("userData"), "native-tools", "openscreen-native-cursor-monitor"), + getNativeCursorMonitorBinaryPath(), + ], + ]; + + for (const [legacyPath, currentPath] of legacyToCurrentPaths) { + if (legacyPath === currentPath || existsSync(currentPath) || !existsSync(legacyPath)) { + continue; + } + + try { + await fs.mkdir(path.dirname(currentPath), { recursive: true }); + await fs.rename(legacyPath, currentPath); + } catch (error) { + console.warn("[native-tools] Failed to migrate helper binary", { + legacyPath, + currentPath, + error, + }); + } + } +} + +export async function ensureNativeHelperMigration(): Promise { + if (!nativeHelperMigrationPromise) { + setNativeHelperMigrationPromise( + migrateLegacyNativeHelperBinaries().catch((error) => { + setNativeHelperMigrationPromise(null); + throw error; + }), + ); + } + + return nativeHelperMigrationPromise!; +} + +export async function ensureSwiftHelperBinary( + sourcePath: string, + binaryPath: string, + label: string, + prebundledBinaryName?: string, +): Promise { + if (prebundledBinaryName) { + const prebundledPath = getPrebundledNativeHelperPath(prebundledBinaryName); + try { + await fs.access(prebundledPath, fsConstants.X_OK); + return prebundledPath; + } catch { + if (app.isPackaged) { + throw new Error( + `${label} is missing from this app build (${prebundledPath}). 
Reinstall or update the app.`, + ); + } + } + } + + const helperDir = path.dirname(binaryPath); + await fs.mkdir(helperDir, { recursive: true }); + + let shouldCompile = false; + try { + const [sourceStat, binaryStat] = await Promise.all([ + fs.stat(sourcePath), + fs.stat(binaryPath).catch(() => null), + ]); + shouldCompile = !binaryStat || sourceStat.mtimeMs > binaryStat.mtimeMs; + } catch (error) { + throw new Error(`${label} source is unavailable: ${String(error)}`); + } + + if (!shouldCompile) { + return binaryPath; + } + + const result = spawnSync("swiftc", ["-O", sourcePath, "-o", binaryPath], { + encoding: "utf8", + timeout: 120000, + }); + + if (result.status !== 0) { + const details = [result.stderr, result.stdout].filter(Boolean).join("\n").trim(); + throw new Error(details || `Failed to compile ${label}`); + } + + return binaryPath; +} + +export async function ensureNativeCaptureHelperBinary(): Promise { + await ensureNativeHelperMigration(); + return ensureSwiftHelperBinary( + getNativeCaptureHelperSourcePath(), + getNativeCaptureHelperBinaryPath(), + "native ScreenCaptureKit helper", + "recordly-screencapturekit-helper", + ); +} + +export async function ensureNativeWindowListBinary(): Promise { + await ensureNativeHelperMigration(); + return ensureSwiftHelperBinary( + getNativeWindowListSourcePath(), + getNativeWindowListBinaryPath(), + "native ScreenCaptureKit window list helper", + "recordly-window-list", + ); +} + +export async function ensureNativeCursorMonitorBinary(): Promise { + await ensureNativeHelperMigration(); + return ensureSwiftHelperBinary( + getNativeCursorMonitorSourcePath(), + getNativeCursorMonitorBinaryPath(), + "native cursor monitor helper", + "recordly-native-cursor-monitor", + ); +} diff --git a/electron/ipc/project/manager.ts b/electron/ipc/project/manager.ts new file mode 100644 index 00000000..728988f9 --- /dev/null +++ b/electron/ipc/project/manager.ts @@ -0,0 +1,337 @@ +import { constants as fsConstants } from "node:fs"; 
+import { existsSync } from "node:fs"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { app } from "electron"; +import { RECORDINGS_DIR, USER_DATA_PATH } from "../../appPaths"; +import { + PROJECT_FILE_EXTENSION, + LEGACY_PROJECT_FILE_EXTENSIONS, + PROJECTS_DIRECTORY_NAME, + PROJECT_THUMBNAIL_SUFFIX, + RECENT_PROJECTS_FILE, + MAX_RECENT_PROJECTS, + RECORDINGS_SETTINGS_FILE, +} from "../constants"; +import type { ProjectLibraryEntry, RecordingSessionData } from "../types"; +import { + currentProjectPath, + setCurrentProjectPath, + setCurrentVideoPath, + setCurrentRecordingSession, + approvedLocalReadPaths, + setCustomRecordingsDir, + setRecordingsDirLoaded, +} from "../state"; +import { + normalizePath, + normalizeVideoSourcePath, + getRecordingsDir, +} from "../utils"; + + +export { normalizePath, normalizeVideoSourcePath }; + +export function getAssetRootPath() { + if (app.isPackaged) { + return path.join(process.resourcesPath, "assets"); + } + + return path.join(app.getAppPath(), "public"); +} + +export function isPathInsideDirectory(candidatePath: string, directoryPath: string) { + const normalizedDirectoryPath = normalizePath(directoryPath); + return ( + candidatePath === normalizedDirectoryPath || + candidatePath.startsWith(`${normalizedDirectoryPath}${path.sep}`) + ); +} + +export function isAllowedLocalReadPath(candidatePath: string) { + const allowedPrefixes = [RECORDINGS_DIR, USER_DATA_PATH, getAssetRootPath(), app.getPath("temp")]; + + return ( + existsSync(candidatePath) || + allowedPrefixes.some((prefix) => isPathInsideDirectory(candidatePath, prefix)) || + approvedLocalReadPaths.has(candidatePath) + ); +} + +export async function rememberApprovedLocalReadPath(filePath?: string | null) { + const normalizedPath = normalizeVideoSourcePath(filePath); + if (!normalizedPath) { + return; + } + + const resolvedPath = normalizePath(normalizedPath); + approvedLocalReadPaths.add(resolvedPath); + + try { + 
approvedLocalReadPaths.add(await fs.realpath(resolvedPath)); + } catch { + // Ignore missing files; the eventual read will surface the real error. + } +} + +export async function replaceApprovedSessionLocalReadPaths(filePaths: Array) { + approvedLocalReadPaths.clear(); + await Promise.all(filePaths.map((filePath) => rememberApprovedLocalReadPath(filePath))); +} + +export async function resolveProjectMediaSources(project: unknown): Promise< + | { success: true; videoPath: string; webcamPath: string | null } + | { success: false; message: string } +> { + if (!project || typeof project !== "object") { + return { success: false, message: "Invalid project file format" }; + } + + const rawVideoPath = (project as { videoPath?: unknown }).videoPath; + if (typeof rawVideoPath !== "string") { + return { success: false, message: "Project file is missing a video path" }; + } + + const normalizedVideoPath = normalizeVideoSourcePath(rawVideoPath); + if (!normalizedVideoPath) { + return { success: false, message: "Project file is missing a valid video path" }; + } + + try { + await fs.access(normalizedVideoPath, fsConstants.F_OK); + } catch { + return { + success: false, + message: `Project video file not found: ${normalizedVideoPath}`, + }; + } + + const rawWebcamPath = + typeof (project as { editor?: { webcam?: { sourcePath?: unknown } } }).editor?.webcam + ?.sourcePath === "string" + ? ((project as { editor?: { webcam?: { sourcePath?: string } } }).editor?.webcam + ?.sourcePath ?? 
null) + : null; + const normalizedWebcamPath = normalizeVideoSourcePath(rawWebcamPath); + + if (!normalizedWebcamPath) { + return { + success: true, + videoPath: normalizedVideoPath, + webcamPath: null, + }; + } + + try { + await fs.access(normalizedWebcamPath, fsConstants.F_OK); + return { + success: true, + videoPath: normalizedVideoPath, + webcamPath: normalizedWebcamPath, + }; + } catch { + return { + success: true, + videoPath: normalizedVideoPath, + webcamPath: null, + }; + } +} + +export async function getProjectsDir() { + const projectsDir = path.join(await getRecordingsDir(), PROJECTS_DIRECTORY_NAME); + await fs.mkdir(projectsDir, { recursive: true }); + return projectsDir; +} + +export async function persistRecordingsDirectorySetting(nextDir: string) { + setCustomRecordingsDir(path.resolve(nextDir)); + setRecordingsDirLoaded(true); + await fs.writeFile( + RECORDINGS_SETTINGS_FILE, + JSON.stringify({ recordingsDir: path.resolve(nextDir) }, null, 2), + "utf-8", + ); +} + +export function hasProjectFileExtension(filePath: string) { + const extension = path.extname(filePath).replace(/^\./, "").toLowerCase(); + return [PROJECT_FILE_EXTENSION, ...LEGACY_PROJECT_FILE_EXTENSIONS].includes(extension); +} + +export function getProjectThumbnailPath(projectPath: string) { + return `${projectPath}${PROJECT_THUMBNAIL_SUFFIX}`; +} + +export async function saveProjectThumbnail(projectPath: string, thumbnailDataUrl?: string | null) { + const thumbnailPath = getProjectThumbnailPath(projectPath); + if (!thumbnailDataUrl) { + await fs.rm(thumbnailPath, { force: true }).catch(() => undefined); + return null; + } + + const match = thumbnailDataUrl.match(/^data:image\/png;base64,(.+)$/); + if (!match) { + throw new Error("Project thumbnail must be a PNG data URL."); + } + + await fs.writeFile(thumbnailPath, Buffer.from(match[1], "base64")); + return thumbnailPath; +} + +export async function loadRecentProjectPaths() { + try { + const content = await 
fs.readFile(RECENT_PROJECTS_FILE, "utf-8"); + const parsed = JSON.parse(content) as { paths?: unknown }; + return Array.isArray(parsed.paths) + ? parsed.paths.filter( + (value): value is string => + typeof value === "string" && value.trim().length > 0, + ) + : []; + } catch { + return []; + } +} + +export async function saveRecentProjectPaths(paths: string[]) { + const normalizedPaths = Array.from(new Set(paths.map((value) => normalizePath(value)))).slice( + 0, + MAX_RECENT_PROJECTS, + ); + await fs.writeFile( + RECENT_PROJECTS_FILE, + JSON.stringify({ paths: normalizedPaths }, null, 2), + "utf-8", + ); +} + +export async function rememberRecentProject(projectPath: string) { + if (!hasProjectFileExtension(projectPath)) { + return; + } + + const existingPaths = await loadRecentProjectPaths(); + await saveRecentProjectPaths([projectPath, ...existingPaths]); +} + +export async function buildProjectLibraryEntry( + projectPath: string, + projectsDir: string, +): Promise { + try { + const normalizedPath = normalizePath(projectPath); + if (!hasProjectFileExtension(normalizedPath)) { + return null; + } + + const stats = await fs.stat(normalizedPath); + if (!stats.isFile()) { + return null; + } + + const thumbnailPath = getProjectThumbnailPath(normalizedPath); + const thumbnailExists = await fs + .access(thumbnailPath, fsConstants.R_OK) + .then(() => true) + .catch(() => false); + + return { + path: normalizedPath, + name: path.basename(normalizedPath).replace(/\.(recordly|openscreen)$/i, ""), + updatedAt: stats.mtimeMs, + thumbnailPath: thumbnailExists ? 
thumbnailPath : null, + isCurrent: Boolean( + currentProjectPath && normalizePath(currentProjectPath) === normalizedPath, + ), + isInProjectsDirectory: path.dirname(normalizedPath) === normalizePath(projectsDir), + }; + } catch { + return null; + } +} + +export async function listProjectLibraryEntries() { + const projectsDir = await getProjectsDir(); + const projectPaths: string[] = []; + + try { + const entries = await fs.readdir(projectsDir, { withFileTypes: true }); + for (const entry of entries) { + if (!entry.isFile()) { + continue; + } + + const entryPath = path.join(projectsDir, entry.name); + if (hasProjectFileExtension(entryPath)) { + projectPaths.push(entryPath); + } + } + } catch { + // Ignore directory read failures and fall back to recent files. + } + + const recentProjectPaths = await loadRecentProjectPaths(); + const candidatePaths = Array.from(new Set([...projectPaths, ...recentProjectPaths])); + const entries = ( + await Promise.all( + candidatePaths.map((candidatePath) => + buildProjectLibraryEntry(candidatePath, projectsDir), + ), + ) + ) + .filter((entry): entry is ProjectLibraryEntry => entry != null) + .sort((left, right) => right.updatedAt - left.updatedAt); + + await saveRecentProjectPaths(entries.map((entry) => entry.path)); + + return { + projectsDir, + entries, + }; +} + +export async function loadProjectFromPath(projectPath: string) { + const normalizedPath = normalizePath(projectPath); + const content = await fs.readFile(normalizedPath, "utf-8"); + const project = JSON.parse(content); + const mediaSources = await resolveProjectMediaSources(project); + + if (!mediaSources.success) { + return { + success: false, + canceled: false, + message: mediaSources.message, + }; + } + + setCurrentProjectPath(normalizedPath); + setCurrentVideoPath(mediaSources.videoPath); + const projectObj = project as Record; + const editorObj = projectObj?.editor as Record | undefined; + const audioTracks = editorObj?.audioTracks as { sourcePath?: unknown }[] | 
undefined; + const approvedProjectPaths: Array = [ + mediaSources.videoPath, + mediaSources.webcamPath, + ]; + if (Array.isArray(audioTracks)) { + for (const track of audioTracks) { + if (typeof track?.sourcePath === "string") { + approvedProjectPaths.push(track.sourcePath); + } + } + } + await replaceApprovedSessionLocalReadPaths(approvedProjectPaths); + setCurrentRecordingSession({ + videoPath: mediaSources.videoPath, + webcamPath: mediaSources.webcamPath, + timeOffsetMs: 0, + } as RecordingSessionData); + await rememberRecentProject(normalizedPath); + + return { + success: true, + path: normalizedPath, + project, + }; +} diff --git a/electron/ipc/project/session.ts b/electron/ipc/project/session.ts new file mode 100644 index 00000000..429e7607 --- /dev/null +++ b/electron/ipc/project/session.ts @@ -0,0 +1,138 @@ +import { constants as fsConstants } from "node:fs"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { RECORDING_SESSION_MANIFEST_SUFFIX } from "../constants"; +import type { RecordingSessionData, RecordingSessionManifest } from "../types"; +import { normalizeVideoSourcePath } from "../utils"; + +function normalizeRecordingTimeOffsetMs(value: unknown): number { + return typeof value === "number" && Number.isFinite(value) ? Math.round(value) : 0; +} + +export function getRecordingSessionManifestPath(videoPath: string) { + const extension = path.extname(videoPath); + const baseName = path.basename(videoPath, extension); + return path.join(path.dirname(videoPath), `${baseName}${RECORDING_SESSION_MANIFEST_SUFFIX}`); +} + +export async function persistRecordingSessionManifest(session: RecordingSessionData): Promise { + const normalizedVideoPath = normalizeVideoSourcePath(session.videoPath); + if (!normalizedVideoPath) { + return; + } + + const normalizedWebcamPath = normalizeVideoSourcePath(session.webcamPath ?? 
null); + const manifestPath = getRecordingSessionManifestPath(normalizedVideoPath); + + if (!normalizedWebcamPath) { + await fs.rm(manifestPath, { force: true }); + return; + } + + const manifest: RecordingSessionManifest = { + version: 2, + videoFileName: path.basename(normalizedVideoPath), + webcamFileName: path.basename(normalizedWebcamPath), + timeOffsetMs: normalizeRecordingTimeOffsetMs(session.timeOffsetMs), + }; + + await fs.writeFile(manifestPath, JSON.stringify(manifest, null, 2), "utf-8"); +} + +export async function resolveRecordingSessionManifest( + videoPath?: string | null, +): Promise { + const normalizedVideoPath = normalizeVideoSourcePath(videoPath); + if (!normalizedVideoPath) { + return null; + } + + const manifestPath = getRecordingSessionManifestPath(normalizedVideoPath); + + try { + const content = await fs.readFile(manifestPath, "utf-8"); + const parsed = JSON.parse(content) as Partial; + if (parsed.version !== 1 && parsed.version !== 2) { + return null; + } + + const webcamFileName = + typeof parsed.webcamFileName === "string" && parsed.webcamFileName.trim() + ? 
parsed.webcamFileName.trim() + : null; + + if (!webcamFileName) { + return { + videoPath: normalizedVideoPath, + webcamPath: null, + timeOffsetMs: 0, + }; + } + + const webcamPath = path.join(path.dirname(normalizedVideoPath), webcamFileName); + await fs.access(webcamPath, fsConstants.F_OK); + + return { + videoPath: normalizedVideoPath, + webcamPath, + timeOffsetMs: normalizeRecordingTimeOffsetMs(parsed.timeOffsetMs), + }; + } catch { + return null; + } +} + +export async function resolveLinkedWebcamPath(videoPath?: string | null): Promise { + const normalizedVideoPath = normalizeVideoSourcePath(videoPath); + if (!normalizedVideoPath) { + return null; + } + + const extension = path.extname(normalizedVideoPath); + const baseName = path.basename(normalizedVideoPath, extension); + if (!baseName || baseName.endsWith("-webcam")) { + return null; + } + + const candidateExtensions = Array.from( + new Set([extension, ".webm", ".mp4", ".mov", ".mkv", ".avi"].filter(Boolean)), + ); + + for (const candidateExtension of candidateExtensions) { + const candidatePath = path.join( + path.dirname(normalizedVideoPath), + `${baseName}-webcam${candidateExtension}`, + ); + + try { + await fs.access(candidatePath, fsConstants.F_OK); + return candidatePath; + } catch { + continue; + } + } + + return null; +} + +export async function resolveRecordingSession( + videoPath?: string | null, +): Promise { + const manifestSession = await resolveRecordingSessionManifest(videoPath); + if (manifestSession) { + return manifestSession; + } + + const normalizedVideoPath = normalizeVideoSourcePath(videoPath); + if (!normalizedVideoPath) { + return null; + } + + const linkedWebcamPath = await resolveLinkedWebcamPath(normalizedVideoPath); + return { + videoPath: normalizedVideoPath, + webcamPath: linkedWebcamPath, + }; +} + + diff --git a/electron/ipc/recording/diagnostics.ts b/electron/ipc/recording/diagnostics.ts new file mode 100644 index 00000000..07bdfe13 --- /dev/null +++ 
b/electron/ipc/recording/diagnostics.ts
@@ -0,0 +1,175 @@
import { execFile } from "node:child_process";
import fs from "node:fs/promises";
import { promisify } from "node:util";
import { getFfmpegBinaryPath } from "../ffmpeg/binary";
import { COMPANION_AUDIO_LAYOUTS } from "../constants";
import type { NativeCaptureDiagnostics, CompanionAudioCandidate } from "../types";
import { lastNativeCaptureDiagnostics, setLastNativeCaptureDiagnostics } from "../state";

const execFileAsync = promisify(execFile);

/** Stamp and store the latest native-capture diagnostics snapshot. */
export function recordNativeCaptureDiagnostics(
  diagnostics: Omit<NativeCaptureDiagnostics, "timestamp">,
) {
  setLastNativeCaptureDiagnostics({
    timestamp: new Date().toISOString(),
    ...diagnostics,
  });

  return lastNativeCaptureDiagnostics;
}

/** File size in bytes, or null when the path is missing or unreadable. */
export async function getFileSizeIfPresent(filePath: string | null | undefined) {
  if (!filePath) {
    return null;
  }

  try {
    const stat = await fs.stat(filePath);
    return stat.size;
  } catch {
    return null;
  }
}

/** Extract the `Duration: HH:MM:SS.ss` value from FFmpeg stderr, in seconds. */
export function parseFfmpegDurationSeconds(stderr: string) {
  const match = stderr.match(/Duration:\s+(\d+):(\d+):(\d+(?:\.\d+)?)/i);
  if (!match) {
    return null;
  }

  const hours = Number(match[1]);
  const minutes = Number(match[2]);
  const seconds = Number(match[3]);
  if (![hours, minutes, seconds].every(Number.isFinite)) {
    return null;
  }

  return hours * 3600 + minutes * 60 + seconds;
}

/** Probe the duration of a media file (in seconds) using the container header. */
export async function probeMediaDurationSeconds(filePath: string): Promise<number> {
  const ffmpegPath = getFfmpegBinaryPath();
  try {
    // `ffmpeg -i` with no output always exits non-zero; the duration is on stderr.
    await execFileAsync(ffmpegPath, ["-i", filePath, "-hide_banner"], { timeout: 5000 });
  } catch (error) {
    const stderr = (error as NodeJS.ErrnoException & { stderr?: string })?.stderr ?? "";
    // Reuse the shared parser rather than a second, stricter regex.
    const parsed = parseFfmpegDurationSeconds(stderr);
    if (parsed !== null) {
      return parsed;
    }
  }
  return 0;
}

/**
 * Find companion audio files (system/mic) that exist and are non-empty for a
 * recording, across the per-platform naming layouts.
 */
export async function getUsableCompanionAudioCandidates(
  videoPath: string,
): Promise<CompanionAudioCandidate[]> {
  const basePath = videoPath.replace(/\.[^.]+$/u, "");
  const candidates: CompanionAudioCandidate[] = [];

  for (const layout of COMPANION_AUDIO_LAYOUTS) {
    const systemPath = `${basePath}${layout.systemSuffix}`;
    const micPath = `${basePath}${layout.micSuffix}`;
    const usablePaths: string[] = [];

    for (const companionPath of [systemPath, micPath]) {
      try {
        const stat = await fs.stat(companionPath);
        if (stat.size > 0) {
          usablePaths.push(companionPath);
        }
      } catch {
        // Missing companion audio is expected for many recordings.
      }
    }

    if (usablePaths.length > 0) {
      candidates.push({
        platform: layout.platform,
        systemPath,
        micPath,
        usablePaths,
      });
    }
  }

  return candidates;
}

/** Decode one audio frame to check whether the video has an embedded audio stream. */
export async function hasEmbeddedAudioStream(videoPath: string) {
  const ffmpegPath = getFfmpegBinaryPath();
  let stderr = "";

  try {
    const result = await execFileAsync(
      ffmpegPath,
      ["-hide_banner", "-i", videoPath, "-map", "0:a:0", "-frames:a", "1", "-f", "null", "-"],
      { timeout: 20000, maxBuffer: 10 * 1024 * 1024 },
    );
    stderr = result.stderr;
  } catch (error) {
    // ffmpeg errors when no audio stream exists; the stream listing is still on stderr.
    stderr = (error as NodeJS.ErrnoException & { stderr?: string }).stderr ?? "";
  }

  return /Stream #.*Audio:/i.test(stderr);
}

/**
 * Companion audio paths to fall back to when the video itself has no embedded
 * audio; empty when there is nothing to mux or the video already has audio.
 */
export async function getCompanionAudioFallbackPaths(videoPath: string) {
  const companionCandidates = await getUsableCompanionAudioCandidates(videoPath);
  if (companionCandidates.length === 0) {
    return [];
  }

  if (await hasEmbeddedAudioStream(videoPath)) {
    return [];
  }

  return companionCandidates.flatMap((candidate) => candidate.usablePaths);
}

/**
 * Validate a recorded output: non-empty file, decodable first video frame,
 * readable video stream, and a positive duration when one is reported.
 *
 * @throws With a descriptive message when any check fails.
 */
export async function validateRecordedVideo(videoPath: string) {
  const stat = await fs.stat(videoPath);
  if (!stat.isFile()) {
    throw new Error(`Recorded output is not a file: ${videoPath}`);
  }

  if (stat.size <= 0) {
    throw new Error(`Recorded output is empty: ${videoPath}`);
  }

  const ffmpegPath = getFfmpegBinaryPath();
  let stderr = "";

  try {
    const result = await execFileAsync(
      ffmpegPath,
      ["-hide_banner", "-i", videoPath, "-map", "0:v:0", "-frames:v", "1", "-f", "null", "-"],
      { timeout: 20000, maxBuffer: 10 * 1024 * 1024 },
    );
    stderr = result.stderr;
  } catch (error) {
    const execError = error as NodeJS.ErrnoException & { stderr?: string };
    const output = execError.stderr?.trim();
    throw new Error(output || `Recorded output could not be decoded: ${videoPath}`);
  }

  if (!/Stream #.*Video:/i.test(stderr)) {
    throw new Error(`Recorded output does not contain a readable video stream: ${videoPath}`);
  }

  const durationSeconds = parseFfmpegDurationSeconds(stderr);
  if (durationSeconds !== null && durationSeconds <= 0) {
    throw new Error(`Recorded output has an invalid duration: ${videoPath}`);
  }

  return {
    fileSizeBytes: stat.size,
    durationSeconds,
  };
}
diff --git a/electron/ipc/recording/events.ts b/electron/ipc/recording/events.ts
new file mode 100644
index 00000000..5a2653c3
--- /dev/null
+++ b/electron/ipc/recording/events.ts
@@ -0,0 +1,9 @@
import { BrowserWindow } from "electron";

export function emitRecordingInterrupted(reason: string, message: string) {
BrowserWindow.getAllWindows().forEach((window) => { + if (!window.isDestroyed()) { + window.webContents.send("recording-interrupted", { reason, message }); + } + }); +} diff --git a/electron/ipc/recording/ffmpeg.ts b/electron/ipc/recording/ffmpeg.ts new file mode 100644 index 00000000..62f49dd7 --- /dev/null +++ b/electron/ipc/recording/ffmpeg.ts @@ -0,0 +1,204 @@ +import type { ChildProcessWithoutNullStreams } from "node:child_process"; +import type { SelectedSource } from "../types"; +import { + ffmpegCaptureOutputBuffer, +} from "../state"; +import { getScreen } from "../utils"; +import { resolveWindowsCaptureDisplay } from "../windowsCaptureSelection"; +import { resolveLinuxWindowBounds } from "../cursor/bounds"; + +export function getDisplayBoundsForSource(source: SelectedSource) { + return resolveWindowsCaptureDisplay( + source, + getScreen().getAllDisplays(), + getScreen().getPrimaryDisplay(), + ).bounds; +} + +export async function buildFfmpegCaptureArgs(source: SelectedSource, outputPath: string) { + const commonOutputArgs = [ + "-an", + "-c:v", + "libx264", + "-preset", + "veryfast", + "-pix_fmt", + "yuv420p", + "-movflags", + "+faststart", + outputPath, + ]; + + if (process.platform === "win32") { + if (source?.id?.startsWith("window:")) { + const windowTitle = + typeof source.windowTitle === "string" + ? 
source.windowTitle.trim() + : source.name.trim(); + if (!windowTitle) { + throw new Error("Missing window title for FFmpeg window capture"); + } + + return [ + "-y", + "-f", + "gdigrab", + "-framerate", + "60", + "-draw_mouse", + "0", + "-i", + `title=${windowTitle}`, + ...commonOutputArgs, + ]; + } + + return [ + "-y", + "-f", + "gdigrab", + "-framerate", + "60", + "-draw_mouse", + "0", + "-i", + "desktop", + ...commonOutputArgs, + ]; + } + + if (process.platform === "linux") { + const displayEnv = process.env.DISPLAY || ":0.0"; + if (source?.id?.startsWith("window:")) { + const bounds = await resolveLinuxWindowBounds(source); + if (!bounds) { + throw new Error("Unable to resolve Linux window bounds for FFmpeg capture"); + } + + return [ + "-y", + "-f", + "x11grab", + "-framerate", + "60", + "-draw_mouse", + "0", + "-video_size", + `${Math.max(2, bounds.width)}x${Math.max(2, bounds.height)}`, + "-i", + `${displayEnv}+${Math.round(bounds.x)},${Math.round(bounds.y)}`, + ...commonOutputArgs, + ]; + } + + const bounds = getDisplayBoundsForSource(source); + return [ + "-y", + "-f", + "x11grab", + "-framerate", + "60", + "-draw_mouse", + "0", + "-video_size", + `${Math.max(2, bounds.width)}x${Math.max(2, bounds.height)}`, + "-i", + `${displayEnv}+${Math.round(bounds.x)},${Math.round(bounds.y)}`, + ...commonOutputArgs, + ]; + } + + if (process.platform === "darwin") { + return [ + "-y", + "-f", + "avfoundation", + "-capture_cursor", + "0", + "-framerate", + "60", + "-i", + "1:none", + ...commonOutputArgs, + ]; + } + + throw new Error(`FFmpeg capture is not supported on ${process.platform}`); +} + +export function waitForFfmpegCaptureStart(process: ChildProcessWithoutNullStreams) { + return new Promise((resolve, reject) => { + const onError = (error: Error) => { + cleanup(); + reject(error); + }; + + const onExit = (code: number | null) => { + cleanup(); + reject( + new Error( + ffmpegCaptureOutputBuffer.trim() || + `FFmpeg exited before recording started (code ${code ?? 
"unknown"})`, + ), + ); + }; + + const timer = setTimeout(() => { + cleanup(); + resolve(); + }, 900); + + const cleanup = () => { + clearTimeout(timer); + process.off("error", onError); + process.off("exit", onExit); + }; + + process.once("error", onError); + process.once("exit", onExit); + }); +} + +export function waitForFfmpegCaptureStop(process: ChildProcessWithoutNullStreams, outputPath: string) { + return new Promise((resolve, reject) => { + const onClose = async (code: number | null) => { + cleanup(); + + try { + const { access } = await import("node:fs/promises"); + await access(outputPath); + if (code === 0 || code === null) { + resolve(outputPath); + return; + } + + if (ffmpegCaptureOutputBuffer.includes("Exiting normally")) { + resolve(outputPath); + return; + } + } catch { + // handled below + } + + reject( + new Error( + ffmpegCaptureOutputBuffer.trim() || + `FFmpeg exited with code ${code ?? "unknown"}`, + ), + ); + }; + + const onError = (error: Error) => { + cleanup(); + reject(error); + }; + + const cleanup = () => { + process.off("close", onClose); + process.off("error", onError); + }; + + process.once("close", onClose); + process.once("error", onError); + }); +} diff --git a/electron/ipc/recording/mac.ts b/electron/ipc/recording/mac.ts new file mode 100644 index 00000000..b7cb8747 --- /dev/null +++ b/electron/ipc/recording/mac.ts @@ -0,0 +1,459 @@ +import type { ChildProcessWithoutNullStreams } from "node:child_process"; +import { execFile } from "node:child_process"; +import fs from "node:fs/promises"; +import { promisify } from "node:util"; +import { BrowserWindow } from "electron"; +import { getFfmpegBinaryPath } from "../ffmpeg/binary"; +import { + getAudioSyncAdjustment, + appendSyncedAudioFilter, +} from "../ffmpeg/filters"; +import type { AudioSyncAdjustment } from "../types"; +import { + nativeScreenRecordingActive, + setNativeScreenRecordingActive, + setNativeCaptureProcess, + nativeCaptureOutputBuffer, + nativeCaptureTargetPath, + 
setNativeCaptureTargetPath, + nativeCaptureStopRequested, + setNativeCaptureStopRequested, + nativeCaptureSystemAudioPath, + setNativeCaptureSystemAudioPath, + nativeCaptureMicrophonePath, + setNativeCaptureMicrophonePath, + lastNativeCaptureDiagnostics, + setCurrentVideoPath, + setCurrentProjectPath, + selectedSource, +} from "../state"; +import { moveFileWithOverwrite, isAutoRecordingPath } from "../utils"; +import { + recordNativeCaptureDiagnostics, + getFileSizeIfPresent, + validateRecordedVideo, + getUsableCompanionAudioCandidates, +} from "./diagnostics"; +import { probeMediaDurationSeconds } from "./diagnostics"; +import { emitRecordingInterrupted } from "./events"; +import { pruneAutoRecordings } from "./prune"; +import { + snapshotCursorTelemetryForPersistence, + persistPendingCursorTelemetry, +} from "../cursor/telemetry"; +import { muxNativeWindowsVideoWithAudio } from "./windows"; + +const execFileAsync = promisify(execFile); + +export function waitForNativeCaptureStart(process: ChildProcessWithoutNullStreams) { + return new Promise((resolve, reject) => { + const timer = setTimeout(() => { + cleanup(); + reject(new Error("Timed out waiting for ScreenCaptureKit recorder to start")); + }, 12000); + + const onStdout = (chunk: Buffer) => { + const text = chunk.toString(); + if (text.includes("Recording started")) { + cleanup(); + resolve(); + } + }; + + const onError = (error: Error) => { + cleanup(); + reject(error); + }; + + const onExit = (code: number | null) => { + cleanup(); + reject( + new Error( + nativeCaptureOutputBuffer.trim() || + `Native capture helper exited before recording started (code ${code ?? 
"unknown"})`, + ), + ); + }; + + const cleanup = () => { + clearTimeout(timer); + process.stdout.off("data", onStdout); + process.off("error", onError); + process.off("exit", onExit); + }; + + process.stdout.on("data", onStdout); + process.once("error", onError); + process.once("exit", onExit); + }); +} + +export function waitForNativeCaptureStop(process: ChildProcessWithoutNullStreams) { + return new Promise((resolve, reject) => { + const onClose = (code: number | null) => { + cleanup(); + const match = nativeCaptureOutputBuffer.match(/Recording stopped\. Output path: (.+)/); + if (match?.[1]) { + resolve(match[1].trim()); + return; + } + if (code === 0 && nativeCaptureTargetPath) { + resolve(nativeCaptureTargetPath); + return; + } + reject( + new Error( + nativeCaptureOutputBuffer.trim() || + `Native capture helper exited with code ${code ?? "unknown"}`, + ), + ); + }; + + const onError = (error: Error) => { + cleanup(); + reject(error); + }; + + const cleanup = () => { + process.off("close", onClose); + process.off("error", onError); + }; + + process.once("close", onClose); + process.once("error", onError); + }); +} + +export async function muxNativeMacRecordingWithAudio( + videoPath: string, + systemAudioPath?: string | null, + microphonePath?: string | null, +) { + const ffmpegPath = getFfmpegBinaryPath(); + const mixedOutputPath = `${videoPath}.mixed.mp4`; + + const inputs = ["-i", videoPath]; + const availableAudioInputs: string[] = []; + const audioFilePaths: string[] = []; + + for (const [label, audioPath] of [ + ["system", systemAudioPath], + ["microphone", microphonePath], + ] as const) { + if (!audioPath) continue; + try { + const stat = await fs.stat(audioPath); + if (stat.size <= 0) { + console.warn(`[mux] Skipping ${label} audio: file is empty (${audioPath})`); + await fs.rm(audioPath, { force: true }).catch(() => undefined); + continue; + } + inputs.push("-i", audioPath); + availableAudioInputs.push(label); + audioFilePaths.push(audioPath); + } 
catch { + console.warn(`[mux] Skipping ${label} audio: file not accessible (${audioPath})`); + } + } + + if (availableAudioInputs.length === 0) { + console.warn("[mux] No valid audio files to mux"); + return; + } + + const videoDuration = await probeMediaDurationSeconds(videoPath); + const audioAdjustments: Map = new Map(); + + if (videoDuration > 0) { + for (let i = 0; i < audioFilePaths.length; i++) { + const audioDuration = await probeMediaDurationSeconds(audioFilePaths[i]); + const adjustment = getAudioSyncAdjustment(videoDuration, audioDuration); + audioAdjustments.set(availableAudioInputs[i], adjustment); + if (adjustment.mode === "tempo") { + console.log( + `[mux] ${availableAudioInputs[i]} audio differs from video by ${adjustment.durationDeltaMs}ms — applying tempo ratio ${adjustment.tempoRatio.toFixed(6)}`, + ); + } else if (adjustment.mode === "delay" && adjustment.delayMs > 0) { + console.log( + `[mux] ${availableAudioInputs[i]} audio appears to start late by ${adjustment.delayMs}ms — adding leading silence`, + ); + } + } + } + + const systemAdjustment = audioAdjustments.get("system") ?? { + mode: "none", + delayMs: 0, + tempoRatio: 1, + durationDeltaMs: 0, + }; + const micAdjustment = audioAdjustments.get("microphone") ?? 
{ + mode: "none", + delayMs: 0, + tempoRatio: 1, + durationDeltaMs: 0, + }; + const needsFilter = systemAdjustment.mode !== "none" || micAdjustment.mode !== "none"; + + let args: string[]; + if (availableAudioInputs.length === 2) { + if (needsFilter) { + const filterParts: string[] = []; + appendSyncedAudioFilter(filterParts, "[1:a]", "s", systemAdjustment); + appendSyncedAudioFilter(filterParts, "[2:a]", "m", micAdjustment); + filterParts.push("[s][m]amix=inputs=2:duration=longest:normalize=0[aout]"); + args = [ + "-y", + ...inputs, + "-filter_complex", + filterParts.join(";"), + "-map", + "0:v:0", + "-map", + "[aout]", + "-c:v", + "copy", + "-c:a", + "aac", + "-b:a", + "192k", + "-shortest", + mixedOutputPath, + ]; + } else { + args = [ + "-y", + ...inputs, + "-filter_complex", + "[1:a][2:a]amix=inputs=2:duration=longest:normalize=0[aout]", + "-map", + "0:v:0", + "-map", + "[aout]", + "-c:v", + "copy", + "-c:a", + "aac", + "-b:a", + "192k", + "-shortest", + mixedOutputPath, + ]; + } + } else { + const singleAdjustment = audioAdjustments.get(availableAudioInputs[0]) ?? 
{ + mode: "none", + delayMs: 0, + tempoRatio: 1, + durationDeltaMs: 0, + }; + if (singleAdjustment.mode !== "none") { + const filterParts: string[] = []; + appendSyncedAudioFilter(filterParts, "[1:a]", "aout", singleAdjustment); + args = [ + "-y", + ...inputs, + "-filter_complex", + filterParts.join(";"), + "-map", + "0:v:0", + "-map", + "[aout]", + "-c:v", + "copy", + "-c:a", + "aac", + "-b:a", + "192k", + "-shortest", + mixedOutputPath, + ]; + } else { + args = [ + "-y", + ...inputs, + "-map", + "0:v:0", + "-map", + "1:a:0", + "-c:v", + "copy", + "-c:a", + "aac", + "-b:a", + "192k", + "-shortest", + mixedOutputPath, + ]; + } + } + + console.log("[mux] Running ffmpeg:", ffmpegPath, args.join(" ")); + + try { + await execFileAsync(ffmpegPath, args, { timeout: 120000, maxBuffer: 10 * 1024 * 1024 }); + } catch (error) { + const execError = error as NodeJS.ErrnoException & { stderr?: string }; + console.error("[mux] ffmpeg failed:", execError.stderr || execError.message); + throw error; + } + + await moveFileWithOverwrite(mixedOutputPath, videoPath); + console.log("[mux] Successfully muxed audio into video:", videoPath); + + for (const audioPath of [systemAudioPath, microphonePath]) { + if (audioPath) { + await fs.rm(audioPath, { force: true }).catch(() => undefined); + } + } +} + +export function attachNativeCaptureLifecycle(process: ChildProcessWithoutNullStreams) { + process.once("close", () => { + const wasActive = nativeScreenRecordingActive; + setNativeCaptureProcess(null); + + if (!wasActive || nativeCaptureStopRequested) { + return; + } + + setNativeScreenRecordingActive(false); + setNativeCaptureTargetPath(null); + setNativeCaptureStopRequested(false); + setNativeCaptureSystemAudioPath(null); + setNativeCaptureMicrophonePath(null); + + const sourceName = selectedSource?.name ?? 
"Screen"; + BrowserWindow.getAllWindows().forEach((window) => { + if (!window.isDestroyed()) { + window.webContents.send("recording-state-changed", { + recording: false, + sourceName, + }); + } + }); + + const reason = nativeCaptureOutputBuffer.includes("WINDOW_UNAVAILABLE") + ? "window-unavailable" + : "capture-stopped"; + const message = + reason === "window-unavailable" + ? "The selected window is no longer capturable. Please reselect a window." + : "Recording stopped unexpectedly."; + + emitRecordingInterrupted(reason, message); + }); +} + +export async function finalizeStoredVideo(videoPath: string) { + // Safety net: if companion audio files still exist, the mux was skipped — attempt it now + if (videoPath.endsWith(".mp4")) { + const companionCandidates = await getUsableCompanionAudioCandidates(videoPath); + for (const { systemPath, micPath, platform } of companionCandidates) { + if (platform === "mac" || platform === "win") { + console.log( + `[finalize] Detected un-muxed ${platform} audio files alongside video — attempting safety-net mux`, + ); + try { + if (platform === "win") { + await muxNativeWindowsVideoWithAudio(videoPath, systemPath, micPath); + } else { + await muxNativeMacRecordingWithAudio(videoPath, systemPath, micPath); + } + console.log("[finalize] Safety-net mux completed successfully"); + } catch (error) { + console.warn("[finalize] Safety-net mux failed:", error); + } + break; + } + } + } + + let validation: { fileSizeBytes: number; durationSeconds: number | null } | null = null; + try { + validation = await validateRecordedVideo(videoPath); + } catch (error) { + console.warn("Video validation failed (proceeding anyway):", error); + } + + snapshotCursorTelemetryForPersistence(); + setCurrentVideoPath(videoPath); + setCurrentProjectPath(null); + await persistPendingCursorTelemetry(videoPath); + if (isAutoRecordingPath(videoPath)) { + await pruneAutoRecordings([videoPath]); + } + + if (lastNativeCaptureDiagnostics?.backend === 
"mac-screencapturekit") { + recordNativeCaptureDiagnostics({ + backend: "mac-screencapturekit", + phase: "stop", + sourceId: lastNativeCaptureDiagnostics.sourceId ?? null, + sourceType: lastNativeCaptureDiagnostics.sourceType ?? "unknown", + displayId: lastNativeCaptureDiagnostics.displayId ?? null, + displayBounds: lastNativeCaptureDiagnostics.displayBounds ?? null, + windowHandle: lastNativeCaptureDiagnostics.windowHandle ?? null, + helperPath: lastNativeCaptureDiagnostics.helperPath ?? null, + outputPath: videoPath, + systemAudioPath: lastNativeCaptureDiagnostics.systemAudioPath ?? null, + microphonePath: lastNativeCaptureDiagnostics.microphonePath ?? null, + osRelease: lastNativeCaptureDiagnostics.osRelease, + supported: lastNativeCaptureDiagnostics.supported, + helperExists: lastNativeCaptureDiagnostics.helperExists, + processOutput: lastNativeCaptureDiagnostics.processOutput, + fileSizeBytes: validation?.fileSizeBytes ?? null, + }); + } + + return { + success: true, + path: videoPath, + message: + validation?.durationSeconds !== null && validation !== null + ? `Video stored successfully (${validation.fileSizeBytes} bytes, ${validation.durationSeconds.toFixed(2)}s)` + : `Video stored successfully`, + }; +} + +export async function recoverNativeMacCaptureOutput() { + const macDiagnostics = + lastNativeCaptureDiagnostics?.backend === "mac-screencapturekit" + ? lastNativeCaptureDiagnostics + : null; + const diagnosticsPath = macDiagnostics?.outputPath ?? null; + const candidatePath = nativeCaptureTargetPath ?? diagnosticsPath; + const systemAudioPath = nativeCaptureSystemAudioPath ?? macDiagnostics?.systemAudioPath ?? null; + const microphonePath = nativeCaptureMicrophonePath ?? macDiagnostics?.microphonePath ?? 
null; + + if (!candidatePath) { + return null; + } + + try { + if (systemAudioPath || microphonePath) { + try { + await muxNativeMacRecordingWithAudio( + candidatePath, + systemAudioPath, + microphonePath, + ); + } catch (muxError) { + console.warn("Failed to mux audio during recovery:", muxError); + } + } + + return await finalizeStoredVideo(candidatePath); + } catch (error) { + recordNativeCaptureDiagnostics({ + backend: "mac-screencapturekit", + phase: "stop", + outputPath: candidatePath, + systemAudioPath, + microphonePath, + processOutput: nativeCaptureOutputBuffer.trim() || undefined, + fileSizeBytes: await getFileSizeIfPresent(candidatePath), + error: String(error), + }); + return null; + } +} diff --git a/electron/ipc/recording/prune.ts b/electron/ipc/recording/prune.ts new file mode 100644 index 00000000..cfd8a22d --- /dev/null +++ b/electron/ipc/recording/prune.ts @@ -0,0 +1,91 @@ +import fs from "node:fs/promises"; +import path from "node:path"; +import { + AUTO_RECORDING_RETENTION_COUNT, + AUTO_RECORDING_MAX_AGE_MS, + PROJECT_FILE_EXTENSION, + LEGACY_PROJECT_FILE_EXTENSIONS, +} from "../constants"; +import { currentVideoPath } from "../state"; +import { normalizePath, getTelemetryPathForVideo, isAutoRecordingPath, getRecordingsDir } from "../utils"; + +export async function hasSiblingProjectFile(videoPath: string) { + const baseName = path.basename(videoPath, path.extname(videoPath)); + const candidateExtensions = [PROJECT_FILE_EXTENSION, ...LEGACY_PROJECT_FILE_EXTENSIONS]; + + for (const extension of candidateExtensions) { + const projectPath = path.join(path.dirname(videoPath), `${baseName}.${extension}`); + + try { + await fs.access(projectPath); + return true; + } catch { + continue; + } + } + + return false; +} + +export { isAutoRecordingPath }; + +export async function pruneAutoRecordings(exemptPaths: string[] = []) { + const recordingsDir = await getRecordingsDir(); + const exempt = new Set( + [currentVideoPath, ...exemptPaths] + .filter((value): 
value is string => Boolean(value)) + .map((value) => normalizePath(value)), + ); + + const entries = await fs.readdir(recordingsDir, { withFileTypes: true }); + const autoRecordingStats = await Promise.all( + entries + .filter((entry) => entry.isFile() && /^recording-.*\.(mp4|mov|webm)$/i.test(entry.name)) + .map(async (entry) => { + const filePath = path.join(recordingsDir, entry.name); + const stats = await fs.stat(filePath); + return { filePath, stats }; + }), + ); + + const sorted = autoRecordingStats.sort( + (left, right) => right.stats.mtimeMs - left.stats.mtimeMs, + ); + const now = Date.now(); + + for (const [index, entry] of sorted.entries()) { + const normalizedFilePath = normalizePath(entry.filePath); + if (exempt.has(normalizedFilePath)) { + continue; + } + + if (await hasSiblingProjectFile(entry.filePath)) { + continue; + } + + const tooOld = now - entry.stats.mtimeMs > AUTO_RECORDING_MAX_AGE_MS; + const overLimit = index >= AUTO_RECORDING_RETENTION_COUNT; + if (!tooOld && !overLimit) { + continue; + } + + try { + await fs.rm(entry.filePath, { force: true }); + await fs.rm(getTelemetryPathForVideo(entry.filePath), { force: true }); + // Clean up companion audio files left from recording (macOS .m4a, Windows .wav) + const base = entry.filePath.replace(/\.(mp4|mov|webm)$/i, ""); + for (const suffix of [ + ".system.m4a", + ".mic.m4a", + ".system.wav", + ".mic.wav", + ".mic.webm", + ".system.webm", + ]) { + await fs.rm(base + suffix, { force: true }).catch(() => undefined); + } + } catch (error) { + console.warn("Failed to prune old auto recording:", entry.filePath, error); + } + } +} diff --git a/electron/ipc/recording/windows.ts b/electron/ipc/recording/windows.ts new file mode 100644 index 00000000..76355b66 --- /dev/null +++ b/electron/ipc/recording/windows.ts @@ -0,0 +1,344 @@ +import type { ChildProcessWithoutNullStreams } from "node:child_process"; +import { constants as fsConstants } from "node:fs"; +import fs from "node:fs/promises"; +import { 
execFile } from "node:child_process"; +import { promisify } from "node:util"; +import { BrowserWindow } from "electron"; +import { getFfmpegBinaryPath } from "../ffmpeg/binary"; +import { + getAudioSyncAdjustment, + appendSyncedAudioFilter, + normalizePauseSegments, + buildPausedAudioFilter, +} from "../ffmpeg/filters"; +import type { PauseSegment, AudioSyncAdjustment } from "../types"; +import { + setWindowsCaptureProcess, + windowsCaptureOutputBuffer, + windowsCaptureTargetPath, + windowsNativeCaptureActive, + setWindowsNativeCaptureActive, + windowsCaptureStopRequested, + setWindowsCaptureStopRequested, + selectedSource, +} from "../state"; +import { moveFileWithOverwrite } from "../utils"; +import { probeMediaDurationSeconds } from "./diagnostics"; +import { emitRecordingInterrupted } from "./events"; +import { getWindowsCaptureExePath } from "../paths/binaries"; + +const execFileAsync = promisify(execFile); + +export async function isNativeWindowsCaptureAvailable(): Promise { + if (process.platform !== "win32") return false; + + const helperPath = getWindowsCaptureExePath(); + const os = await import("node:os"); + const [major, , build] = os.release().split(".").map(Number); + const supported = major >= 10 && build >= 19041; + let helperExists = false; + + try { + await fs.access(helperPath, fsConstants.X_OK); + helperExists = true; + } catch { + return false; + } + + void helperExists; + return supported; +} + +export function waitForWindowsCaptureStart(proc: ChildProcessWithoutNullStreams) { + return new Promise((resolve, reject) => { + const timer = setTimeout(() => { + cleanup(); + reject(new Error("Timed out waiting for native Windows capture to start")); + }, 12000); + + const onStdout = (chunk: Buffer) => { + const text = chunk.toString(); + if (text.includes("Recording started")) { + cleanup(); + resolve(); + } + }; + + const onError = (error: Error) => { + cleanup(); + reject(error); + }; + + const onExit = (code: number | null) => { + cleanup(); + 
reject( + new Error( + windowsCaptureOutputBuffer.trim() || + `Native Windows capture exited before recording started (code ${code ?? "unknown"})`, + ), + ); + }; + + const cleanup = () => { + clearTimeout(timer); + proc.stdout.off("data", onStdout); + proc.off("error", onError); + proc.off("exit", onExit); + }; + + proc.stdout.on("data", onStdout); + proc.once("error", onError); + proc.once("exit", onExit); + }); +} + +export function waitForWindowsCaptureStop(proc: ChildProcessWithoutNullStreams) { + return new Promise((resolve, reject) => { + const onClose = (code: number | null) => { + cleanup(); + const match = windowsCaptureOutputBuffer.match(/Recording stopped\. Output path: (.+)/); + if (match?.[1]) { + resolve(match[1].trim()); + return; + } + if (code === 0 && windowsCaptureTargetPath) { + resolve(windowsCaptureTargetPath); + return; + } + reject( + new Error( + windowsCaptureOutputBuffer.trim() || + `Native Windows capture exited with code ${code ?? "unknown"}`, + ), + ); + }; + + const onError = (error: Error) => { + cleanup(); + reject(error); + }; + + const cleanup = () => { + proc.off("close", onClose); + proc.off("error", onError); + }; + + proc.once("close", onClose); + proc.once("error", onError); + }); +} + +export function attachWindowsCaptureLifecycle(proc: ChildProcessWithoutNullStreams) { + proc.once("close", () => { + const wasActive = windowsNativeCaptureActive; + setWindowsCaptureProcess(null); + + if (!wasActive || windowsCaptureStopRequested) { + return; + } + + setWindowsNativeCaptureActive(false); + setWindowsCaptureStopRequested(false); + + const sourceName = selectedSource?.name ?? 
"Screen"; + BrowserWindow.getAllWindows().forEach((window) => { + if (!window.isDestroyed()) { + window.webContents.send("recording-state-changed", { + recording: false, + sourceName, + }); + } + }); + + emitRecordingInterrupted("capture-stopped", "Recording stopped unexpectedly."); + }); +} + +export async function muxNativeWindowsVideoWithAudio( + videoPath: string, + systemAudioPath: string | null, + micAudioPath: string | null, + pauseSegments: PauseSegment[] = [], +) { + const ffmpegPath = getFfmpegBinaryPath(); + const inputs: string[] = ["-i", videoPath]; + const audioInputs: string[] = []; + const audioFilePaths: string[] = []; + + for (const [label, audioPath] of [ + ["system", systemAudioPath], + ["mic", micAudioPath], + ] as const) { + if (!audioPath) continue; + try { + const stat = await fs.stat(audioPath); + if (stat.size <= 0) { + console.warn(`[mux-win] Skipping ${label} audio: file is empty (${audioPath})`); + await fs.rm(audioPath, { force: true }).catch(() => undefined); + continue; + } + inputs.push("-i", audioPath); + audioInputs.push(label); + audioFilePaths.push(audioPath); + } catch { + console.warn(`[mux-win] Skipping ${label} audio: file not accessible (${audioPath})`); + } + } + + if (audioInputs.length === 0) return; + + const videoDuration = await probeMediaDurationSeconds(videoPath); + const audioAdjustments: Map = new Map(); + + if (videoDuration > 0) { + for (let i = 0; i < audioFilePaths.length; i++) { + const audioDuration = await probeMediaDurationSeconds(audioFilePaths[i]); + const adjustment = getAudioSyncAdjustment(videoDuration, audioDuration); + audioAdjustments.set(audioInputs[i], adjustment); + if (adjustment.mode === "tempo") { + console.log( + `[mux-win] ${audioInputs[i]} audio differs from video by ${adjustment.durationDeltaMs}ms — applying tempo ratio ${adjustment.tempoRatio.toFixed(6)}`, + ); + } else if (adjustment.mode === "delay" && adjustment.delayMs > 0) { + console.log( + `[mux-win] ${audioInputs[i]} audio 
appears to start late by ${adjustment.delayMs}ms — adding leading silence`, + ); + } + } + } + + const mixedOutputPath = `${videoPath}.muxed.mp4`; + const normalizedPauseSegments = normalizePauseSegments(pauseSegments); + const systemAdjustment = audioAdjustments.get("system") ?? { + mode: "none", + delayMs: 0, + tempoRatio: 1, + durationDeltaMs: 0, + }; + const micAdjustment = audioAdjustments.get("mic") ?? { + mode: "none", + delayMs: 0, + tempoRatio: 1, + durationDeltaMs: 0, + }; + + if (audioInputs.length === 2) { + const filterParts: string[] = []; + const systemPauseFilter = buildPausedAudioFilter( + "1:a", + "system_trimmed", + normalizedPauseSegments, + ); + const micPauseFilter = buildPausedAudioFilter( + "2:a", + "mic_trimmed", + normalizedPauseSegments, + ); + + if (systemPauseFilter) { + filterParts.push(systemPauseFilter); + } + if (micPauseFilter) { + filterParts.push(micPauseFilter); + } + + const systemLabel = systemPauseFilter ? "[system_trimmed]" : "[1:a]"; + const micLabel = micPauseFilter ? "[mic_trimmed]" : "[2:a]"; + + appendSyncedAudioFilter(filterParts, systemLabel, "s", systemAdjustment); + appendSyncedAudioFilter(filterParts, micLabel, "m", micAdjustment); + filterParts.push("[s][m]amix=inputs=2:duration=longest:normalize=0[aout]"); + + await execFileAsync( + ffmpegPath, + [ + "-y", + ...inputs, + "-filter_complex", + filterParts.join(";"), + "-map", + "0:v:0", + "-map", + "[aout]", + "-c:v", + "copy", + "-c:a", + "aac", + "-b:a", + "192k", + "-shortest", + mixedOutputPath, + ], + { timeout: 120000, maxBuffer: 10 * 1024 * 1024 }, + ); + } else { + const pauseFilter = buildPausedAudioFilter("1:a", "trimmed_audio", normalizedPauseSegments); + const singleAdjustment = audioAdjustments.get(audioInputs[0]) ?? 
{ + mode: "none", + delayMs: 0, + tempoRatio: 1, + durationDeltaMs: 0, + }; + + if (pauseFilter || singleAdjustment.mode !== "none") { + const filterParts: string[] = []; + if (pauseFilter) { + filterParts.push(pauseFilter); + } + const srcLabel = pauseFilter ? "[trimmed_audio]" : "[1:a]"; + appendSyncedAudioFilter(filterParts, srcLabel, "aout", singleAdjustment); + + await execFileAsync( + ffmpegPath, + [ + "-y", + ...inputs, + "-filter_complex", + filterParts.join(";"), + "-map", + "0:v:0", + "-map", + "[aout]", + "-c:v", + "copy", + "-c:a", + "aac", + "-b:a", + "192k", + "-shortest", + mixedOutputPath, + ], + { timeout: 120000, maxBuffer: 10 * 1024 * 1024 }, + ); + } else { + await execFileAsync( + ffmpegPath, + [ + "-y", + ...inputs, + "-map", + "0:v:0", + "-map", + "1:a:0", + "-c:v", + "copy", + "-c:a", + "aac", + "-b:a", + "192k", + "-shortest", + mixedOutputPath, + ], + { timeout: 120000, maxBuffer: 10 * 1024 * 1024 }, + ); + } + } + + await moveFileWithOverwrite(mixedOutputPath, videoPath); + + for (const audioPath of [systemAudioPath, micAudioPath]) { + if (audioPath) { + await fs.rm(audioPath, { force: true }).catch(() => undefined); + } + } +} diff --git a/electron/ipc/state.ts b/electron/ipc/state.ts new file mode 100644 index 00000000..75b83644 --- /dev/null +++ b/electron/ipc/state.ts @@ -0,0 +1,169 @@ +import type { ChildProcessWithoutNullStreams } from "node:child_process"; +import type { + CursorInteractionType, + CursorTelemetryPoint, + CursorVisualType, + NativeCaptureDiagnostics, + RecordingSessionData, + SelectedSource, + SystemCursorAsset, + WindowBounds, +} from "./types"; + +// ── Source selection ────────────────────────────────────────────────────────── +export let selectedSource: SelectedSource | null = null; + +// ── Project / video state ───────────────────────────────────────────────────── +export let currentProjectPath: string | null = null; +export let currentVideoPath: string | null = null; +export let currentRecordingSession: 
RecordingSessionData | null = null; + +// ── Security: approved read paths ───────────────────────────────────────────── +export const approvedLocalReadPaths = new Set(); + +// ── Native macOS capture ────────────────────────────────────────────────────── +export let nativeScreenRecordingActive = false; +export let nativeCaptureProcess: ChildProcessWithoutNullStreams | null = null; +export let nativeCaptureOutputBuffer = ""; +export let nativeCaptureTargetPath: string | null = null; +export let nativeCaptureStopRequested = false; +export let nativeCaptureSystemAudioPath: string | null = null; +export let nativeCaptureMicrophonePath: string | null = null; +export let nativeCapturePaused = false; + +// ── Native cursor monitor ───────────────────────────────────────────────────── +export let nativeCursorMonitorProcess: ChildProcessWithoutNullStreams | null = null; +export let nativeCursorMonitorOutputBuffer = ""; + +// ── Windows native capture ──────────────────────────────────────────────────── +export let windowsCaptureProcess: ChildProcessWithoutNullStreams | null = null; +export let windowsCaptureOutputBuffer = ""; +export let windowsCaptureTargetPath: string | null = null; +export let windowsNativeCaptureActive = false; +export let windowsCaptureStopRequested = false; +export let windowsCapturePaused = false; +export let windowsSystemAudioPath: string | null = null; +export let windowsMicAudioPath: string | null = null; +export let windowsPendingVideoPath: string | null = null; + +// ── Diagnostics ─────────────────────────────────────────────────────────────── +export let lastNativeCaptureDiagnostics: NativeCaptureDiagnostics | null = null; + +// ── FFmpeg capture ──────────────────────────────────────────────────────────── +export let ffmpegScreenRecordingActive = false; +export let ffmpegCaptureProcess: ChildProcessWithoutNullStreams | null = null; +export let ffmpegCaptureOutputBuffer = ""; +export let ffmpegCaptureTargetPath: string | null = null; + +// ── 
Recordings directory ────────────────────────────────────────────────────── +export let customRecordingsDir: string | null = null; +export let recordingsDirLoaded = false; + +// ── System cursor assets cache ──────────────────────────────────────────────── +export let cachedSystemCursorAssets: Record | null = null; +export let cachedSystemCursorAssetsSourceMtimeMs: number | null = null; + +// ── Countdown ───────────────────────────────────────────────────────────────── +export let countdownTimer: ReturnType | null = null; +export let countdownCancelled = false; +export let countdownInProgress = false; +export let countdownRemaining: number | null = null; + +// ── Cursor visual type ──────────────────────────────────────────────────────── +export let currentCursorVisualType: CursorVisualType | undefined = undefined; + +// ── Cursor telemetry ────────────────────────────────────────────────────────── +export let cursorCaptureInterval: NodeJS.Timeout | null = null; +export let cursorCaptureStartTimeMs = 0; +export let activeCursorSamples: CursorTelemetryPoint[] = []; +export let pendingCursorSamples: CursorTelemetryPoint[] = []; +export let isCursorCaptureActive = false; +export let interactionCaptureCleanup: (() => void) | null = null; +export let hasLoggedInteractionHookFailure = false; +export let lastLeftClick: { timeMs: number; cx: number; cy: number } | null = null; +export let linuxCursorScreenPoint: { x: number; y: number; updatedAt: number } | null = null; +export let selectedWindowBounds: WindowBounds | null = null; +export let windowBoundsCaptureInterval: NodeJS.Timeout | null = null; + +// ── Native macOS window source cache ───────────────────────────────────────── +export let cachedNativeMacWindowSources: import("./types").NativeMacWindowSource[] | null = null; +export let cachedNativeMacWindowSourcesAtMs = 0; + +// ── Native video export ─────────────────────────────────────────────────────── +export let cachedNativeVideoEncoder: { ffmpegPath: string; 
encoderName: string } | null = null; + +// ── Native helper migration ─────────────────────────────────────────────────── +export let nativeHelperMigrationPromise: Promise | null = null; + +// ── Cursor interaction capture types ───────────────────────────────────────── +export type { CursorInteractionType, CursorTelemetryPoint }; + +// ── Setters (for modules that need to reassign exported lets) ───────────────── +// TypeScript exported `let` can be reassigned by the owning module but importers +// cannot assign to them directly. Provide simple setters for cross-module writes. + +export function setSelectedSource(v: SelectedSource | null) { selectedSource = v; } +export function setCurrentProjectPath(v: string | null) { currentProjectPath = v; } +export function setCurrentVideoPath(v: string | null) { currentVideoPath = v; } +export function setCurrentRecordingSession(v: RecordingSessionData | null) { currentRecordingSession = v; } + +export function setNativeScreenRecordingActive(v: boolean) { nativeScreenRecordingActive = v; } +export function setNativeCaptureProcess(v: ChildProcessWithoutNullStreams | null) { nativeCaptureProcess = v; } +export function setNativeCaptureOutputBuffer(v: string) { nativeCaptureOutputBuffer = v; } +export function setNativeCaptureTargetPath(v: string | null) { nativeCaptureTargetPath = v; } +export function setNativeCaptureStopRequested(v: boolean) { nativeCaptureStopRequested = v; } +export function setNativeCaptureSystemAudioPath(v: string | null) { nativeCaptureSystemAudioPath = v; } +export function setNativeCaptureMicrophonePath(v: string | null) { nativeCaptureMicrophonePath = v; } +export function setNativeCapturePaused(v: boolean) { nativeCapturePaused = v; } + +export function setNativeCursorMonitorProcess(v: ChildProcessWithoutNullStreams | null) { nativeCursorMonitorProcess = v; } +export function setNativeCursorMonitorOutputBuffer(v: string) { nativeCursorMonitorOutputBuffer = v; } + +export function 
setWindowsCaptureProcess(v: ChildProcessWithoutNullStreams | null) { windowsCaptureProcess = v; } +export function setWindowsCaptureOutputBuffer(v: string) { windowsCaptureOutputBuffer = v; } +export function setWindowsCaptureTargetPath(v: string | null) { windowsCaptureTargetPath = v; } +export function setWindowsNativeCaptureActive(v: boolean) { windowsNativeCaptureActive = v; } +export function setWindowsCaptureStopRequested(v: boolean) { windowsCaptureStopRequested = v; } +export function setWindowsCapturePaused(v: boolean) { windowsCapturePaused = v; } +export function setWindowsSystemAudioPath(v: string | null) { windowsSystemAudioPath = v; } +export function setWindowsMicAudioPath(v: string | null) { windowsMicAudioPath = v; } +export function setWindowsPendingVideoPath(v: string | null) { windowsPendingVideoPath = v; } + +export function setLastNativeCaptureDiagnostics(v: NativeCaptureDiagnostics | null) { lastNativeCaptureDiagnostics = v; } + +export function setFfmpegScreenRecordingActive(v: boolean) { ffmpegScreenRecordingActive = v; } +export function setFfmpegCaptureProcess(v: ChildProcessWithoutNullStreams | null) { ffmpegCaptureProcess = v; } +export function setFfmpegCaptureOutputBuffer(v: string) { ffmpegCaptureOutputBuffer = v; } +export function setFfmpegCaptureTargetPath(v: string | null) { ffmpegCaptureTargetPath = v; } + +export function setCustomRecordingsDir(v: string | null) { customRecordingsDir = v; } +export function setRecordingsDirLoaded(v: boolean) { recordingsDirLoaded = v; } + +export function setCachedSystemCursorAssets(v: Record | null) { cachedSystemCursorAssets = v; } +export function setCachedSystemCursorAssetsSourceMtimeMs(v: number | null) { cachedSystemCursorAssetsSourceMtimeMs = v; } + +export function setCountdownTimer(v: ReturnType | null) { countdownTimer = v; } +export function setCountdownCancelled(v: boolean) { countdownCancelled = v; } +export function setCountdownInProgress(v: boolean) { countdownInProgress = v; } 
+export function setCountdownRemaining(v: number | null) { countdownRemaining = v; } + +export function setCurrentCursorVisualType(v: CursorVisualType | undefined) { currentCursorVisualType = v; } + +export function setCursorCaptureInterval(v: NodeJS.Timeout | null) { cursorCaptureInterval = v; } +export function setCursorCaptureStartTimeMs(v: number) { cursorCaptureStartTimeMs = v; } +export function setActiveCursorSamples(v: CursorTelemetryPoint[]) { activeCursorSamples = v; } +export function setPendingCursorSamples(v: CursorTelemetryPoint[]) { pendingCursorSamples = v; } +export function setIsCursorCaptureActive(v: boolean) { isCursorCaptureActive = v; } +export function setInteractionCaptureCleanup(v: (() => void) | null) { interactionCaptureCleanup = v; } +export function setHasLoggedInteractionHookFailure(v: boolean) { hasLoggedInteractionHookFailure = v; } +export function setLastLeftClick(v: { timeMs: number; cx: number; cy: number } | null) { lastLeftClick = v; } +export function setLinuxCursorScreenPoint(v: { x: number; y: number; updatedAt: number } | null) { linuxCursorScreenPoint = v; } +export function setSelectedWindowBounds(v: WindowBounds | null) { selectedWindowBounds = v; } +export function setWindowBoundsCaptureInterval(v: NodeJS.Timeout | null) { windowBoundsCaptureInterval = v; } + +export function setCachedNativeMacWindowSources(v: import("./types").NativeMacWindowSource[] | null) { cachedNativeMacWindowSources = v; } +export function setCachedNativeMacWindowSourcesAtMs(v: number) { cachedNativeMacWindowSourcesAtMs = v; } + +export function setCachedNativeVideoEncoder(v: { ffmpegPath: string; encoderName: string } | null) { cachedNativeVideoEncoder = v; } + +export function setNativeHelperMigrationPromise(v: Promise | null) { nativeHelperMigrationPromise = v; } diff --git a/electron/ipc/types.ts b/electron/ipc/types.ts new file mode 100644 index 00000000..5922fb0e --- /dev/null +++ b/electron/ipc/types.ts @@ -0,0 +1,202 @@ +export type 
SelectedSource = { + id?: string; + name: string; + display_id?: string; + sourceType?: "screen" | "window"; + appName?: string; + windowTitle?: string; + [key: string]: unknown; +}; + +export type NativeMacRecordingOptions = { + capturesSystemAudio?: boolean; + capturesMicrophone?: boolean; + microphoneDeviceId?: string; + microphoneLabel?: string; +}; + +export type WindowBounds = { + x: number; + y: number; + width: number; + height: number; +}; + +export type NativeCaptureDiagnostics = { + backend: "windows-wgc" | "mac-screencapturekit" | "browser-store" | "ffmpeg"; + phase: "availability" | "start" | "stop" | "mux"; + timestamp: string; + sourceId?: string | null; + sourceType?: SelectedSource["sourceType"] | "unknown"; + displayId?: number | null; + displayBounds?: WindowBounds | null; + windowHandle?: number | null; + helperPath?: string | null; + outputPath?: string | null; + systemAudioPath?: string | null; + microphonePath?: string | null; + osRelease?: string; + supported?: boolean; + helperExists?: boolean; + fileSizeBytes?: number | null; + processOutput?: string; + error?: string; +}; + +export type RecordingSessionData = { + videoPath: string; + webcamPath?: string | null; + timeOffsetMs?: number; +}; + +export type PauseSegment = { + startMs: number; + endMs: number; +}; + +export type RecordingSessionManifest = { + version: 1 | 2; + videoFileName: string; + webcamFileName?: string | null; + timeOffsetMs?: number; +}; + +export type ProjectLibraryEntry = { + path: string; + name: string; + updatedAt: number; + thumbnailPath: string | null; + isCurrent: boolean; + isInProjectsDirectory: boolean; +}; + +export type SystemCursorAsset = { + dataUrl: string; + hotspotX: number; + hotspotY: number; + width: number; + height: number; +}; + +export type CursorVisualType = + | "arrow" + | "text" + | "pointer" + | "crosshair" + | "open-hand" + | "closed-hand" + | "resize-ew" + | "resize-ns" + | "not-allowed"; + +export type CursorInteractionType = + | "move" 
+ | "click" + | "double-click" + | "right-click" + | "middle-click" + | "mouseup"; + +export interface CursorTelemetryPoint { + timeMs: number; + cx: number; + cy: number; + interactionType?: CursorInteractionType; + cursorType?: CursorVisualType; +} + +export type NativeMacWindowSource = { + id: string; + name: string; + display_id?: string; + appName?: string; + windowTitle?: string; + bundleId?: string; + appIcon?: string | null; + x?: number; + y?: number; + width?: number; + height?: number; +}; + +export type HookEventName = "mousedown" | "mouseup" | "mousemove"; + +export type HookMouseEvent = { + button?: number; + mouseButton?: number; + x?: number; + y?: number; + screenX?: number; + screenY?: number; + data?: { + button?: number; + mouseButton?: number; + x?: number; + y?: number; + screenX?: number; + screenY?: number; + }; +}; + +export type HookEventListener = (event: HookMouseEvent) => void; + +export type UiohookLike = { + on: (eventName: HookEventName, listener: HookEventListener) => void; + off?: (eventName: HookEventName, listener: HookEventListener) => void; + removeListener?: (eventName: HookEventName, listener: HookEventListener) => void; + start: () => void; + stop?: () => void; +}; + +export type UiohookModuleNamespace = { + uIOhook?: UiohookLike; + uiohook?: UiohookLike; + Uiohook?: UiohookLike; + default?: UiohookLike | UiohookModuleNamespace; +}; + +export type AudioSyncAdjustment = { + mode: "none" | "tempo" | "delay"; + delayMs: number; + tempoRatio: number; + durationDeltaMs: number; +}; + +export type CompanionAudioCandidate = { + platform: "mac" | "win"; + systemPath: string; + micPath: string; + usablePaths: string[]; +}; + +export type CaptionWordPayload = { + text: string; + startMs: number; + endMs: number; + leadingSpace?: boolean; +}; + +export type CaptionCuePayload = { + id: string; + startMs: number; + endMs: number; + text: string; + words?: CaptionWordPayload[]; +}; + +export type WhisperJsonToken = { + text?: unknown; + 
offsets?: { + from?: unknown; + to?: unknown; + }; +}; + +export type WhisperJsonSegment = { + text?: unknown; + offsets?: { + from?: unknown; + to?: unknown; + }; + tokens?: unknown; +}; diff --git a/electron/ipc/utils.ts b/electron/ipc/utils.ts new file mode 100644 index 00000000..ccb28750 --- /dev/null +++ b/electron/ipc/utils.ts @@ -0,0 +1,105 @@ +import { createRequire } from "node:module"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; +import { app } from "electron"; +import { RECORDINGS_DIR } from "../appPaths"; +import { RECORDINGS_SETTINGS_FILE, AUTO_RECORDING_PREFIX } from "./constants"; +import { + customRecordingsDir, + setCustomRecordingsDir, + recordingsDirLoaded, + setRecordingsDirLoaded, +} from "./state"; + +const nodeRequire = createRequire(import.meta.url); + +export function getScreen() { + if (!app.isReady()) { + throw new Error( + "getScreen() called before app is ready. Ensure all screen access happens after app.whenReady().", + ); + } + return nodeRequire("electron").screen as typeof import("electron").screen; +} + +export function normalizePath(filePath: string) { + return path.resolve(filePath); +} + +export function normalizeVideoSourcePath(videoPath?: string | null): string | null { + if (typeof videoPath !== "string") { + return null; + } + + const trimmed = videoPath.trim(); + if (!trimmed) { + return null; + } + + if (/^file:\/\//i.test(trimmed)) { + try { + return fileURLToPath(trimmed); + } catch { + // Fall through and keep best-effort string path below. + } + } + + return trimmed; +} + +export function parseWindowId(sourceId?: string) { + if (!sourceId) return null; + const match = sourceId.match(/^window:(\d+)/); + return match ? 
Number.parseInt(match[1], 10) : null; +} + +export function getTelemetryPathForVideo(videoPath: string) { + return `${videoPath}.cursor.json`; +} + +export function isAutoRecordingPath(filePath: string) { + return path.basename(filePath).startsWith(AUTO_RECORDING_PREFIX); +} + +export async function moveFileWithOverwrite(sourcePath: string, destinationPath: string) { + await fs.mkdir(path.dirname(destinationPath), { recursive: true }); + await fs.rm(destinationPath, { force: true }); + + try { + await fs.rename(sourcePath, destinationPath); + } catch (error) { + const nodeError = error as NodeJS.ErrnoException; + if (nodeError.code !== "EXDEV") { + throw error; + } + + await fs.copyFile(sourcePath, destinationPath); + await fs.unlink(sourcePath); + } +} + +async function loadRecordingsDirectorySetting() { + if (recordingsDirLoaded) { + return; + } + + setRecordingsDirLoaded(true); + + try { + const content = await fs.readFile(RECORDINGS_SETTINGS_FILE, "utf-8"); + const parsed = JSON.parse(content) as { recordingsDir?: unknown }; + if (typeof parsed.recordingsDir === "string" && parsed.recordingsDir.trim()) { + setCustomRecordingsDir(path.resolve(parsed.recordingsDir)); + } + } catch { + setCustomRecordingsDir(null); + } +} + +export async function getRecordingsDir() { + await loadRecordingsDirectorySetting(); + const targetDir = customRecordingsDir ?? RECORDINGS_DIR; + await fs.mkdir(targetDir, { recursive: true }); + return targetDir; +} From 673dfdadd325777ac498277088a038a644ee5df9 Mon Sep 17 00:00:00 2001 From: webadderall <131426131+webadderall@users.noreply.github.com> Date: Fri, 17 Apr 2026 20:13:47 +1000 Subject: [PATCH 2/6] refactor: extract registerIpcHandlers into 8 focused register/ modules MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit handlers.ts reduced from 2930 → 65 lines (pure delegation). 
New files under electron/ipc/register/: - sources.ts — get-sources, select-source, show-source-highlight, open-source-selector - recording.ts — start/stop/pause native + ffmpeg, mux, store, set-recording-state, get-cursor-telemetry - permissions.ts — accessibility/screen permissions, open-external-url - assets.ts — wallpaper thumbnails, asset-base-path, list-asset-dir, read-local-file - export.ts — native-video-export-*, save-exported-video - captions.ts — whisper model, file pickers, generate-auto-captions - project.ts — project files, recordings dir, video/session state, delete-recording - settings.ts — shortcuts, recording prefs, countdown, platform info Also moved shared helpers: - getMacPrivacySettingsUrl, approveUserPath → utils.ts - isTrustedProjectPath → project/manager.ts --- electron/ipc/handlers.ts | 2901 +------------------------- electron/ipc/project/manager.ts | 6 + electron/ipc/register/assets.ts | 123 ++ electron/ipc/register/captions.ts | 207 ++ electron/ipc/register/export.ts | 383 ++++ electron/ipc/register/permissions.ts | 87 + electron/ipc/register/project.ts | 371 ++++ electron/ipc/register/recording.ts | 1159 ++++++++++ electron/ipc/register/settings.ts | 195 ++ electron/ipc/register/sources.ts | 448 ++++ electron/ipc/utils.ts | 19 + 11 files changed, 3017 insertions(+), 2882 deletions(-) create mode 100644 electron/ipc/register/assets.ts create mode 100644 electron/ipc/register/captions.ts create mode 100644 electron/ipc/register/export.ts create mode 100644 electron/ipc/register/permissions.ts create mode 100644 electron/ipc/register/project.ts create mode 100644 electron/ipc/register/recording.ts create mode 100644 electron/ipc/register/settings.ts create mode 100644 electron/ipc/register/sources.ts diff --git a/electron/ipc/handlers.ts b/electron/ipc/handlers.ts index db91ccb7..760bbe23 100644 --- a/electron/ipc/handlers.ts +++ b/electron/ipc/handlers.ts @@ -1,185 +1,29 @@ -import type { ChildProcessByStdio, ChildProcessWithoutNullStreams 
} from "node:child_process"; -import { execFile, spawn } from "node:child_process"; -import { existsSync, constants as fsConstants } from "node:fs"; -import fs from "node:fs/promises"; -import path from "node:path"; -import type { Readable, Writable } from "node:stream"; -import { pathToFileURL } from "node:url"; -import { promisify } from "node:util"; -import type { SaveDialogOptions } from "electron"; +import { BrowserWindow } from "electron"; import { - app, - BrowserWindow, - desktopCapturer, - dialog, - ipcMain, - shell, - systemPreferences, -} from "electron"; -import { RECORDINGS_DIR, USER_DATA_PATH } from "../appPaths"; -import { hideCursor, showCursor } from "../cursorHider"; -import { closeCountdownWindow, createCountdownWindow, getCountdownWindow } from "../windows"; -import { - buildNativeH264StreamExportArgs, - buildNativeVideoExportArgs, - getNativeVideoInputByteSize, - type NativeExportEncodingMode, - type NativeVideoExportFinishOptions, -} from "./nativeVideoExport"; -import { resolveWindowsCaptureDisplay } from "./windowsCaptureSelection"; -import { - PROJECT_FILE_EXTENSION, - LEGACY_PROJECT_FILE_EXTENSIONS, - SHORTCUTS_FILE, - RECORDINGS_SETTINGS_FILE, - COUNTDOWN_SETTINGS_FILE, - ALLOW_RECORDLY_WINDOW_CAPTURE, - CURSOR_SAMPLE_INTERVAL_MS, -} from "./constants"; -import type { - SelectedSource, - NativeMacRecordingOptions, - PauseSegment, - SystemCursorAsset, - CursorTelemetryPoint, -} from "./types"; -import { - selectedSource, - setSelectedSource, - currentProjectPath, - setCurrentProjectPath, - nativeScreenRecordingActive, - setNativeScreenRecordingActive, - currentVideoPath, - setCurrentVideoPath, - currentRecordingSession, - setCurrentRecordingSession, - approvedLocalReadPaths, - nativeCaptureProcess, - setNativeCaptureProcess, - nativeCaptureOutputBuffer, - setNativeCaptureOutputBuffer, - nativeCaptureTargetPath, - setNativeCaptureTargetPath, - setNativeCaptureStopRequested, - nativeCaptureSystemAudioPath, - setNativeCaptureSystemAudioPath, 
- nativeCaptureMicrophonePath, - setNativeCaptureMicrophonePath, - nativeCapturePaused, - setNativeCapturePaused, windowsCaptureProcess, setWindowsCaptureProcess, - windowsCaptureOutputBuffer, - setWindowsCaptureOutputBuffer, - windowsCaptureTargetPath, setWindowsCaptureTargetPath, - windowsNativeCaptureActive, setWindowsNativeCaptureActive, + setNativeScreenRecordingActive, setWindowsCaptureStopRequested, - windowsCapturePaused, setWindowsCapturePaused, - windowsSystemAudioPath, setWindowsSystemAudioPath, - windowsMicAudioPath, setWindowsMicAudioPath, - windowsPendingVideoPath, setWindowsPendingVideoPath, - lastNativeCaptureDiagnostics, - ffmpegScreenRecordingActive, - setFfmpegScreenRecordingActive, - ffmpegCaptureProcess, - setFfmpegCaptureProcess, - ffmpegCaptureOutputBuffer, - setFfmpegCaptureOutputBuffer, - ffmpegCaptureTargetPath, - setFfmpegCaptureTargetPath, - cachedSystemCursorAssets, - setCachedSystemCursorAssets, - cachedSystemCursorAssetsSourceMtimeMs, - setCachedSystemCursorAssetsSourceMtimeMs, - countdownTimer, - setCountdownTimer, - countdownCancelled, - setCountdownCancelled, - countdownInProgress, - setCountdownInProgress, - countdownRemaining, - setCountdownRemaining, - setCursorCaptureInterval, - setCursorCaptureStartTimeMs, - setActiveCursorSamples, - setPendingCursorSamples, - setIsCursorCaptureActive, - setLastLeftClick, - setLinuxCursorScreenPoint, + selectedSource, } from "./state"; -import { getFfmpegBinaryPath } from "./ffmpeg/binary"; -import { - sendWhisperModelDownloadProgress, - getWhisperSmallModelStatus, - downloadWhisperSmallModel, - deleteWhisperSmallModel, -} from "./captions/whisper"; -import { - getNativeCaptureHelperBinaryPath, - getSystemCursorHelperSourcePath, - getSystemCursorHelperBinaryPath, - ensureSwiftHelperBinary, - getWindowsCaptureExePath, - ensureNativeCaptureHelperBinary, -} from "./paths/binaries"; -import { - stopNativeCursorMonitor, - startNativeCursorMonitor, -} from "./cursor/monitor"; -import { getScreen, 
normalizePath, normalizeVideoSourcePath, parseWindowId, getTelemetryPathForVideo, isAutoRecordingPath, moveFileWithOverwrite, getRecordingsDir } from "./utils"; -import { recordNativeCaptureDiagnostics, getFileSizeIfPresent, getCompanionAudioFallbackPaths } from "./recording/diagnostics"; -import { getProjectsDir, persistRecordingsDirectorySetting, saveProjectThumbnail, rememberRecentProject, listProjectLibraryEntries, loadProjectFromPath, isAllowedLocalReadPath, rememberApprovedLocalReadPath, replaceApprovedSessionLocalReadPaths, getAssetRootPath } from "./project/manager"; -import { persistRecordingSessionManifest, resolveRecordingSession } from "./project/session"; -import { - nativeVideoExportSessions, - getNativeVideoExportMaxQueuedWriteBytes, - isHardwareAcceleratedVideoEncoder, - removeTemporaryExportFile, - getNativeVideoExportSessionError, - sendNativeVideoExportWriteFrameResult, - settleNativeVideoExportWriteFrameRequest, - flushNativeVideoExportPendingWriteRequests, - isIgnorableNativeVideoExportStreamError, - enqueueNativeVideoExportFrameWrite, - resolveNativeVideoEncoder, - muxNativeVideoExportAudio, - muxExportedVideoAudioBuffer, - type NativeVideoExportSession, -} from "./export/native-video"; -import { generateAutoCaptionsFromVideo } from "./captions/generate"; -import { buildFfmpegCaptureArgs, waitForFfmpegCaptureStart, waitForFfmpegCaptureStop, getDisplayBoundsForSource } from "./recording/ffmpeg"; -import { isNativeWindowsCaptureAvailable, waitForWindowsCaptureStart, waitForWindowsCaptureStop, attachWindowsCaptureLifecycle, muxNativeWindowsVideoWithAudio } from "./recording/windows"; -import { waitForNativeCaptureStart, waitForNativeCaptureStop, muxNativeMacRecordingWithAudio, attachNativeCaptureLifecycle, finalizeStoredVideo, recoverNativeMacCaptureOutput } from "./recording/mac"; -import { clamp, stopCursorCapture, sampleCursorPoint, snapshotCursorTelemetryForPersistence } from "./cursor/telemetry"; -import { getNativeMacWindowSources, 
stopWindowBoundsCapture, resolveMacWindowBounds, startWindowBoundsCapture, resolveLinuxWindowBounds, resolveWindowsWindowBounds } from "./cursor/bounds"; -import { startInteractionCapture, stopInteractionCapture } from "./cursor/interaction"; +import { registerSourceHandlers } from "./register/sources"; +import { registerRecordingHandlers } from "./register/recording"; +import { registerPermissionHandlers } from "./register/permissions"; +import { registerAssetHandlers } from "./register/assets"; +import { registerExportHandlers } from "./register/export"; +import { registerCaptionHandlers } from "./register/captions"; +import { registerProjectHandlers } from "./register/project"; +import { registerSettingsHandlers } from "./register/settings"; export { cleanupNativeVideoExportSessions } from "./export/native-video"; -const execFileAsync = promisify(execFile); - -function normalizeRecordingTimeOffsetMs(value: unknown): number { - return typeof value === "number" && Number.isFinite(value) ? Math.round(value) : 0; -} - -function broadcastSelectedSourceChange() { - for (const window of BrowserWindow.getAllWindows()) { - if (!window.isDestroyed()) { - window.webContents.send("selected-source-changed", selectedSource); - } - } -} - - /** Returns the currently selected source ID for setDisplayMediaRequestHandler */ - export function getSelectedSourceId(): string | null { return (selectedSource?.id as string | null) ?? 
null; } @@ -203,96 +47,6 @@ export function killWindowsCaptureProcess() { } } -function normalizeDesktopSourceName(value: string) { - return value.trim().replace(/\s+/g, " ").toLowerCase(); -} - -function hasUsableSourceThumbnail( - thumbnail: - | { - isEmpty: () => boolean; - getSize: () => { width: number; height: number }; - } - | null - | undefined, -) { - if (!thumbnail || thumbnail.isEmpty()) { - return false; - } - - const size = thumbnail.getSize(); - return size.width > 1 && size.height > 1; -} - -function getMacPrivacySettingsUrl(pane: "screen" | "accessibility" | "microphone") { - if (pane === "screen") - return "x-apple.systempreferences:com.apple.preference.security?Privacy_ScreenCapture"; - if (pane === "microphone") - return "x-apple.systempreferences:com.apple.preference.security?Privacy_Microphone"; - return "x-apple.systempreferences:com.apple.preference.security?Privacy_Accessibility"; -} - - -function approveUserPath(filePath: string | null | undefined) { - if (!filePath) { - return; - } - - try { - approvedLocalReadPaths.add(path.resolve(filePath)); - } catch { - // Ignore invalid paths; later reads will surface the underlying error. - } -} - -async function getSystemCursorAssets() { - if (process.platform !== "darwin") { - setCachedSystemCursorAssets({}); - setCachedSystemCursorAssetsSourceMtimeMs(null); - return cachedSystemCursorAssets ?? 
{}; - } - - const sourcePath = getSystemCursorHelperSourcePath(); - const sourceStat = await fs.stat(sourcePath); - if (cachedSystemCursorAssets && cachedSystemCursorAssetsSourceMtimeMs === sourceStat.mtimeMs) { - return cachedSystemCursorAssets; - } - - const binaryPath = await ensureSwiftHelperBinary( - sourcePath, - getSystemCursorHelperBinaryPath(), - "system cursor helper", - "recordly-system-cursors", - ); - - const { stdout } = await execFileAsync(binaryPath, [], { - timeout: 15000, - maxBuffer: 20 * 1024 * 1024, - }); - const parsed = JSON.parse(stdout) as Record>; - const result = Object.fromEntries( - Object.entries(parsed).filter( - ([, asset]) => - typeof asset?.dataUrl === "string" && - typeof asset?.hotspotX === "number" && - typeof asset?.hotspotY === "number" && - typeof asset?.width === "number" && - typeof asset?.height === "number", - ), - ) as Record; - setCachedSystemCursorAssets(result); - setCachedSystemCursorAssetsSourceMtimeMs(sourceStat.mtimeMs); - - return result; -} - -function isTrustedProjectPath(filePath?: string | null) { - if (!filePath || !currentProjectPath) { - return false; - } - return normalizePath(filePath) === normalizePath(currentProjectPath); -} - export function registerIpcHandlers( createEditorWindow: () => void, createSourceSelectorWindow: () => BrowserWindow, @@ -300,2629 +54,12 @@ export function registerIpcHandlers( getSourceSelectorWindow: () => BrowserWindow | null, onRecordingStateChange?: (recording: boolean, sourceName: string) => void, ) { - ipcMain.handle("get-sources", async (_, opts) => { - const includeScreens = Array.isArray(opts?.types) ? opts.types.includes("screen") : true; - const includeWindows = Array.isArray(opts?.types) ? opts.types.includes("window") : true; - const electronTypes = [ - ...(includeScreens ? ["screen" as const] : []), - ...(includeWindows ? ["window" as const] : []), - ]; - const electronSources = - electronTypes.length > 0 - ? 
await desktopCapturer - .getSources({ - ...opts, - types: electronTypes, - }) - .catch((error) => { - console.warn( - "desktopCapturer.getSources failed (screen recording permission may be missing):", - error, - ); - return []; - }) - : []; - const ownWindowNames = new Set( - [ - app.getName(), - "Recordly", - ...BrowserWindow.getAllWindows().flatMap((win) => { - const title = win.getTitle().trim(); - return title ? [title] : []; - }), - ] - .map((name) => normalizeDesktopSourceName(name)) - .filter(Boolean), - ); - const ownAppName = normalizeDesktopSourceName(app.getName()); - - const displays = includeScreens - ? [...getScreen().getAllDisplays()].sort( - (left, right) => - left.bounds.x - right.bounds.x || - left.bounds.y - right.bounds.y || - left.id - right.id, - ) - : []; - const primaryDisplayId = includeScreens ? String(getScreen().getPrimaryDisplay().id) : ""; - const electronScreenSourcesByDisplayId = new Map( - electronSources - .filter((source) => source.id.startsWith("screen:")) - .map((source) => [String(source.display_id ?? ""), source] as const), - ); - - const screenSources = displays.map((display, index) => { - const displayId = String(display.id); - const matchedSource = electronScreenSourcesByDisplayId.get(displayId); - const displayName = - displayId === primaryDisplayId - ? `Screen ${index + 1} (Primary)` - : `Screen ${index + 1}`; - - return { - id: matchedSource?.id ?? `screen:fallback:${displayId}`, - name: displayName, - originalName: matchedSource?.name ?? displayName, - display_id: displayId, - thumbnail: matchedSource?.thumbnail ? matchedSource.thumbnail.toDataURL() : null, - appIcon: matchedSource?.appIcon ? 
matchedSource.appIcon.toDataURL() : null, - sourceType: "screen" as const, - }; - }); - - if (process.platform !== "darwin" || !includeWindows) { - const windowSources = electronSources - .filter((source) => source.id.startsWith("window:")) - .filter((source) => hasUsableSourceThumbnail(source.thumbnail)) - .filter((source) => { - const normalizedName = normalizeDesktopSourceName(source.name); - if (!normalizedName) { - return true; - } - - if (ALLOW_RECORDLY_WINDOW_CAPTURE && normalizedName.includes("recordly")) { - return true; - } - - for (const ownName of ownWindowNames) { - if (!ownName) continue; - if (normalizedName === ownName) { - return false; - } - } - - return true; - }) - .map((source) => ({ - id: source.id, - name: source.name, - originalName: source.name, - display_id: source.display_id, - thumbnail: source.thumbnail ? source.thumbnail.toDataURL() : null, - appIcon: source.appIcon ? source.appIcon.toDataURL() : null, - sourceType: "window" as const, - })); - - return [...screenSources, ...windowSources]; - } - - try { - const nativeWindowSources = await getNativeMacWindowSources(); - const electronWindowSourceMap = new Map( - electronSources - .filter((source) => source.id.startsWith("window:")) - .map((source) => [source.id, source] as const), - ); - - const mergedWindowSources = nativeWindowSources - .filter((source) => { - const normalizedWindowName = normalizeDesktopSourceName( - source.windowTitle ?? source.name, - ); - const normalizedAppName = normalizeDesktopSourceName(source.appName ?? 
""); - - if ( - !ALLOW_RECORDLY_WINDOW_CAPTURE && - normalizedAppName && - normalizedAppName === ownAppName - ) { - return false; - } - - if ( - ALLOW_RECORDLY_WINDOW_CAPTURE && - (normalizedAppName === "recordly" || - normalizedWindowName?.includes("recordly")) - ) { - return true; - } - - if (!normalizedWindowName) { - return true; - } - - for (const ownName of ownWindowNames) { - if (!ownName) continue; - if (normalizedWindowName === ownName) { - return false; - } - } - - return true; - }) - .map((source) => { - const electronWindowSource = electronWindowSourceMap.get(source.id); - return { - id: source.id, - name: source.name, - originalName: source.name, - display_id: source.display_id ?? electronWindowSource?.display_id ?? "", - thumbnail: electronWindowSource?.thumbnail - ? electronWindowSource.thumbnail.toDataURL() - : null, - appIcon: - source.appIcon ?? - (electronWindowSource?.appIcon - ? electronWindowSource.appIcon.toDataURL() - : null), - appName: source.appName, - windowTitle: source.windowTitle, - sourceType: "window" as const, - }; - }); - - return [...screenSources, ...mergedWindowSources]; - } catch (error) { - console.warn("Falling back to Electron window enumeration on macOS:", error); - - const windowSources = electronSources - .filter((source) => source.id.startsWith("window:")) - .filter((source) => { - const normalizedName = normalizeDesktopSourceName(source.name); - if (!normalizedName) { - return true; - } - - if (ALLOW_RECORDLY_WINDOW_CAPTURE && normalizedName.includes("recordly")) { - return true; - } - - for (const ownName of ownWindowNames) { - if (!ownName) continue; - if ( - normalizedName === ownName || - normalizedName.includes(ownName) || - ownName.includes(normalizedName) - ) { - return false; - } - } - - return true; - }) - .map((source) => ({ - id: source.id, - name: source.name, - originalName: source.name, - display_id: source.display_id, - thumbnail: source.thumbnail ? 
source.thumbnail.toDataURL() : null, - appIcon: source.appIcon ? source.appIcon.toDataURL() : null, - sourceType: "window" as const, - })); - - return [...screenSources, ...windowSources]; - } - }); - - ipcMain.handle("select-source", (_, source: SelectedSource) => { - setSelectedSource(source); - broadcastSelectedSourceChange(); - stopWindowBoundsCapture(); - const sourceSelectorWin = getSourceSelectorWindow(); - if (sourceSelectorWin) { - sourceSelectorWin.close(); - } - return selectedSource; - }); - - ipcMain.handle("show-source-highlight", async (_, source: SelectedSource) => { - try { - const isWindow = source.id?.startsWith("window:"); - const windowId = isWindow ? parseWindowId(source.id) : null; - - // ── 1. Bring window to front ── - if (isWindow && process.platform === "darwin") { - const appName = source.appName || source.name?.split(" — ")[0]?.trim(); - if (appName) { - try { - await execFileAsync( - "osascript", - ["-e", `tell application "${appName}" to activate`], - { timeout: 2000 }, - ); - await new Promise((resolve) => setTimeout(resolve, 350)); - } catch { - /* ignore */ - } - } - } else if (windowId && process.platform === "linux") { - try { - await execFileAsync("wmctrl", ["-i", "-a", `0x${windowId.toString(16)}`], { - timeout: 1500, - }); - } catch { - try { - await execFileAsync("xdotool", ["windowactivate", String(windowId)], { - timeout: 1500, - }); - } catch { - /* not available */ - } - } - await new Promise((resolve) => setTimeout(resolve, 250)); - } - - // ── 2. 
Resolve bounds ── - let bounds: { x: number; y: number; width: number; height: number } | null = null; - - if (source.id?.startsWith("screen:")) { - bounds = getDisplayBoundsForSource(source); - } else if (isWindow) { - if (process.platform === "darwin") { - bounds = await resolveMacWindowBounds(source); - } else if (process.platform === "win32") { - bounds = await resolveWindowsWindowBounds(source); - } else if (process.platform === "linux") { - bounds = await resolveLinuxWindowBounds(source); - } - } - - if (!bounds || bounds.width <= 0 || bounds.height <= 0) { - bounds = getDisplayBoundsForSource(source); - } - - // ── 3. Show traveling wave highlight ── - const pad = 6; - const highlightWin = new BrowserWindow({ - x: bounds.x - pad, - y: bounds.y - pad, - width: bounds.width + pad * 2, - height: bounds.height + pad * 2, - frame: false, - transparent: true, - alwaysOnTop: true, - skipTaskbar: true, - hasShadow: false, - resizable: false, - focusable: false, - webPreferences: { nodeIntegration: false, contextIsolation: true }, - }); - - highlightWin.setIgnoreMouseEvents(true); - - const html = ` - -
-
-` - - await highlightWin.loadURL(`data:text/html;charset=utf-8,${encodeURIComponent(html)}`) - - setTimeout(() => { - if (!highlightWin.isDestroyed()) highlightWin.close() - }, 1700) - - return { success: true } - } catch (error) { - console.error('Failed to show source highlight:', error) - return { success: false } - } - }) - - ipcMain.handle('get-selected-source', () => { - return selectedSource - }) - - ipcMain.handle('open-source-selector', () => { - const sourceSelectorWin = getSourceSelectorWindow() - if (sourceSelectorWin) { - sourceSelectorWin.focus() - return - } - createSourceSelectorWindow() - }) - ipcMain.handle('switch-to-editor', () => { - console.log('[switch-to-editor] Opening editor window') - const sourceSelectorWin = getSourceSelectorWindow() - if (sourceSelectorWin && !sourceSelectorWin.isDestroyed()) { - sourceSelectorWin.close() - } - createEditorWindow() - }) - - ipcMain.handle('start-native-screen-recording', async (_, source: SelectedSource, options?: NativeMacRecordingOptions) => { - // Windows native capture path - if (process.platform === 'win32') { - const windowsCaptureAvailable = await isNativeWindowsCaptureAvailable() - if (!windowsCaptureAvailable) { - return { success: false, message: 'Native Windows capture is not available on this system.' } - } - - if (windowsCaptureProcess && !windowsNativeCaptureActive) { - try { windowsCaptureProcess.kill() } catch { /* ignore */ } - setWindowsCaptureProcess(null) - setWindowsCaptureTargetPath(null) - setWindowsCaptureStopRequested(false) - } - - if (windowsCaptureProcess) { - return { success: false, message: 'A native Windows screen recording is already active.' 
} - } - - let wcProc: ChildProcessWithoutNullStreams | null = null - try { - const exePath = getWindowsCaptureExePath() - const recordingsDir = await getRecordingsDir() - const timestamp = Date.now() - const outputPath = path.join(recordingsDir, `recording-${timestamp}.mp4`) - const displayBounds = source?.id?.startsWith('window:') ? null : getDisplayBoundsForSource(source) - - const config: Record = { - outputPath, - fps: 60, - } - - if (options?.capturesSystemAudio) { - const audioPath = path.join(recordingsDir, `recording-${timestamp}.system.wav`) - config.captureSystemAudio = true - config.audioOutputPath = audioPath - setWindowsSystemAudioPath(audioPath) - } - - if (options?.capturesMicrophone) { - const micPath = path.join(recordingsDir, `recording-${timestamp}.mic.wav`) - config.captureMic = true - config.micOutputPath = micPath - if (options.microphoneLabel) { - config.micDeviceName = options.microphoneLabel - } - setWindowsMicAudioPath(micPath) - } - - const windowId = parseWindowId(source?.id) - if (windowId && source?.id?.startsWith('window:')) { - config.windowHandle = windowId - } else { - const resolvedDisplay = resolveWindowsCaptureDisplay( - source, - getScreen().getAllDisplays(), - getScreen().getPrimaryDisplay(), - ) - config.displayId = resolvedDisplay.displayId - - // Monitor handle IDs can drift across Electron/Windows capture boundaries, - // so also provide display bounds for a coordinate-based native fallback. - config.displayX = Math.round(resolvedDisplay.bounds.x) - config.displayY = Math.round(resolvedDisplay.bounds.y) - config.displayW = Math.round(resolvedDisplay.bounds.width) - config.displayH = Math.round(resolvedDisplay.bounds.height) - } - - recordNativeCaptureDiagnostics({ - backend: 'windows-wgc', - phase: 'start', - sourceId: source?.id ?? null, - sourceType: source?.sourceType ?? 'unknown', - displayId: typeof config.displayId === 'number' ? 
config.displayId : null, - displayBounds, - windowHandle: typeof config.windowHandle === 'number' ? config.windowHandle : null, - helperPath: exePath, - outputPath, - systemAudioPath: windowsSystemAudioPath, - microphonePath: windowsMicAudioPath, - }) - - setWindowsCaptureOutputBuffer('') - setWindowsCaptureTargetPath(outputPath) - setWindowsCaptureStopRequested(false) - setWindowsCapturePaused(false) - wcProc = spawn(exePath, [JSON.stringify(config)], { - cwd: recordingsDir, - stdio: ['pipe', 'pipe', 'pipe'], - }) - setWindowsCaptureProcess(wcProc) - attachWindowsCaptureLifecycle(wcProc) - - wcProc.stdout.on('data', (chunk: Buffer) => { - setWindowsCaptureOutputBuffer(windowsCaptureOutputBuffer + chunk.toString()) - }) - wcProc.stderr.on('data', (chunk: Buffer) => { - setWindowsCaptureOutputBuffer(windowsCaptureOutputBuffer + chunk.toString()) - }) - - await waitForWindowsCaptureStart(wcProc) - setWindowsNativeCaptureActive(true) - setNativeScreenRecordingActive(true) - recordNativeCaptureDiagnostics({ - backend: 'windows-wgc', - phase: 'start', - sourceId: source?.id ?? null, - sourceType: source?.sourceType ?? 'unknown', - displayId: typeof config.displayId === 'number' ? config.displayId : null, - displayBounds, - windowHandle: typeof config.windowHandle === 'number' ? config.windowHandle : null, - helperPath: exePath, - outputPath, - systemAudioPath: windowsSystemAudioPath, - microphonePath: windowsMicAudioPath, - processOutput: windowsCaptureOutputBuffer.trim() || undefined, - }) - return { success: true } - } catch (error) { - recordNativeCaptureDiagnostics({ - backend: 'windows-wgc', - phase: 'start', - sourceId: source?.id ?? null, - sourceType: source?.sourceType ?? 'unknown', - helperPath: windowsCaptureTargetPath ? 
getWindowsCaptureExePath() : null, - outputPath: windowsCaptureTargetPath, - systemAudioPath: windowsSystemAudioPath, - microphonePath: windowsMicAudioPath, - processOutput: windowsCaptureOutputBuffer.trim() || undefined, - error: String(error), - }) - console.error('Failed to start native Windows capture:', error) - try { if (wcProc) wcProc.kill() } catch { /* ignore */ } - setWindowsNativeCaptureActive(false) - setNativeScreenRecordingActive(false) - setWindowsCaptureProcess(null) - setWindowsCaptureTargetPath(null) - setWindowsCaptureStopRequested(false) - setWindowsCapturePaused(false) - return { - success: false, - message: 'Failed to start native Windows capture', - error: String(error), - } - } - } - - if (process.platform !== 'darwin') { - return { success: false, message: 'Native screen recording is only available on macOS.' } - } - - if (nativeCaptureProcess && !nativeScreenRecordingActive) { - try { - nativeCaptureProcess.kill() - } catch { - // ignore stale helper cleanup failures - } - setNativeCaptureProcess(null) - setNativeCaptureTargetPath(null) - setNativeCaptureStopRequested(false) - } - - if (nativeCaptureProcess) { - return { success: false, message: 'A native screen recording is already active.' } - } - - let captProc: ChildProcessWithoutNullStreams | null = null - try { - const recordingsDir = await getRecordingsDir() - - // Warm up TCC: trigger an Electron-level screen capture API call so macOS - // activates the screen-recording grant for this process tree before the - // native helper binary spawns and calls SCStream.startCapture(). - try { - await desktopCapturer.getSources({ types: ['screen'], thumbnailSize: { width: 1, height: 1 } }) - } catch { - // non-fatal – the helper will report its own TCC status - } - - // Ensure microphone TCC is granted for this process tree when mic capture - // is requested, so the child helper inherits the grant. 
- if (options?.capturesMicrophone) { - const micStatus = systemPreferences.getMediaAccessStatus('microphone') - if (micStatus !== 'granted') { - await systemPreferences.askForMediaAccess('microphone') - } - } - - const appName = normalizeDesktopSourceName(String(source?.appName ?? '')) - const ownAppName = normalizeDesktopSourceName(app.getName()) - if ( - !ALLOW_RECORDLY_WINDOW_CAPTURE - && - source?.id?.startsWith('window:') - && appName - && (appName === ownAppName || appName === 'recordly') - ) { - return { success: false, message: 'Cannot record Recordly windows. Please select another app window.' } - } - - const helperPath = await ensureNativeCaptureHelperBinary() - const timestamp = Date.now() - const outputPath = path.join(recordingsDir, `recording-${timestamp}.mp4`) - const capturesSystemAudio = Boolean(options?.capturesSystemAudio) - const capturesMicrophone = Boolean(options?.capturesMicrophone) - const systemAudioOutputPath = capturesSystemAudio - ? path.join(recordingsDir, `recording-${timestamp}.system.m4a`) - : null - const microphoneOutputPath = capturesMicrophone - ? 
path.join(recordingsDir, `recording-${timestamp}.mic.m4a`) - : null - const config: Record = { - fps: 60, - outputPath, - capturesSystemAudio, - capturesMicrophone, - } - - if (options?.microphoneDeviceId) { - config.microphoneDeviceId = options.microphoneDeviceId - } - - if (options?.microphoneLabel) { - config.microphoneLabel = options.microphoneLabel - } - - if (systemAudioOutputPath) { - config.systemAudioOutputPath = systemAudioOutputPath - } - - if (microphoneOutputPath) { - config.microphoneOutputPath = microphoneOutputPath - } - - const windowId = parseWindowId(source?.id) - const screenId = Number(source?.display_id) - - if (Number.isFinite(windowId) && windowId && source?.id?.startsWith('window:')) { - config.windowId = windowId - } else if (Number.isFinite(screenId) && screenId > 0) { - config.displayId = screenId - } else { - config.displayId = Number(getScreen().getPrimaryDisplay().id) - } - - setNativeCaptureOutputBuffer('') - setNativeCaptureTargetPath(outputPath) - setNativeCaptureSystemAudioPath(systemAudioOutputPath) - setNativeCaptureMicrophonePath(microphoneOutputPath) - setNativeCaptureStopRequested(false) - setNativeCapturePaused(false) - captProc = spawn(helperPath, [JSON.stringify(config)], { - cwd: recordingsDir, - stdio: ['pipe', 'pipe', 'pipe'], - }) - setNativeCaptureProcess(captProc) - attachNativeCaptureLifecycle(captProc) - - captProc.stdout.on('data', (chunk: Buffer) => { - setNativeCaptureOutputBuffer(nativeCaptureOutputBuffer + chunk.toString()) - }) - captProc.stderr.on('data', (chunk: Buffer) => { - setNativeCaptureOutputBuffer(nativeCaptureOutputBuffer + chunk.toString()) - }) - - await waitForNativeCaptureStart(captProc) - setNativeScreenRecordingActive(true) - - // If the native helper reported MICROPHONE_CAPTURE_UNAVAILABLE, it started - // capture without microphone. Clear the mic path so the renderer can fall - // back to a browser-side sidecar recording for the microphone track. 
- const micUnavailableNatively = nativeCaptureOutputBuffer.includes('MICROPHONE_CAPTURE_UNAVAILABLE') - if (micUnavailableNatively) { - setNativeCaptureMicrophonePath(null) - } - - recordNativeCaptureDiagnostics({ - backend: 'mac-screencapturekit', - phase: 'start', - sourceId: source?.id ?? null, - sourceType: source?.sourceType ?? 'unknown', - displayId: typeof config.displayId === 'number' ? config.displayId : null, - helperPath, - outputPath, - systemAudioPath: systemAudioOutputPath, - microphonePath: nativeCaptureMicrophonePath, - processOutput: nativeCaptureOutputBuffer.trim() || undefined, - }) - return { success: true, microphoneFallbackRequired: micUnavailableNatively } - } catch (error) { - console.error('Failed to start native ScreenCaptureKit recording:', error) - const errorStr = String(error) - - // Detect TCC (screen recording permission) errors and show a helpful dialog - if (errorStr.includes('declined TCC') || errorStr.includes('declined TCCs') || errorStr.includes('SCREEN_RECORDING_PERMISSION_DENIED')) { - const { response } = await dialog.showMessageBox({ - type: 'warning', - title: 'Screen Recording Permission Required', - message: 'Recordly needs screen recording permission to capture your screen.', - detail: 'Please open System Settings > Privacy & Security > Screen Recording, make sure Recordly is toggled ON, then try recording again.', - buttons: ['Open System Settings', 'Cancel'], - defaultId: 0, - cancelId: 1, - }) - if (response === 0) { - await shell.openExternal(getMacPrivacySettingsUrl('screen')) - } - try { if (captProc) captProc.kill() } catch { /* ignore */ } - setNativeScreenRecordingActive(false) - setNativeCaptureProcess(null) - setNativeCaptureTargetPath(null) - setNativeCaptureSystemAudioPath(null) - setNativeCaptureMicrophonePath(null) - setNativeCaptureStopRequested(false) - setNativeCapturePaused(false) - return { - success: false, - message: 'Screen recording permission not granted. 
Please allow access in System Settings and restart the app.', - userNotified: true, - } - } - - if (errorStr.includes('MICROPHONE_PERMISSION_DENIED')) { - const { response } = await dialog.showMessageBox({ - type: 'warning', - title: 'Microphone Permission Required', - message: 'Recordly needs microphone permission to record audio.', - detail: 'Please open System Settings > Privacy & Security > Microphone, make sure Recordly is toggled ON, then try recording again.', - buttons: ['Open System Settings', 'Cancel'], - defaultId: 0, - cancelId: 1, - }) - if (response === 0) { - await shell.openExternal(getMacPrivacySettingsUrl('microphone')) - } - try { if (captProc) captProc.kill() } catch { /* ignore */ } - setNativeScreenRecordingActive(false) - setNativeCaptureProcess(null) - setNativeCaptureTargetPath(null) - setNativeCaptureSystemAudioPath(null) - setNativeCaptureMicrophonePath(null) - setNativeCaptureStopRequested(false) - setNativeCapturePaused(false) - return { - success: false, - message: 'Microphone permission not granted. Please allow access in System Settings.', - userNotified: true, - } - } - - recordNativeCaptureDiagnostics({ - backend: 'mac-screencapturekit', - phase: 'start', - sourceId: source?.id ?? null, - sourceType: source?.sourceType ?? 
'unknown', - helperPath: getNativeCaptureHelperBinaryPath(), - outputPath: nativeCaptureTargetPath, - systemAudioPath: nativeCaptureSystemAudioPath, - microphonePath: nativeCaptureMicrophonePath, - processOutput: nativeCaptureOutputBuffer.trim() || undefined, - fileSizeBytes: await getFileSizeIfPresent(nativeCaptureTargetPath), - error: String(error), - }) - try { - if (captProc) captProc.kill() - } catch { - // ignore cleanup failures - } - setNativeScreenRecordingActive(false) - setNativeCaptureProcess(null) - setNativeCaptureTargetPath(null) - setNativeCaptureSystemAudioPath(null) - setNativeCaptureMicrophonePath(null) - setNativeCaptureStopRequested(false) - setNativeCapturePaused(false) - return { - success: false, - message: 'Failed to start native ScreenCaptureKit recording', - error: String(error), - } - } - }) - - ipcMain.handle('stop-native-screen-recording', async () => { - // Windows native capture stop path - if (process.platform === 'win32' && windowsNativeCaptureActive) { - try { - if (!windowsCaptureProcess) { - throw new Error('Native Windows capture process is not running') - } - - const proc = windowsCaptureProcess - const preferredVideoPath = windowsCaptureTargetPath - setWindowsCaptureStopRequested(true) - proc.stdin.write('stop\n') - const tempVideoPath = await waitForWindowsCaptureStop(proc) - setWindowsCaptureProcess(null) - setWindowsNativeCaptureActive(false) - setNativeScreenRecordingActive(false) - setWindowsCaptureTargetPath(null) - setWindowsCaptureStopRequested(false) - setWindowsCapturePaused(false) - - const finalVideoPath = preferredVideoPath ?? 
tempVideoPath
        if (tempVideoPath !== finalVideoPath) {
          await moveFileWithOverwrite(tempVideoPath, finalVideoPath)
        }

        setWindowsPendingVideoPath(finalVideoPath)
        recordNativeCaptureDiagnostics({
          backend: 'windows-wgc',
          phase: 'stop',
          outputPath: finalVideoPath,
          systemAudioPath: windowsSystemAudioPath,
          microphonePath: windowsMicAudioPath,
          processOutput: windowsCaptureOutputBuffer.trim() || undefined,
          fileSizeBytes: await getFileSizeIfPresent(finalVideoPath),
        })
        return { success: true, path: finalVideoPath }
      } catch (error) {
        console.error('Failed to stop native Windows capture:', error)
        // Snapshot the paths BEFORE the setters null the live state bindings,
        // so the diagnostics below still record the real audio-file locations.
        // (Mirrors the macOS catch path, which already does this.)
        const fallbackPath = windowsCaptureTargetPath
        const fallbackSystemAudioPath = windowsSystemAudioPath
        const fallbackMicAudioPath = windowsMicAudioPath
        setWindowsNativeCaptureActive(false)
        setNativeScreenRecordingActive(false)
        setWindowsCaptureProcess(null)
        setWindowsCaptureTargetPath(null)
        setWindowsCaptureStopRequested(false)
        setWindowsCapturePaused(false)
        setWindowsSystemAudioPath(null)
        setWindowsMicAudioPath(null)
        setWindowsPendingVideoPath(null)

        // Recovery: if the capture target exists on disk, treat the stop as a
        // success anyway and hand the file on for muxing.
        if (fallbackPath) {
          try {
            await fs.access(fallbackPath)
            setWindowsPendingVideoPath(fallbackPath)
            recordNativeCaptureDiagnostics({
              backend: 'windows-wgc',
              phase: 'stop',
              outputPath: fallbackPath,
              systemAudioPath: fallbackSystemAudioPath,
              microphonePath: fallbackMicAudioPath,
              processOutput: windowsCaptureOutputBuffer.trim() || undefined,
              fileSizeBytes: await getFileSizeIfPresent(fallbackPath),
              error: String(error),
            })
            return { success: true, path: fallbackPath }
          } catch {
            // File doesn't exist
          }
        }

        recordNativeCaptureDiagnostics({
          backend: 'windows-wgc',
          phase: 'stop',
          outputPath: fallbackPath,
          systemAudioPath: fallbackSystemAudioPath,
          microphonePath: fallbackMicAudioPath,
          processOutput: windowsCaptureOutputBuffer.trim() || undefined,
          error: String(error),
        })

        return {
          success: false,
          message: 'Failed to stop native Windows capture',
          error: String(error),
        }
      }
    }

    if (process.platform !== 'darwin') {
      return { success: false, message: 'Native screen recording is only available on macOS.' }
    }

    if (!nativeScreenRecordingActive) {
      // Not flagged active — a previous session may have crashed mid-recording,
      // so try to recover orphaned helper output before reporting failure.
      const recovered = await recoverNativeMacCaptureOutput()
      if (recovered) {
        return recovered
      }

      return { success: false, message: 'No native screen recording is active.' }
    }

    try {
      if (!nativeCaptureProcess) {
        throw new Error('Native capture helper process is not running')
      }

      // Local aliases: the setters below null the live state bindings, and
      // `helperProcess` avoids shadowing the Node global `process` consulted above.
      const helperProcess = nativeCaptureProcess
      const preferredVideoPath = nativeCaptureTargetPath
      const preferredSystemAudioPath = nativeCaptureSystemAudioPath
      const preferredMicrophonePath = nativeCaptureMicrophonePath
      console.log('[stop-native] Audio paths — system:', preferredSystemAudioPath, 'mic:', preferredMicrophonePath)
      setNativeCaptureStopRequested(true)
      helperProcess.stdin.write('stop\n')
      const tempVideoPath = await waitForNativeCaptureStop(helperProcess)
      console.log('[stop-native] Helper stopped, tempVideoPath:', tempVideoPath)
      setNativeCaptureProcess(null)
      setNativeScreenRecordingActive(false)
      setNativeCaptureTargetPath(null)
      setNativeCaptureSystemAudioPath(null)
      setNativeCaptureMicrophonePath(null)
      setNativeCaptureStopRequested(false)
      setNativeCapturePaused(false)

      // The helper may have written to a temp location; move it into place.
      const finalVideoPath = preferredVideoPath ?? tempVideoPath
      if (tempVideoPath !== finalVideoPath) {
        await moveFileWithOverwrite(tempVideoPath, finalVideoPath)
      }

      if (preferredSystemAudioPath || preferredMicrophonePath) {
        console.log('[stop-native] Attempting audio mux (merging separate tracks) into:', finalVideoPath)
        try {
          await muxNativeMacRecordingWithAudio(finalVideoPath, preferredSystemAudioPath, preferredMicrophonePath)
          console.log('[stop-native] Audio mux completed successfully')
        } catch (error) {
          // Mux is best-effort: the helper output already carries inline audio.
          console.warn('[stop-native] Audio mux failed (video still has inline audio):', error)
        }
      } else {
        console.log('[stop-native] No separate audio tracks to mux')
      }

      return await finalizeStoredVideo(finalVideoPath)
    } catch (error) {
      console.error('Failed to stop native ScreenCaptureKit recording:', error)
      // Snapshot before the setters clear the live bindings.
      const fallbackPath = nativeCaptureTargetPath
      const fallbackSystemAudioPath = nativeCaptureSystemAudioPath
      const fallbackMicrophonePath = nativeCaptureMicrophonePath
      const fallbackFileSizeBytes = await getFileSizeIfPresent(fallbackPath)
      setNativeScreenRecordingActive(false)
      setNativeCaptureProcess(null)
      setNativeCaptureTargetPath(null)
      setNativeCaptureSystemAudioPath(null)
      setNativeCaptureMicrophonePath(null)
      setNativeCaptureStopRequested(false)
      setNativeCapturePaused(false)

      recordNativeCaptureDiagnostics({
        backend: 'mac-screencapturekit',
        phase: 'stop',
        sourceId: lastNativeCaptureDiagnostics?.sourceId ?? null,
        sourceType: lastNativeCaptureDiagnostics?.sourceType ?? 'unknown',
        displayId: lastNativeCaptureDiagnostics?.displayId ?? null,
        displayBounds: lastNativeCaptureDiagnostics?.displayBounds ?? null,
        windowHandle: lastNativeCaptureDiagnostics?.windowHandle ?? null,
        helperPath: lastNativeCaptureDiagnostics?.helperPath ?? null,
        outputPath: fallbackPath,
        systemAudioPath: fallbackSystemAudioPath,
        microphonePath: fallbackMicrophonePath,
        osRelease: lastNativeCaptureDiagnostics?.osRelease,
        supported: lastNativeCaptureDiagnostics?.supported,
        helperExists: lastNativeCaptureDiagnostics?.helperExists,
        processOutput: nativeCaptureOutputBuffer.trim() || undefined,
        fileSizeBytes: fallbackFileSizeBytes,
        error: String(error),
      })

      // Try to recover: if the target file exists on disk, finalize with it
      if (fallbackPath) {
        try {
          await fs.access(fallbackPath)
          console.log('[stop-native-screen-recording] Recovering with fallback path:', fallbackPath)
          if (fallbackSystemAudioPath || fallbackMicrophonePath) {
            try {
              await muxNativeMacRecordingWithAudio(
                fallbackPath,
                fallbackSystemAudioPath,
                fallbackMicrophonePath,
              )
            } catch (muxError) {
              console.warn('Failed to mux recovered native macOS audio into capture:', muxError)
            }
          }
          return await finalizeStoredVideo(fallbackPath)
        } catch {
          // File doesn't exist or isn't accessible
        }
      }

      const recovered = await recoverNativeMacCaptureOutput()
      if (recovered) {
        return recovered
      }

      return {
        success: false,
        message: 'Failed to stop native ScreenCaptureKit recording',
        error: String(error),
      }
    }
  })

  // Manual recovery entry point for orphaned macOS helper output (e.g. after a crash).
  ipcMain.handle('recover-native-screen-recording', async () => {
    if (process.platform !== 'darwin') {
      return { success: false, message: 'Native screen recording recovery is only available on macOS.' }
    }

    const recovered = await recoverNativeMacCaptureOutput()
    if (recovered) {
      return recovered
    }

    return {
      success: false,
      message: 'No recoverable native macOS recording output was found.',
    }
  })

  // Pause is forwarded to the capture helper over stdin; idempotent when already paused.
  ipcMain.handle('pause-native-screen-recording', async () => {
    if (process.platform === 'win32') {
      if (!windowsNativeCaptureActive || !windowsCaptureProcess) {
        return { success: false, message: 'No native Windows screen recording is active.' }
      }

      if (windowsCapturePaused) {
        return { success: true }
      }

      try {
        windowsCaptureProcess.stdin.write('pause\n')
        setWindowsCapturePaused(true)
        return { success: true }
      } catch (error) {
        return { success: false, message: 'Failed to pause native Windows capture', error: String(error) }
      }
    }

    if (process.platform !== 'darwin') {
      return { success: false, message: 'Native screen recording is only available on macOS.' }
    }

    if (!nativeScreenRecordingActive || !nativeCaptureProcess) {
      return { success: false, message: 'No native screen recording is active.' }
    }

    if (nativeCapturePaused) {
      return { success: true }
    }

    try {
      nativeCaptureProcess.stdin.write('pause\n')
      setNativeCapturePaused(true)
      return { success: true }
    } catch (error) {
      return { success: false, message: 'Failed to pause native screen recording', error: String(error) }
    }
  })

  // Resume mirrors pause; idempotent when not paused.
  ipcMain.handle('resume-native-screen-recording', async () => {
    if (process.platform === 'win32') {
      if (!windowsNativeCaptureActive || !windowsCaptureProcess) {
        return { success: false, message: 'No native Windows screen recording is active.' }
      }

      if (!windowsCapturePaused) {
        return { success: true }
      }

      try {
        windowsCaptureProcess.stdin.write('resume\n')
        setWindowsCapturePaused(false)
        return { success: true }
      } catch (error) {
        return { success: false, message: 'Failed to resume native Windows capture', error: String(error) }
      }
    }

    if (process.platform !== 'darwin') {
      return { success: false, message: 'Native screen recording is only available on macOS.' }
    }

    if (!nativeScreenRecordingActive || !nativeCaptureProcess) {
      return { success: false, message: 'No native screen recording is active.' }
    }

    if (!nativeCapturePaused) {
      return { success: true }
    }

    try {
      nativeCaptureProcess.stdin.write('resume\n')
      setNativeCapturePaused(false)
      return { success: true }
    } catch (error) {
      return { success: false, message: 'Failed to resume native screen recording', error: String(error) }
    }
  })

  ipcMain.handle('get-system-cursor-assets', async () => {
    try {
      return { success: true, cursors: await getSystemCursorAssets() }
    } catch (error) {
      console.error('Failed to load system cursor assets:', error)
      return { success: false, cursors: {}, error: String(error) }
    }
  })

  ipcMain.handle('is-native-windows-capture-available', async () => {
    return { available: await isNativeWindowsCaptureAvailable() }
  })

  ipcMain.handle('get-last-native-capture-diagnostics', async () => {
    return { success: true, diagnostics: lastNativeCaptureDiagnostics }
  })

  // Resolves sidecar audio files for a recording and pre-approves them for
  // later local reads by the renderer.
  ipcMain.handle('get-video-audio-fallback-paths', async (_event, videoPath: string) => {
    if (!videoPath) {
      return { success: true, paths: [] }
    }

    try {
      const paths = await getCompanionAudioFallbackPaths(videoPath)
      await Promise.all([
        rememberApprovedLocalReadPath(videoPath),
        ...paths.map((fallbackPath) => rememberApprovedLocalReadPath(fallbackPath)),
      ])
      return { success: true, paths }
    } catch (error) {
      console.error('Failed to resolve companion audio fallback paths:', error)
      return { success: false, paths: [], error: String(error) }
    }
  })

  // Merges the separately-captured Windows audio tracks into the pending video,
  // then finalizes it. Best-effort: a failed mux still tries to finalize the
  // raw video so the recording is not lost.
  ipcMain.handle('mux-native-windows-recording', async (_event, pauseSegments?: PauseSegment[]) => {
    const videoPath = windowsPendingVideoPath
    setWindowsPendingVideoPath(null)

    if (!videoPath) {
      return { success: false, message: 'No native Windows video pending for mux' }
    }

    try {
      if (windowsSystemAudioPath || windowsMicAudioPath) {
        await muxNativeWindowsVideoWithAudio(videoPath, windowsSystemAudioPath, windowsMicAudioPath, pauseSegments ?? [])
        setWindowsSystemAudioPath(null)
        setWindowsMicAudioPath(null)
      }

      recordNativeCaptureDiagnostics({
        backend: 'windows-wgc',
        phase: 'mux',
        outputPath: videoPath,
        fileSizeBytes: await getFileSizeIfPresent(videoPath),
      })
      return await finalizeStoredVideo(videoPath)
    } catch (error) {
      console.error('Failed to mux native Windows recording:', error)
      recordNativeCaptureDiagnostics({
        backend: 'windows-wgc',
        phase: 'mux',
        outputPath: videoPath,
        systemAudioPath: windowsSystemAudioPath,
        microphonePath: windowsMicAudioPath,
        fileSizeBytes: await getFileSizeIfPresent(videoPath),
        error: String(error),
      })
      setWindowsSystemAudioPath(null)
      setWindowsMicAudioPath(null)
      try {
        return await finalizeStoredVideo(videoPath)
      } catch {
        return { success: false, message: 'Failed to mux native Windows recording', error: String(error) }
      }
    }
  })

  ipcMain.handle('start-ffmpeg-recording', async (_, source: SelectedSource) => {
    if (ffmpegCaptureProcess) {
      return { success: false, message: 'An FFmpeg recording is already active.' }
    }

    try {
      const recordingsDir = await getRecordingsDir()
      const ffmpegPath = getFfmpegBinaryPath()
      const outputPath = path.join(recordingsDir, `recording-${Date.now()}.mp4`)
      const args = await buildFfmpegCaptureArgs(source, outputPath)

      setFfmpegCaptureOutputBuffer('')
      setFfmpegCaptureTargetPath(outputPath)
      const ffProc = spawn(ffmpegPath, args, {
        cwd: recordingsDir,
        stdio: ['pipe', 'pipe', 'pipe'],
      })
      setFfmpegCaptureProcess(ffProc)

      // Single append closure for both streams; reads the live state binding
      // so output accumulated so far is never clobbered.
      const appendCaptureOutput = (chunk: Buffer) => {
        setFfmpegCaptureOutputBuffer(ffmpegCaptureOutputBuffer + chunk.toString())
      }
      ffProc.stdout.on('data', appendCaptureOutput)
      ffProc.stderr.on('data', appendCaptureOutput)

      await waitForFfmpegCaptureStart(ffProc)
      setFfmpegScreenRecordingActive(true)
      return { success: true }
    } catch (error) {
      console.error('Failed to start FFmpeg recording:', error)
      setFfmpegScreenRecordingActive(false)
      setFfmpegCaptureProcess(null)
      setFfmpegCaptureTargetPath(null)
      return {
        success: false,
        message: 'Failed to start FFmpeg recording',
        error: String(error),
      }
    }
  })

  ipcMain.handle('stop-ffmpeg-recording', async () => {
    if (!ffmpegScreenRecordingActive) {
      return { success: false, message: 'No FFmpeg recording is active.' }
    }

    try {
      if (!ffmpegCaptureProcess || !ffmpegCaptureTargetPath) {
        throw new Error('FFmpeg process is not running')
      }

      // `captureProcess` avoids shadowing the Node global `process`.
      const captureProcess = ffmpegCaptureProcess
      const outputPath = ffmpegCaptureTargetPath
      // 'q' asks ffmpeg to finish writing the container trailer gracefully.
      captureProcess.stdin.write('q\n')
      const finalVideoPath = await waitForFfmpegCaptureStop(captureProcess, outputPath)

      setFfmpegCaptureProcess(null)
      setFfmpegCaptureTargetPath(null)
      setFfmpegScreenRecordingActive(false)

      return await finalizeStoredVideo(finalVideoPath)
    } catch (error) {
      console.error('Failed to stop FFmpeg recording:', error)
      setFfmpegCaptureProcess(null)
      setFfmpegCaptureTargetPath(null)
      setFfmpegScreenRecordingActive(false)
      return {
        success: false,
        message: 'Failed to stop FFmpeg recording',
        error: String(error),
      }
    }
  })

  // Stores renderer-captured microphone audio next to its video as `<base>.mic.webm`.
  ipcMain.handle('store-microphone-sidecar', async (_, audioData: ArrayBuffer, videoPath: string) => {
    try {
      const baseName = videoPath.replace(/\.[^.]+$/, '')
      const sidecarPath = `${baseName}.mic.webm`
      await fs.writeFile(sidecarPath, Buffer.from(audioData))
      return { success: true, path: sidecarPath }
    } catch (error) {
      console.error('Failed to store microphone sidecar:', error)
      return { success: false, error: String(error) }
    }
  })

  ipcMain.handle('store-recorded-video', async (_, videoData: ArrayBuffer, fileName: string) => {
    try {
      const recordingsDir = await getRecordingsDir()
      const videoPath = path.join(recordingsDir, fileName)
      await fs.writeFile(videoPath, Buffer.from(videoData))
      return await finalizeStoredVideo(videoPath)
    } catch (error) {
      console.error('Failed to store video:', error)
      return {
        success: false,
        message: 'Failed to store video',
        error: String(error)
      }
    }
  })

  // Returns the most recent recording by lexicographic filename order
  // (filenames embed a millisecond timestamp, so this sorts chronologically).
  ipcMain.handle('get-recorded-video-path', async () => {
    try {
      const recordingsDir = await getRecordingsDir()
      const files = await fs.readdir(recordingsDir)
      const videoFiles = files.filter(file => /\.(webm|mov|mp4)$/i.test(file))

      if (videoFiles.length === 0) {
        return { success: false, message: 'No recorded video found' }
      }

      const latestVideo = videoFiles.sort().reverse()[0]
      const videoPath = path.join(recordingsDir, latestVideo)

      return { success: true, path: videoPath }
    } catch (error) {
      console.error('Failed to get video path:', error)
      return { success: false, message: 'Failed to get video path', error: String(error) }
    }
  })

  // Toggles all recording-side capture machinery (cursor sampling, interaction
  // hooks, window-bounds tracking) and broadcasts the state to every window.
  ipcMain.handle('set-recording-state', (_, recording: boolean) => {
    if (recording) {
      // Reset any stale capture state before starting fresh.
      stopCursorCapture()
      stopInteractionCapture()
      startWindowBoundsCapture()
      void startNativeCursorMonitor()
      setIsCursorCaptureActive(true)
      setActiveCursorSamples([])
      setPendingCursorSamples([])
      setCursorCaptureStartTimeMs(Date.now())
      setLinuxCursorScreenPoint(null)
      setLastLeftClick(null)
      sampleCursorPoint()
      setCursorCaptureInterval(setInterval(sampleCursorPoint, CURSOR_SAMPLE_INTERVAL_MS))
      void startInteractionCapture()
    } else {
      setIsCursorCaptureActive(false)
      stopCursorCapture()
      stopInteractionCapture()
      stopWindowBoundsCapture()
      stopNativeCursorMonitor()
      showCursor()
      setLinuxCursorScreenPoint(null)
      // Persist the collected telemetry before discarding the live buffer.
      snapshotCursorTelemetryForPersistence()
      setActiveCursorSamples([])
    }

    const source = selectedSource || { name: 'Screen' }
    BrowserWindow.getAllWindows().forEach((window) => {
      if (!window.isDestroyed()) {
        window.webContents.send('recording-state-changed', {
          recording,
          sourceName: source.name,
        })
      }
    })

    if (onRecordingStateChange) {
      onRecordingStateChange(recording, source.name)
    }
  })

  // Loads and sanitizes persisted cursor telemetry for a recording. Unknown
  // fields are dropped, coordinates clamped to [0,1], samples sorted by time.
  ipcMain.handle('get-cursor-telemetry', async (_, videoPath?: string) => {
    const targetVideoPath = normalizeVideoSourcePath(videoPath ?? currentVideoPath)
    if (!targetVideoPath) {
      return { success: true, samples: [] }
    }

    const telemetryPath = getTelemetryPathForVideo(targetVideoPath)
    try {
      const content = await fs.readFile(telemetryPath, 'utf-8')
      const parsed = JSON.parse(content)
      // Accept both the bare-array and `{ samples: [...] }` on-disk formats.
      const rawSamples = Array.isArray(parsed)
        ? parsed
        : (Array.isArray(parsed?.samples) ? parsed.samples : [])

      const samples: CursorTelemetryPoint[] = rawSamples
        .filter((sample: unknown) => Boolean(sample && typeof sample === 'object'))
        .map((sample: unknown) => {
          const point = sample as Partial<CursorTelemetryPoint>
          return {
            timeMs: typeof point.timeMs === 'number' && Number.isFinite(point.timeMs) ? Math.max(0, point.timeMs) : 0,
            cx: typeof point.cx === 'number' && Number.isFinite(point.cx) ? clamp(point.cx, 0, 1) : 0.5,
            cy: typeof point.cy === 'number' && Number.isFinite(point.cy) ? clamp(point.cy, 0, 1) : 0.5,
            interactionType: point.interactionType === 'click'
              || point.interactionType === 'double-click'
              || point.interactionType === 'right-click'
              || point.interactionType === 'middle-click'
              || point.interactionType === 'move'
              || point.interactionType === 'mouseup'
              ? point.interactionType
              : undefined,
            cursorType: point.cursorType === 'arrow'
              || point.cursorType === 'text'
              || point.cursorType === 'pointer'
              || point.cursorType === 'crosshair'
              || point.cursorType === 'open-hand'
              || point.cursorType === 'closed-hand'
              || point.cursorType === 'resize-ew'
              || point.cursorType === 'resize-ns'
              || point.cursorType === 'not-allowed'
              ? point.cursorType
              : undefined,
          }
        })
        .sort((a: CursorTelemetryPoint, b: CursorTelemetryPoint) => a.timeMs - b.timeMs)

      return { success: true, samples }
    } catch (error) {
      const nodeError = error as NodeJS.ErrnoException
      if (nodeError.code === 'ENOENT') {
        // No telemetry file is a normal condition, not an error.
        return { success: true, samples: [] }
      }
      console.error('Failed to load cursor telemetry:', error)
      return { success: false, message: 'Failed to load cursor telemetry', error: String(error), samples: [] }
    }
  })

  ipcMain.handle('open-external-url', async (_, url: string) => {
    try {
      // Security: only allow http/https URLs to prevent file:// or custom protocol abuse
      const parsed = new URL(url)
      if (parsed.protocol !== 'https:' && parsed.protocol !== 'http:') {
        return { success: false, error: `Blocked non-HTTP URL: ${parsed.protocol}` }
      }
      await shell.openExternal(url)
      return { success: true }
    } catch (error) {
      console.error('Failed to open URL:', error)
      return { success: false, error: String(error) }
    }
  })

  // isTrustedAccessibilityClient(false) only queries; (true) also prompts the user.
  ipcMain.handle('get-accessibility-permission-status', () => {
    if (process.platform !== 'darwin') {
      return { success: true, trusted: true, prompted: false }
    }

    return {
      success: true,
      trusted: systemPreferences.isTrustedAccessibilityClient(false),
      prompted: false,
    }
  })

  ipcMain.handle('request-accessibility-permission', () => {
    if (process.platform !== 'darwin') {
      return { success: true, trusted: true, prompted: false }
    }

    return {
      success: true,
      trusted: systemPreferences.isTrustedAccessibilityClient(true),
      prompted: true,
    }
  })

  ipcMain.handle('get-screen-recording-permission-status', () => {
    if (process.platform !== 'darwin') {
      return { success: true, status: 'granted' }
    }

    try {
      return {
        success: true,
        status: systemPreferences.getMediaAccessStatus('screen'),
      }
    } catch (error) {
      console.error('Failed to get screen recording permission status:', error)
      return { success: false, status: 'unknown', error: String(error) }
    }
  })

  ipcMain.handle('open-screen-recording-preferences', async () => {
    if (process.platform !== 'darwin') {
      return { success: true }
    }

    try {
      await shell.openExternal(getMacPrivacySettingsUrl('screen'))
      return { success: true }
    } catch (error) {
      console.error('Failed to open Screen Recording preferences:', error)
      return { success: false, error: String(error) }
    }
  })

  ipcMain.handle('open-accessibility-preferences', async () => {
    if (process.platform !== 'darwin') {
      return { success: true }
    }

    try {
      await shell.openExternal(getMacPrivacySettingsUrl('accessibility'))
      return { success: true }
    } catch (error) {
      console.error('Failed to open Accessibility preferences:', error)
      return { success: false, error: String(error) }
    }
  })

  // Generate a tiny thumbnail for a wallpaper image and cache it in userData.
  // Returns the cached thumbnail as raw JPEG bytes for fast grid rendering.
  // Serialized to prevent concurrent nativeImage operations from eating memory.
- const THUMB_SIZE = 96 - const thumbCacheDir = path.join(USER_DATA_PATH, 'wallpaper-thumbs') - let thumbGenerationQueue: Promise = Promise.resolve() - - ipcMain.handle('generate-wallpaper-thumbnail', async (_, filePath: string) => { - try { - const resolved = normalizePath(filePath) - const realResolved = await fs.realpath(resolved).catch(() => resolved) - - if (!isAllowedLocalReadPath(resolved) && !isAllowedLocalReadPath(realResolved)) { - return { success: false, error: 'Access denied' } - } - - // Deterministic cache key from file path + mtime - const stat = await fs.stat(resolved) - const cacheKey = Buffer.from(`${resolved}:${stat.mtimeMs}`).toString('base64url') - const thumbPath = path.join(thumbCacheDir, `${cacheKey}.jpg`) - - // Return cached thumbnail if it exists (no queue needed) - if (existsSync(thumbPath)) { - const data = await fs.readFile(thumbPath) - return { success: true, data } - } - - // Serialize nativeImage operations to avoid OOM from concurrent full-res decodes - let jpegData: Buffer - const generation = thumbGenerationQueue.then(async () => { - const { nativeImage } = await import('electron') - const img = nativeImage.createFromPath(resolved) - if (img.isEmpty()) { - throw new Error('Failed to load image') - } - const { width, height } = img.getSize() - const scale = THUMB_SIZE / Math.min(width, height) - const resized = img.resize({ - width: Math.round(width * scale), - height: Math.round(height * scale), - quality: 'good', - }) - jpegData = resized.toJPEG(70) - - // Cache to disk - await fs.mkdir(thumbCacheDir, { recursive: true }) - await fs.writeFile(thumbPath, jpegData) - }) - // Keep the queue moving even if one fails - thumbGenerationQueue = generation.catch(() => {}) - await generation - - return { success: true, data: jpegData! 
} - } catch (error) { - return { success: false, error: String(error) } - } - }) - - // Return base path for assets so renderer can resolve file:// paths in production - ipcMain.handle('get-asset-base-path', () => { - try { - const assetPath = getAssetRootPath() - return pathToFileURL(`${assetPath}${path.sep}`).toString() - } catch (err) { - console.error('Failed to resolve asset base path:', err) - return null - } - }) - - ipcMain.handle('list-asset-directory', async (_, relativeDir: string) => { - try { - const normalizedRelativeDir = String(relativeDir ?? '') - .replace(/\\/g, '/') - .replace(/^\/+/, '') - - const assetRootPath = path.resolve(getAssetRootPath()) - const targetDirPath = path.resolve(assetRootPath, normalizedRelativeDir) - if (targetDirPath !== assetRootPath && !targetDirPath.startsWith(`${assetRootPath}${path.sep}`)) { - return { success: false, error: 'Invalid asset directory' } - } - - const entries = await fs.readdir(targetDirPath, { withFileTypes: true }) - const files = entries - .filter((entry) => entry.isFile()) - .map((entry) => entry.name) - .sort(new Intl.Collator(undefined, { numeric: true, sensitivity: 'base' }).compare) - - return { success: true, files } - } catch (error) { - console.error('Failed to list asset directory:', error) - return { success: false, error: String(error) } - } - }) - - ipcMain.handle('read-local-file', async (_, filePath: string) => { - try { - const resolved = normalizePath(filePath) - const realResolved = await fs.realpath(resolved).catch(() => resolved) - if (!isAllowedLocalReadPath(resolved) && !isAllowedLocalReadPath(realResolved)) { - console.warn(`[read-local-file] Blocked read outside allowed directories: ${resolved}`) - return { success: false, error: 'Access denied: path outside allowed directories' } - } - - const data = await fs.readFile(resolved) - return { success: true, data } - } catch (error) { - console.error('Failed to read local file:', error) - return { success: false, error: 
String(error) } - } - }) - - ipcMain.handle( - 'native-video-export-start', - async ( - event, - options: { - width: number - height: number - frameRate: number - bitrate: number - encodingMode: NativeExportEncodingMode - inputMode?: 'rawvideo' | 'h264-stream' - }, - ) => { - try { - if (options.width % 2 !== 0 || options.height % 2 !== 0) { - throw new Error('Native export requires even output dimensions') - } - - const ffmpegPath = getFfmpegBinaryPath() - const inputMode = options.inputMode ?? 'rawvideo' - const sessionId = `recordly-export-${Date.now()}-${Math.random().toString(36).slice(2, 8)}` - const outputPath = path.join(app.getPath('temp'), `${sessionId}.mp4`) - - let encoderName: string - let ffmpegArgs: string[] - - if (inputMode === 'h264-stream') { - // Pre-encoded H.264 Annex B from browser VideoEncoder — just stream-copy into MP4 - encoderName = 'h264-stream-copy' - ffmpegArgs = buildNativeH264StreamExportArgs({ frameRate: options.frameRate, outputPath }) - } else { - encoderName = await resolveNativeVideoEncoder(ffmpegPath, options.encodingMode) - ffmpegArgs = buildNativeVideoExportArgs(encoderName, options, outputPath) - } - - const ffmpegProcess = spawn(ffmpegPath, ffmpegArgs, { - stdio: ['pipe', 'ignore', 'pipe'], - }) as ChildProcessByStdio - // For rawvideo, frames are a fixed RGBA size. For h264-stream, chunks are variable. - const inputByteSize = inputMode === 'rawvideo' ? getNativeVideoInputByteSize(options.width, options.height) : 0 - - const session: NativeVideoExportSession = { - ffmpegProcess, - outputPath, - inputByteSize, - inputMode, - maxQueuedWriteBytes: inputMode === 'h264-stream' ? 
8 * 1024 * 1024 : getNativeVideoExportMaxQueuedWriteBytes(inputByteSize), - stderrOutput: '', - encoderName, - processError: null, - stdinError: null, - terminating: false, - writeSequence: Promise.resolve(), - sender: event.sender, - pendingWriteRequestIds: new Set(), - completionPromise: new Promise((resolve, reject) => { - ffmpegProcess.once('error', (error) => { - const processError = error instanceof Error ? error : new Error(String(error)) - if (session.terminating) { - resolve() - return - } - - session.processError = processError - reject(processError) - }) - ffmpegProcess.stdin.once('error', (error) => { - const stdinError = error instanceof Error ? error : new Error(String(error)) - if (session.terminating && isIgnorableNativeVideoExportStreamError(stdinError)) { - return - } - - session.stdinError = stdinError - }) - ffmpegProcess.once('close', (code, signal) => { - if (session.terminating) { - resolve() - return - } - - if (code === 0) { - resolve() - return - } - - reject( - new Error( - getNativeVideoExportSessionError( - session, - `FFmpeg exited with code ${code ?? 'unknown'}${signal ? ` (signal ${signal})` : ''}`, - ), - ), - ) - }) - }), - } - void session.completionPromise.catch(() => undefined) - - ffmpegProcess.stderr.on('data', (chunk: Buffer) => { - session.stderrOutput += chunk.toString() - }) - - nativeVideoExportSessions.set(sessionId, session) - - console.log( - `[native-export] Started ${isHardwareAcceleratedVideoEncoder(encoderName) ? 
'hardware' : 'software'} session ${sessionId} with ${encoderName}`, - ) - - return { - success: true, - sessionId, - encoderName, - } - } catch (error) { - console.error('[native-export] Failed to start native video export session:', error) - return { - success: false, - error: String(error), - } - } - }, - ) - - ipcMain.on( - 'native-video-export-write-frame-async', - ( - event, - payload: { - sessionId: string - requestId: number - frameData: Uint8Array - }, - ) => { - const sessionId = payload?.sessionId - const requestId = payload?.requestId - const frameData = payload?.frameData - - if (typeof sessionId !== 'string' || typeof requestId !== 'number' || !frameData) { - return - } - - const session = nativeVideoExportSessions.get(sessionId) - if (!session) { - sendNativeVideoExportWriteFrameResult(event.sender, sessionId, requestId, { - success: false, - error: 'Invalid native export session', - }) - return - } - - session.sender = event.sender - session.pendingWriteRequestIds.add(requestId) - - if (session.terminating) { - settleNativeVideoExportWriteFrameRequest(sessionId, session, requestId, { - success: false, - error: 'Native video export session was cancelled', - }) - return - } - - if (session.inputMode !== 'h264-stream' && frameData.byteLength !== session.inputByteSize) { - settleNativeVideoExportWriteFrameRequest(sessionId, session, requestId, { - success: false, - error: `Native video export expected ${session.inputByteSize} bytes per frame but received ${frameData.byteLength}`, - }) - return - } - - void enqueueNativeVideoExportFrameWrite(session, frameData) - .then(() => { - settleNativeVideoExportWriteFrameRequest(sessionId, session, requestId, { - success: true, - }) - }) - .catch((error) => { - session.stdinError = error instanceof Error ? 
error : new Error(String(error)) - settleNativeVideoExportWriteFrameRequest(sessionId, session, requestId, { - success: false, - error: getNativeVideoExportSessionError( - session, - session.stdinError.message, - ), - }) - }) - }, - ) - - ipcMain.handle( - 'native-video-export-finish', - async (_, sessionId: string, options?: NativeVideoExportFinishOptions) => { - const session = nativeVideoExportSessions.get(sessionId) - if (!session) { - return { success: false, error: 'Invalid native export session' } - } - - try { - await session.writeSequence - if (!session.ffmpegProcess.stdin.destroyed && !session.ffmpegProcess.stdin.writableEnded) { - session.ffmpegProcess.stdin.end() - } - await session.completionPromise - - const finalizedPath = await muxNativeVideoExportAudio(session.outputPath, options ?? {}) - const data = await fs.readFile(finalizedPath) - nativeVideoExportSessions.delete(sessionId) - await removeTemporaryExportFile(finalizedPath) - - return { - success: true, - data: new Uint8Array(data), - encoderName: session.encoderName, - } - } catch (error) { - flushNativeVideoExportPendingWriteRequests( - sessionId, - session, - String(error), - ) - nativeVideoExportSessions.delete(sessionId) - await removeTemporaryExportFile(session.outputPath) - const finalizedSuffix = session.outputPath.replace(/\.mp4$/, '-final.mp4') - await removeTemporaryExportFile(finalizedSuffix) - return { - success: false, - error: String(error), - } - } - }, - ) - - ipcMain.handle( - 'mux-exported-video-audio', - async (_, videoData: ArrayBuffer, options?: NativeVideoExportFinishOptions) => { - try { - const data = await muxExportedVideoAudioBuffer(videoData, options ?? 
{}) - return { - success: true, - data, - } - } catch (error) { - return { - success: false, - error: String(error), - } - } - }, - ) - - ipcMain.handle('native-video-export-cancel', async (_, sessionId: string) => { - const session = nativeVideoExportSessions.get(sessionId) - if (!session) { - return { success: true } - } - - session.terminating = true - nativeVideoExportSessions.delete(sessionId) - flushNativeVideoExportPendingWriteRequests( - sessionId, - session, - 'Native video export session was cancelled', - ) - - try { - if (!session.ffmpegProcess.stdin.destroyed && !session.ffmpegProcess.stdin.writableEnded) { - session.ffmpegProcess.stdin.destroy() - } - } catch { - // Stream may already be closed. - } - - try { - session.ffmpegProcess.kill('SIGKILL') - } catch { - // Process may already be closed. - } - - await session.completionPromise.catch(() => undefined) - await removeTemporaryExportFile(session.outputPath) - return { success: true } - }) - - ipcMain.handle('save-exported-video', async (event, videoData: ArrayBuffer, fileName: string) => { - try { - // Determine file type from extension - const isGif = fileName.toLowerCase().endsWith('.gif'); - const filters = isGif - ? [{ name: 'GIF Image', extensions: ['gif'] }] - : [{ name: 'MP4 Video', extensions: ['mp4'] }]; - const parentWindow = BrowserWindow.fromWebContents(event.sender) - const saveDialogOptions: SaveDialogOptions = { - title: isGif ? 'Save Exported GIF' : 'Save Exported Video', - defaultPath: path.join(app.getPath('downloads'), fileName), - filters, - properties: ['createDirectory', 'showOverwriteConfirmation'], - } - - const result = parentWindow - ? 
await dialog.showSaveDialog(parentWindow, saveDialogOptions) - : await dialog.showSaveDialog(saveDialogOptions) - - if (result.canceled || !result.filePath) { - return { - success: false, - canceled: true, - message: 'Export canceled' - }; - } - - await fs.writeFile(result.filePath, Buffer.from(videoData)); - - return { - success: true, - path: result.filePath, - message: 'Video exported successfully' - }; - } catch (error) { - console.error('Failed to save exported video:', error) - return { - success: false, - message: 'Failed to save exported video', - error: String(error) - } - } - }) - - ipcMain.handle('write-exported-video-to-path', async (_event, videoData: ArrayBuffer, outputPath: string) => { - try { - const resolvedPath = path.resolve(outputPath) - await fs.mkdir(path.dirname(resolvedPath), { recursive: true }); - await fs.writeFile(resolvedPath, Buffer.from(videoData)); - - return { - success: true, - path: outputPath, - message: 'Video exported successfully', - canceled: false, - }; - } catch (error) { - console.error('Failed to write exported video to path:', error) - return { - success: false, - message: 'Failed to write exported video', - canceled: false, - error: String(error) - } - } - }) - - ipcMain.handle('open-video-file-picker', async () => { - try { - const recordingsDir = await getRecordingsDir() - const result = await dialog.showOpenDialog({ - title: 'Select Video File', - defaultPath: recordingsDir, - filters: [ - { name: 'Video Files', extensions: ['webm', 'mp4', 'mov', 'avi', 'mkv'] }, - { name: 'All Files', extensions: ['*'] } - ], - properties: ['openFile'] - }); - - if (result.canceled || result.filePaths.length === 0) { - return { success: false, canceled: true }; - } - - approveUserPath(result.filePaths[0]) - setCurrentProjectPath(null) - return { - success: true, - path: result.filePaths[0] - }; - } catch (error) { - console.error('Failed to open file picker:', error); - return { - success: false, - message: 'Failed to open file 
picker', - error: String(error) - }; - } - }); - - ipcMain.handle('open-audio-file-picker', async () => { - try { - const result = await dialog.showOpenDialog({ - title: 'Select Audio File', - filters: [ - { name: 'Audio Files', extensions: ['mp3', 'wav', 'aac', 'm4a', 'flac', 'ogg'] }, - { name: 'All Files', extensions: ['*'] } - ], - properties: ['openFile'] - }); - - if (result.canceled || result.filePaths.length === 0) { - return { success: false, canceled: true }; - } - - approveUserPath(result.filePaths[0]) - return { - success: true, - path: result.filePaths[0] - }; - } catch (error) { - console.error('Failed to open audio file picker:', error); - return { - success: false, - message: 'Failed to open audio file picker', - error: String(error) - }; - } - }); - - ipcMain.handle('open-whisper-executable-picker', async () => { - try { - const result = await dialog.showOpenDialog({ - title: 'Select Whisper Executable', - filters: [ - { name: 'Executables', extensions: process.platform === 'win32' ? 
['exe', 'cmd', 'bat'] : ['*'] }, - { name: 'All Files', extensions: ['*'] }, - ], - properties: ['openFile'], - }) - - if (result.canceled || result.filePaths.length === 0) { - return { success: false, canceled: true } - } - - approveUserPath(result.filePaths[0]) - return { success: true, path: result.filePaths[0] } - } catch (error) { - console.error('Failed to open Whisper executable picker:', error) - return { success: false, error: String(error) } - } - }) - - ipcMain.handle('open-whisper-model-picker', async () => { - try { - const result = await dialog.showOpenDialog({ - title: 'Select Whisper Model', - filters: [ - { name: 'Whisper Models', extensions: ['bin'] }, - { name: 'All Files', extensions: ['*'] }, - ], - properties: ['openFile'], - }) - - if (result.canceled || result.filePaths.length === 0) { - return { success: false, canceled: true } - } - - approveUserPath(result.filePaths[0]) - return { success: true, path: result.filePaths[0] } - } catch (error) { - console.error('Failed to open Whisper model picker:', error) - return { success: false, error: String(error) } - } - }) - - ipcMain.handle('get-whisper-small-model-status', async () => { - try { - return await getWhisperSmallModelStatus() - } catch (error) { - return { success: false, exists: false, path: null, error: String(error) } - } - }) - - ipcMain.handle('download-whisper-small-model', async (event) => { - try { - const existing = await getWhisperSmallModelStatus() - if (existing.exists) { - sendWhisperModelDownloadProgress(event.sender, { - status: 'downloaded', - progress: 100, - path: existing.path, - }) - return { success: true, path: existing.path, alreadyDownloaded: true } - } - - const modelPath = await downloadWhisperSmallModel(event.sender) - return { success: true, path: modelPath } - } catch (error) { - console.error('Failed to download Whisper small model:', error) - return { success: false, error: String(error) } - } - }) - - ipcMain.handle('delete-whisper-small-model', async 
(event) => { - try { - await deleteWhisperSmallModel() - sendWhisperModelDownloadProgress(event.sender, { - status: 'idle', - progress: 0, - path: null, - }) - return { success: true } - } catch (error) { - console.error('Failed to delete Whisper small model:', error) - // Verify whether the file was actually removed despite the error - const status = await getWhisperSmallModelStatus() - if (!status.exists) { - // File is gone — treat as success - sendWhisperModelDownloadProgress(event.sender, { - status: 'idle', - progress: 0, - path: null, - }) - return { success: true } - } - sendWhisperModelDownloadProgress(event.sender, { - status: 'error', - progress: 0, - path: null, - error: String(error), - }) - return { success: false, error: String(error) } - } - }) - - ipcMain.handle('generate-auto-captions', async (_, options: { - videoPath: string - whisperExecutablePath: string - whisperModelPath: string - language?: string - }) => { - try { - const result = await generateAutoCaptionsFromVideo(options) - return { - success: true, - cues: result.cues, - message: result.audioSourceLabel === 'recording' - ? 
`Generated ${result.cues.length} caption cues.` - : `Generated ${result.cues.length} caption cues from the ${result.audioSourceLabel}.`, - } - } catch (error) { - console.error('Failed to generate auto captions:', error) - return { - success: false, - error: String(error), - message: 'Failed to generate auto captions', - } - } - }) - - ipcMain.handle('reveal-in-folder', async (_, filePath: string) => { - try { - // shell.showItemInFolder doesn't return a value, it throws on error - shell.showItemInFolder(filePath); - return { success: true }; - } catch (error) { - console.error(`Error revealing item in folder: ${filePath}`, error); - // Fallback to open the directory if revealing the item fails - // This might happen if the file was moved or deleted after export, - // or if the path is somehow invalid for showItemInFolder - try { - const openPathResult = await shell.openPath(path.dirname(filePath)); - if (openPathResult) { - // openPath returned an error message - return { success: false, error: openPathResult }; - } - return { success: true, message: 'Could not reveal item, but opened directory.' }; - } catch (openError) { - console.error(`Error opening directory: ${path.dirname(filePath)}`, openError); - return { success: false, error: String(error) }; - } - } - }); - - ipcMain.handle('open-recordings-folder', async () => { - try { - const recordingsDir = await getRecordingsDir(); - const openPathResult = await shell.openPath(recordingsDir); - if (openPathResult) { - return { success: false, error: openPathResult, message: 'Failed to open recordings folder.' }; - } - - return { success: true }; - } catch (error) { - console.error('Failed to open recordings folder:', error); - return { success: false, error: String(error), message: 'Failed to open recordings folder.' 
}; - } - }); - - ipcMain.handle('get-recordings-directory', async () => { - try { - const recordingsDir = await getRecordingsDir() - return { - success: true, - path: recordingsDir, - isDefault: recordingsDir === RECORDINGS_DIR, - } - } catch (error) { - return { - success: false, - path: RECORDINGS_DIR, - isDefault: true, - error: String(error), - } - } - }) - - ipcMain.handle('choose-recordings-directory', async () => { - try { - const current = await getRecordingsDir() - const result = await dialog.showOpenDialog({ - title: 'Choose recordings folder', - defaultPath: current, - properties: ['openDirectory', 'createDirectory', 'promptToCreate'], - }) - - if (result.canceled || result.filePaths.length === 0) { - return { success: false, canceled: true, path: current } - } - - const selectedPath = path.resolve(result.filePaths[0]) - await fs.mkdir(selectedPath, { recursive: true }) - await fs.access(selectedPath, fsConstants.W_OK) - await persistRecordingsDirectorySetting(selectedPath) - - return { success: true, path: selectedPath, isDefault: selectedPath === RECORDINGS_DIR } - } catch (error) { - return { success: false, error: String(error), message: 'Failed to set recordings folder' } - } - }) - - ipcMain.handle('save-project-file', async (_, projectData: unknown, suggestedName?: string, existingProjectPath?: string, thumbnailDataUrl?: string | null) => { - try { - const projectsDir = await getProjectsDir() - const trustedExistingProjectPath = isTrustedProjectPath(existingProjectPath) - ? 
existingProjectPath - : null - - if (trustedExistingProjectPath) { - await fs.writeFile(trustedExistingProjectPath, JSON.stringify(projectData, null, 2), 'utf-8') - setCurrentProjectPath(trustedExistingProjectPath) - await saveProjectThumbnail(trustedExistingProjectPath, thumbnailDataUrl) - await rememberRecentProject(trustedExistingProjectPath) - return { - success: true, - path: trustedExistingProjectPath, - message: 'Project saved successfully' - } - } - - const safeName = (suggestedName || `project-${Date.now()}`).replace(/[^a-zA-Z0-9-_]/g, '_') - const defaultName = safeName.endsWith(`.${PROJECT_FILE_EXTENSION}`) - ? safeName - : `${safeName}.${PROJECT_FILE_EXTENSION}` - - const result = await dialog.showSaveDialog({ - title: 'Save Recordly Project', - defaultPath: path.join(projectsDir, defaultName), - filters: [ - { name: 'Recordly Project', extensions: [PROJECT_FILE_EXTENSION] }, - { name: 'JSON', extensions: ['json'] } - ], - properties: ['createDirectory', 'showOverwriteConfirmation'] - }) - - if (result.canceled || !result.filePath) { - return { - success: false, - canceled: true, - message: 'Save project canceled' - } - } - - await fs.writeFile(result.filePath, JSON.stringify(projectData, null, 2), 'utf-8') - setCurrentProjectPath(result.filePath) - await saveProjectThumbnail(result.filePath, thumbnailDataUrl) - await rememberRecentProject(result.filePath) - - return { - success: true, - path: result.filePath, - message: 'Project saved successfully' - } - } catch (error) { - console.error('Failed to save project file:', error) - return { - success: false, - message: 'Failed to save project file', - error: String(error) - } - } - }) - - ipcMain.handle('load-project-file', async () => { - try { - const projectsDir = await getProjectsDir() - const result = await dialog.showOpenDialog({ - title: 'Open Recordly Project', - defaultPath: projectsDir, - filters: [ - { name: 'Recordly Project', extensions: [PROJECT_FILE_EXTENSION, 
...LEGACY_PROJECT_FILE_EXTENSIONS] }, - { name: 'JSON', extensions: ['json'] }, - { name: 'All Files', extensions: ['*'] } - ], - properties: ['openFile'] - }) - - if (result.canceled || result.filePaths.length === 0) { - return { success: false, canceled: true, message: 'Open project canceled' } - } - - return await loadProjectFromPath(result.filePaths[0]) - } catch (error) { - console.error('Failed to load project file:', error) - return { - success: false, - message: 'Failed to load project file', - error: String(error) - } - } - }) - - ipcMain.handle('load-current-project-file', async () => { - try { - if (!currentProjectPath) { - return { success: false, message: 'No active project' } - } - - return await loadProjectFromPath(currentProjectPath) - } catch (error) { - console.error('Failed to load current project file:', error) - return { - success: false, - message: 'Failed to load current project file', - error: String(error), - } - } - }) - - ipcMain.handle('get-projects-directory', async () => { - try { - return { - success: true, - path: await getProjectsDir(), - } - } catch (error) { - return { - success: false, - error: String(error), - } - } - }) - - ipcMain.handle('list-project-files', async () => { - try { - const library = await listProjectLibraryEntries() - return { - success: true, - projectsDir: library.projectsDir, - entries: library.entries, - } - } catch (error) { - return { - success: false, - projectsDir: null, - entries: [], - error: String(error), - } - } - }) - - ipcMain.handle('open-project-file-at-path', async (_, filePath: string) => { - try { - return await loadProjectFromPath(filePath) - } catch (error) { - console.error('Failed to open project file at path:', error) - return { - success: false, - message: 'Failed to open project file', - error: String(error), - } - } - }) - - ipcMain.handle('open-projects-directory', async () => { - try { - const projectsDir = await getProjectsDir() - const openPathResult = await 
shell.openPath(projectsDir) - if (openPathResult) { - return { success: false, error: openPathResult, message: 'Failed to open projects folder.' } - } - - return { success: true, path: projectsDir } - } catch (error) { - console.error('Failed to open projects folder:', error) - return { success: false, error: String(error), message: 'Failed to open projects folder.' } - } - }) - ipcMain.handle('set-current-video-path', async (_, path: string) => { - setCurrentVideoPath(normalizeVideoSourcePath(path) ?? path) - approveUserPath(currentVideoPath) - const resolvedSession = await resolveRecordingSession(currentVideoPath) - ?? { - videoPath: currentVideoPath!, - webcamPath: null, - timeOffsetMs: 0, - } - - setCurrentRecordingSession(resolvedSession) - await replaceApprovedSessionLocalReadPaths([ - resolvedSession.videoPath, - resolvedSession.webcamPath, - ]) - - if (resolvedSession.webcamPath) { - await persistRecordingSessionManifest(resolvedSession) - } - - setCurrentProjectPath(null) - return { success: true, webcamPath: resolvedSession.webcamPath ?? null } - }) - - ipcMain.handle('set-current-recording-session', async (_, session: { videoPath: string; webcamPath?: string | null; timeOffsetMs?: number }) => { - const normalizedVideoPath = normalizeVideoSourcePath(session.videoPath) ?? session.videoPath - setCurrentVideoPath(normalizedVideoPath) - setCurrentRecordingSession({ - videoPath: normalizedVideoPath, - webcamPath: normalizeVideoSourcePath(session.webcamPath ?? null), - timeOffsetMs: normalizeRecordingTimeOffsetMs(session.timeOffsetMs), - }); - await replaceApprovedSessionLocalReadPaths([ - currentRecordingSession!.videoPath, - currentRecordingSession!.webcamPath, - ]) - setCurrentProjectPath(null) - await persistRecordingSessionManifest(currentRecordingSession!) 
- return { success: true } - }) - - ipcMain.handle('get-current-recording-session', () => { - if (!currentRecordingSession) { - return { success: false } - } - - return { - success: true, - session: currentRecordingSession, - } - }) - - ipcMain.handle('get-current-video-path', () => { - return currentVideoPath ? { success: true, path: currentVideoPath } : { success: false }; - }); - - ipcMain.handle('clear-current-video-path', () => { - setCurrentVideoPath(null); - setCurrentRecordingSession(null); - return { success: true }; - }); - - ipcMain.handle('delete-recording-file', async (_, filePath: string) => { - try { - if (!filePath || !isAutoRecordingPath(filePath)) { - return { success: false, error: 'Only auto-generated recordings can be deleted' }; - } - await fs.unlink(filePath); - // Also delete the cursor telemetry sidecar if it exists - const telemetryPath = getTelemetryPathForVideo(filePath); - await fs.unlink(telemetryPath).catch(() => {}); - if (currentVideoPath === filePath) { - setCurrentVideoPath(null); - setCurrentRecordingSession(null); - } - return { success: true }; - } catch (error) { - return { success: false, error: String(error) }; - } - }); - - ipcMain.handle('app:getVersion', () => { - return app.getVersion() - }) - - ipcMain.handle('get-platform', () => { - return process.platform; - }); - - // --------------------------------------------------------------------------- - // Cursor hiding for the browser-capture fallback. - // The IPC promise resolves only after the cursor hide attempt completes. 
- // --------------------------------------------------------------------------- - ipcMain.handle('hide-cursor', () => { - if (process.platform !== 'win32') { - return { success: true } - } - - return { success: hideCursor() } - }) - - ipcMain.handle('get-shortcuts', async () => { - try { - const data = await fs.readFile(SHORTCUTS_FILE, 'utf-8'); - return JSON.parse(data); - } catch { - return null; - } - }); - - ipcMain.handle('save-shortcuts', async (_, shortcuts: unknown) => { - try { - await fs.writeFile(SHORTCUTS_FILE, JSON.stringify(shortcuts, null, 2), 'utf-8'); - return { success: true }; - } catch (error) { - console.error('Failed to save shortcuts:', error); - return { success: false, error: String(error) }; - } - }); - - // --------------------------------------------------------------------------- - // Countdown timer before recording - // --------------------------------------------------------------------------- - ipcMain.handle('get-recording-preferences', async () => { - try { - const content = await fs.readFile(RECORDINGS_SETTINGS_FILE, 'utf-8') - const parsed = JSON.parse(content) as Record - return { - success: true, - microphoneEnabled: parsed.microphoneEnabled === true, - microphoneDeviceId: typeof parsed.microphoneDeviceId === 'string' ? 
parsed.microphoneDeviceId : undefined, - systemAudioEnabled: parsed.systemAudioEnabled !== false, - } - } catch { - return { success: true, microphoneEnabled: false, microphoneDeviceId: undefined, systemAudioEnabled: true } - } - }) - - ipcMain.handle('set-recording-preferences', async (_, prefs: { microphoneEnabled?: boolean; microphoneDeviceId?: string; systemAudioEnabled?: boolean }) => { - try { - let existing: Record = {} - try { - const content = await fs.readFile(RECORDINGS_SETTINGS_FILE, 'utf-8') - existing = JSON.parse(content) as Record - } catch { - // file doesn't exist yet - } - const merged = { ...existing, ...prefs } - await fs.writeFile(RECORDINGS_SETTINGS_FILE, JSON.stringify(merged, null, 2), 'utf-8') - return { success: true } - } catch (error) { - console.error('Failed to save recording preferences:', error) - return { success: false, error: String(error) } - } - }) - - ipcMain.handle('get-countdown-delay', async () => { - try { - const content = await fs.readFile(COUNTDOWN_SETTINGS_FILE, 'utf-8') - const parsed = JSON.parse(content) as { delay?: number } - return { success: true, delay: parsed.delay ?? 
3 } - } catch { - return { success: true, delay: 3 } - } - }) - - ipcMain.handle('set-countdown-delay', async (_, delay: number) => { - try { - await fs.writeFile(COUNTDOWN_SETTINGS_FILE, JSON.stringify({ delay }, null, 2), 'utf-8') - return { success: true } - } catch (error) { - console.error('Failed to save countdown delay:', error) - return { success: false, error: String(error) } - } - }) - - ipcMain.handle('start-countdown', async (_, seconds: number) => { - if (countdownInProgress) { - return { success: false, error: 'Countdown already in progress' } - } - - setCountdownInProgress(true) - setCountdownCancelled(false) - setCountdownRemaining(seconds) - - const countdownWin = createCountdownWindow() - - if (countdownWin.webContents.isLoadingMainFrame()) { - await new Promise((resolve) => { - countdownWin.webContents.once('did-finish-load', () => { - resolve() - }) - }) - } - - return new Promise<{ success: boolean; cancelled?: boolean }>((resolve) => { - let remaining = seconds - setCountdownRemaining(remaining) - - countdownWin.webContents.send('countdown-tick', remaining) - - setCountdownTimer(setInterval(() => { - if (countdownCancelled) { - if (countdownTimer) { - clearInterval(countdownTimer) - setCountdownTimer(null) - } - closeCountdownWindow() - setCountdownInProgress(false) - setCountdownRemaining(null) - resolve({ success: false, cancelled: true }) - return - } - - remaining-- - setCountdownRemaining(remaining) - - if (remaining <= 0) { - if (countdownTimer) { - clearInterval(countdownTimer) - setCountdownTimer(null) - } - closeCountdownWindow() - setCountdownInProgress(false) - setCountdownRemaining(null) - resolve({ success: true }) - } else { - const win = getCountdownWindow() - if (win && !win.isDestroyed()) { - win.webContents.send('countdown-tick', remaining) - } - } - }, 1000)) - }) - }) - - ipcMain.handle('cancel-countdown', () => { - setCountdownCancelled(true) - setCountdownInProgress(false) - setCountdownRemaining(null) - if 
(countdownTimer) { - clearInterval(countdownTimer) - setCountdownTimer(null) - } - closeCountdownWindow() - return { success: true } - }) - - ipcMain.handle('get-active-countdown', () => { - return { - success: true, - seconds: countdownInProgress ? countdownRemaining : null, - } - }) + registerSourceHandlers({ createEditorWindow, createSourceSelectorWindow, getSourceSelectorWindow }); + registerRecordingHandlers(onRecordingStateChange); + registerPermissionHandlers(); + registerAssetHandlers(); + registerExportHandlers(); + registerCaptionHandlers(); + registerProjectHandlers(); + registerSettingsHandlers(); } diff --git a/electron/ipc/project/manager.ts b/electron/ipc/project/manager.ts index 728988f9..b3d09607 100644 --- a/electron/ipc/project/manager.ts +++ b/electron/ipc/project/manager.ts @@ -335,3 +335,9 @@ export async function loadProjectFromPath(projectPath: string) { project, }; } + +export function isTrustedProjectPath(filePath?: string | null): boolean { + if (!filePath || !currentProjectPath) return false; + return normalizePath(filePath) === normalizePath(currentProjectPath); +} + diff --git a/electron/ipc/register/assets.ts b/electron/ipc/register/assets.ts new file mode 100644 index 00000000..cf35bff4 --- /dev/null +++ b/electron/ipc/register/assets.ts @@ -0,0 +1,123 @@ +import { existsSync } from "node:fs"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { pathToFileURL } from "node:url"; +import { ipcMain } from "electron"; +import { USER_DATA_PATH } from "../../appPaths"; +import { normalizePath } from "../utils"; +import { isAllowedLocalReadPath, getAssetRootPath } from "../project/manager"; + +export function registerAssetHandlers() { + + // Generate a tiny thumbnail for a wallpaper image and cache it in userData. + // Returns the cached thumbnail as raw JPEG bytes for fast grid rendering. + // Serialized to prevent concurrent nativeImage operations from eating memory. 
+ const THUMB_SIZE = 96 + const thumbCacheDir = path.join(USER_DATA_PATH, 'wallpaper-thumbs') + let thumbGenerationQueue: Promise = Promise.resolve() + + ipcMain.handle('generate-wallpaper-thumbnail', async (_, filePath: string) => { + try { + const resolved = normalizePath(filePath) + const realResolved = await fs.realpath(resolved).catch(() => resolved) + + if (!isAllowedLocalReadPath(resolved) && !isAllowedLocalReadPath(realResolved)) { + return { success: false, error: 'Access denied' } + } + + // Deterministic cache key from file path + mtime + const stat = await fs.stat(resolved) + const cacheKey = Buffer.from(`${resolved}:${stat.mtimeMs}`).toString('base64url') + const thumbPath = path.join(thumbCacheDir, `${cacheKey}.jpg`) + + // Return cached thumbnail if it exists (no queue needed) + if (existsSync(thumbPath)) { + const data = await fs.readFile(thumbPath) + return { success: true, data } + } + + // Serialize nativeImage operations to avoid OOM from concurrent full-res decodes + let jpegData: Buffer + const generation = thumbGenerationQueue.then(async () => { + const { nativeImage } = await import('electron') + const img = nativeImage.createFromPath(resolved) + if (img.isEmpty()) { + throw new Error('Failed to load image') + } + const { width, height } = img.getSize() + const scale = THUMB_SIZE / Math.min(width, height) + const resized = img.resize({ + width: Math.round(width * scale), + height: Math.round(height * scale), + quality: 'good', + }) + jpegData = resized.toJPEG(70) + + // Cache to disk + await fs.mkdir(thumbCacheDir, { recursive: true }) + await fs.writeFile(thumbPath, jpegData) + }) + // Keep the queue moving even if one fails + thumbGenerationQueue = generation.catch(() => {}) + await generation + + return { success: true, data: jpegData! 
} + } catch (error) { + return { success: false, error: String(error) } + } + }) + + // Return base path for assets so renderer can resolve file:// paths in production + ipcMain.handle('get-asset-base-path', () => { + try { + const assetPath = getAssetRootPath() + return pathToFileURL(`${assetPath}${path.sep}`).toString() + } catch (err) { + console.error('Failed to resolve asset base path:', err) + return null + } + }) + + ipcMain.handle('list-asset-directory', async (_, relativeDir: string) => { + try { + const normalizedRelativeDir = String(relativeDir ?? '') + .replace(/\\/g, '/') + .replace(/^\/+/, '') + + const assetRootPath = path.resolve(getAssetRootPath()) + const targetDirPath = path.resolve(assetRootPath, normalizedRelativeDir) + if (targetDirPath !== assetRootPath && !targetDirPath.startsWith(`${assetRootPath}${path.sep}`)) { + return { success: false, error: 'Invalid asset directory' } + } + + const entries = await fs.readdir(targetDirPath, { withFileTypes: true }) + const files = entries + .filter((entry) => entry.isFile()) + .map((entry) => entry.name) + .sort(new Intl.Collator(undefined, { numeric: true, sensitivity: 'base' }).compare) + + return { success: true, files } + } catch (error) { + console.error('Failed to list asset directory:', error) + return { success: false, error: String(error) } + } + }) + + ipcMain.handle('read-local-file', async (_, filePath: string) => { + try { + const resolved = normalizePath(filePath) + const realResolved = await fs.realpath(resolved).catch(() => resolved) + if (!isAllowedLocalReadPath(resolved) && !isAllowedLocalReadPath(realResolved)) { + console.warn(`[read-local-file] Blocked read outside allowed directories: ${resolved}`) + return { success: false, error: 'Access denied: path outside allowed directories' } + } + + const data = await fs.readFile(resolved) + return { success: true, data } + } catch (error) { + console.error('Failed to read local file:', error) + return { success: false, error: 
String(error) } + } + }) + +} diff --git a/electron/ipc/register/captions.ts b/electron/ipc/register/captions.ts new file mode 100644 index 00000000..9d9b7f06 --- /dev/null +++ b/electron/ipc/register/captions.ts @@ -0,0 +1,207 @@ +import { dialog, ipcMain } from "electron"; +import { setCurrentProjectPath } from "../state"; +import { + getWhisperSmallModelStatus, + downloadWhisperSmallModel, + deleteWhisperSmallModel, + sendWhisperModelDownloadProgress, +} from "../captions/whisper"; +import { generateAutoCaptionsFromVideo } from "../captions/generate"; +import { approveUserPath, getRecordingsDir } from "../utils"; + +export function registerCaptionHandlers() { + ipcMain.handle('open-video-file-picker', async () => { + try { + const recordingsDir = await getRecordingsDir() + const result = await dialog.showOpenDialog({ + title: 'Select Video File', + defaultPath: recordingsDir, + filters: [ + { name: 'Video Files', extensions: ['webm', 'mp4', 'mov', 'avi', 'mkv'] }, + { name: 'All Files', extensions: ['*'] } + ], + properties: ['openFile'] + }); + + if (result.canceled || result.filePaths.length === 0) { + return { success: false, canceled: true }; + } + + approveUserPath(result.filePaths[0]) + setCurrentProjectPath(null) + return { + success: true, + path: result.filePaths[0] + }; + } catch (error) { + console.error('Failed to open file picker:', error); + return { + success: false, + message: 'Failed to open file picker', + error: String(error) + }; + } + }); + + ipcMain.handle('open-audio-file-picker', async () => { + try { + const result = await dialog.showOpenDialog({ + title: 'Select Audio File', + filters: [ + { name: 'Audio Files', extensions: ['mp3', 'wav', 'aac', 'm4a', 'flac', 'ogg'] }, + { name: 'All Files', extensions: ['*'] } + ], + properties: ['openFile'] + }); + + if (result.canceled || result.filePaths.length === 0) { + return { success: false, canceled: true }; + } + + approveUserPath(result.filePaths[0]) + return { + success: true, + path: 
result.filePaths[0] + }; + } catch (error) { + console.error('Failed to open audio file picker:', error); + return { + success: false, + message: 'Failed to open audio file picker', + error: String(error) + }; + } + }); + + ipcMain.handle('open-whisper-executable-picker', async () => { + try { + const result = await dialog.showOpenDialog({ + title: 'Select Whisper Executable', + filters: [ + { name: 'Executables', extensions: process.platform === 'win32' ? ['exe', 'cmd', 'bat'] : ['*'] }, + { name: 'All Files', extensions: ['*'] }, + ], + properties: ['openFile'], + }) + + if (result.canceled || result.filePaths.length === 0) { + return { success: false, canceled: true } + } + + approveUserPath(result.filePaths[0]) + return { success: true, path: result.filePaths[0] } + } catch (error) { + console.error('Failed to open Whisper executable picker:', error) + return { success: false, error: String(error) } + } + }) + + ipcMain.handle('open-whisper-model-picker', async () => { + try { + const result = await dialog.showOpenDialog({ + title: 'Select Whisper Model', + filters: [ + { name: 'Whisper Models', extensions: ['bin'] }, + { name: 'All Files', extensions: ['*'] }, + ], + properties: ['openFile'], + }) + + if (result.canceled || result.filePaths.length === 0) { + return { success: false, canceled: true } + } + + approveUserPath(result.filePaths[0]) + return { success: true, path: result.filePaths[0] } + } catch (error) { + console.error('Failed to open Whisper model picker:', error) + return { success: false, error: String(error) } + } + }) + + ipcMain.handle('get-whisper-small-model-status', async () => { + try { + return await getWhisperSmallModelStatus() + } catch (error) { + return { success: false, exists: false, path: null, error: String(error) } + } + }) + + ipcMain.handle('download-whisper-small-model', async (event) => { + try { + const existing = await getWhisperSmallModelStatus() + if (existing.exists) { + sendWhisperModelDownloadProgress(event.sender, { 
+ status: 'downloaded', + progress: 100, + path: existing.path, + }) + return { success: true, path: existing.path, alreadyDownloaded: true } + } + + const modelPath = await downloadWhisperSmallModel(event.sender) + return { success: true, path: modelPath } + } catch (error) { + console.error('Failed to download Whisper small model:', error) + return { success: false, error: String(error) } + } + }) + + ipcMain.handle('delete-whisper-small-model', async (event) => { + try { + await deleteWhisperSmallModel() + sendWhisperModelDownloadProgress(event.sender, { + status: 'idle', + progress: 0, + path: null, + }) + return { success: true } + } catch (error) { + console.error('Failed to delete Whisper small model:', error) + // Verify whether the file was actually removed despite the error + const status = await getWhisperSmallModelStatus() + if (!status.exists) { + // File is gone — treat as success + sendWhisperModelDownloadProgress(event.sender, { + status: 'idle', + progress: 0, + path: null, + }) + return { success: true } + } + sendWhisperModelDownloadProgress(event.sender, { + status: 'error', + progress: 0, + path: null, + error: String(error), + }) + return { success: false, error: String(error) } + } + }) + + ipcMain.handle('generate-auto-captions', async (_, options: { + videoPath: string + whisperExecutablePath: string + whisperModelPath: string + language?: string + }) => { + try { + const result = await generateAutoCaptionsFromVideo(options) + return { + success: true, + cues: result.cues, + message: result.audioSourceLabel === 'recording' + ? 
`Generated ${result.cues.length} caption cues.` + : `Generated ${result.cues.length} caption cues from the ${result.audioSourceLabel}.`, + } + } catch (error) { + console.error('Failed to generate auto captions:', error) + return { + success: false, + error: String(error), + message: 'Failed to generate auto captions', + } + } + }) + +} diff --git a/electron/ipc/register/export.ts b/electron/ipc/register/export.ts new file mode 100644 index 00000000..cbd2b00f --- /dev/null +++ b/electron/ipc/register/export.ts @@ -0,0 +1,383 @@ +import type { ChildProcessByStdio } from "node:child_process"; +import { spawn } from "node:child_process"; +import fs from "node:fs/promises"; +import path from "node:path"; +import type { Readable, Writable } from "node:stream"; +import type { SaveDialogOptions } from "electron"; +import { app, BrowserWindow, dialog, ipcMain } from "electron"; +import { getFfmpegBinaryPath } from "../ffmpeg/binary"; +import { + buildNativeH264StreamExportArgs, + buildNativeVideoExportArgs, + getNativeVideoInputByteSize, + type NativeExportEncodingMode, + type NativeVideoExportFinishOptions, +} from "../nativeVideoExport"; +import { + nativeVideoExportSessions, + getNativeVideoExportMaxQueuedWriteBytes, + isHardwareAcceleratedVideoEncoder, + removeTemporaryExportFile, + getNativeVideoExportSessionError, + sendNativeVideoExportWriteFrameResult, + settleNativeVideoExportWriteFrameRequest, + flushNativeVideoExportPendingWriteRequests, + isIgnorableNativeVideoExportStreamError, + enqueueNativeVideoExportFrameWrite, + resolveNativeVideoEncoder, + muxNativeVideoExportAudio, + muxExportedVideoAudioBuffer, + type NativeVideoExportSession, +} from "../export/native-video"; + +export function registerExportHandlers() { + ipcMain.handle( + 'native-video-export-start', + async ( + event, + options: { + width: number + height: number + frameRate: number + bitrate: number + encodingMode: NativeExportEncodingMode + inputMode?: 'rawvideo' | 'h264-stream' + }, + ) => { + 
try { + if (options.width % 2 !== 0 || options.height % 2 !== 0) { + throw new Error('Native export requires even output dimensions') + } + + const ffmpegPath = getFfmpegBinaryPath() + const inputMode = options.inputMode ?? 'rawvideo' + const sessionId = `recordly-export-${Date.now()}-${Math.random().toString(36).slice(2, 8)}` + const outputPath = path.join(app.getPath('temp'), `${sessionId}.mp4`) + + let encoderName: string + let ffmpegArgs: string[] + + if (inputMode === 'h264-stream') { + // Pre-encoded H.264 Annex B from browser VideoEncoder — just stream-copy into MP4 + encoderName = 'h264-stream-copy' + ffmpegArgs = buildNativeH264StreamExportArgs({ frameRate: options.frameRate, outputPath }) + } else { + encoderName = await resolveNativeVideoEncoder(ffmpegPath, options.encodingMode) + ffmpegArgs = buildNativeVideoExportArgs(encoderName, options, outputPath) + } + + const ffmpegProcess = spawn(ffmpegPath, ffmpegArgs, { + stdio: ['pipe', 'ignore', 'pipe'], + }) as ChildProcessByStdio + // For rawvideo, frames are a fixed RGBA size. For h264-stream, chunks are variable. + const inputByteSize = inputMode === 'rawvideo' ? getNativeVideoInputByteSize(options.width, options.height) : 0 + + const session: NativeVideoExportSession = { + ffmpegProcess, + outputPath, + inputByteSize, + inputMode, + maxQueuedWriteBytes: inputMode === 'h264-stream' ? 8 * 1024 * 1024 : getNativeVideoExportMaxQueuedWriteBytes(inputByteSize), + stderrOutput: '', + encoderName, + processError: null, + stdinError: null, + terminating: false, + writeSequence: Promise.resolve(), + sender: event.sender, + pendingWriteRequestIds: new Set(), + completionPromise: new Promise((resolve, reject) => { + ffmpegProcess.once('error', (error) => { + const processError = error instanceof Error ? 
error : new Error(String(error)) + if (session.terminating) { + resolve() + return + } + + session.processError = processError + reject(processError) + }) + ffmpegProcess.stdin.once('error', (error) => { + const stdinError = error instanceof Error ? error : new Error(String(error)) + if (session.terminating && isIgnorableNativeVideoExportStreamError(stdinError)) { + return + } + + session.stdinError = stdinError + }) + ffmpegProcess.once('close', (code, signal) => { + if (session.terminating) { + resolve() + return + } + + if (code === 0) { + resolve() + return + } + + reject( + new Error( + getNativeVideoExportSessionError( + session, + `FFmpeg exited with code ${code ?? 'unknown'}${signal ? ` (signal ${signal})` : ''}`, + ), + ), + ) + }) + }), + } + void session.completionPromise.catch(() => undefined) + + ffmpegProcess.stderr.on('data', (chunk: Buffer) => { + session.stderrOutput += chunk.toString() + }) + + nativeVideoExportSessions.set(sessionId, session) + + console.log( + `[native-export] Started ${isHardwareAcceleratedVideoEncoder(encoderName) ? 
'hardware' : 'software'} session ${sessionId} with ${encoderName}`, + ) + + return { + success: true, + sessionId, + encoderName, + } + } catch (error) { + console.error('[native-export] Failed to start native video export session:', error) + return { + success: false, + error: String(error), + } + } + }, + ) + + ipcMain.on( + 'native-video-export-write-frame-async', + ( + event, + payload: { + sessionId: string + requestId: number + frameData: Uint8Array + }, + ) => { + const sessionId = payload?.sessionId + const requestId = payload?.requestId + const frameData = payload?.frameData + + if (typeof sessionId !== 'string' || typeof requestId !== 'number' || !frameData) { + return + } + + const session = nativeVideoExportSessions.get(sessionId) + if (!session) { + sendNativeVideoExportWriteFrameResult(event.sender, sessionId, requestId, { + success: false, + error: 'Invalid native export session', + }) + return + } + + session.sender = event.sender + session.pendingWriteRequestIds.add(requestId) + + if (session.terminating) { + settleNativeVideoExportWriteFrameRequest(sessionId, session, requestId, { + success: false, + error: 'Native video export session was cancelled', + }) + return + } + + if (session.inputMode !== 'h264-stream' && frameData.byteLength !== session.inputByteSize) { + settleNativeVideoExportWriteFrameRequest(sessionId, session, requestId, { + success: false, + error: `Native video export expected ${session.inputByteSize} bytes per frame but received ${frameData.byteLength}`, + }) + return + } + + void enqueueNativeVideoExportFrameWrite(session, frameData) + .then(() => { + settleNativeVideoExportWriteFrameRequest(sessionId, session, requestId, { + success: true, + }) + }) + .catch((error) => { + session.stdinError = error instanceof Error ? 
error : new Error(String(error)) + settleNativeVideoExportWriteFrameRequest(sessionId, session, requestId, { + success: false, + error: getNativeVideoExportSessionError( + session, + session.stdinError.message, + ), + }) + }) + }, + ) + + ipcMain.handle( + 'native-video-export-finish', + async (_, sessionId: string, options?: NativeVideoExportFinishOptions) => { + const session = nativeVideoExportSessions.get(sessionId) + if (!session) { + return { success: false, error: 'Invalid native export session' } + } + + try { + await session.writeSequence + if (!session.ffmpegProcess.stdin.destroyed && !session.ffmpegProcess.stdin.writableEnded) { + session.ffmpegProcess.stdin.end() + } + await session.completionPromise + + const finalizedPath = await muxNativeVideoExportAudio(session.outputPath, options ?? {}) + const data = await fs.readFile(finalizedPath) + nativeVideoExportSessions.delete(sessionId) + await removeTemporaryExportFile(finalizedPath) + + return { + success: true, + data: new Uint8Array(data), + encoderName: session.encoderName, + } + } catch (error) { + flushNativeVideoExportPendingWriteRequests( + sessionId, + session, + String(error), + ) + nativeVideoExportSessions.delete(sessionId) + await removeTemporaryExportFile(session.outputPath) + const finalizedSuffix = session.outputPath.replace(/\.mp4$/, '-final.mp4') + await removeTemporaryExportFile(finalizedSuffix) + return { + success: false, + error: String(error), + } + } + }, + ) + + ipcMain.handle( + 'mux-exported-video-audio', + async (_, videoData: ArrayBuffer, options?: NativeVideoExportFinishOptions) => { + try { + const data = await muxExportedVideoAudioBuffer(videoData, options ?? 
{}) + return { + success: true, + data, + } + } catch (error) { + return { + success: false, + error: String(error), + } + } + }, + ) + + ipcMain.handle('native-video-export-cancel', async (_, sessionId: string) => { + const session = nativeVideoExportSessions.get(sessionId) + if (!session) { + return { success: true } + } + + session.terminating = true + nativeVideoExportSessions.delete(sessionId) + flushNativeVideoExportPendingWriteRequests( + sessionId, + session, + 'Native video export session was cancelled', + ) + + try { + if (!session.ffmpegProcess.stdin.destroyed && !session.ffmpegProcess.stdin.writableEnded) { + session.ffmpegProcess.stdin.destroy() + } + } catch { + // Stream may already be closed. + } + + try { + session.ffmpegProcess.kill('SIGKILL') + } catch { + // Process may already be closed. + } + + await session.completionPromise.catch(() => undefined) + await removeTemporaryExportFile(session.outputPath) + return { success: true } + }) + + ipcMain.handle('save-exported-video', async (event, videoData: ArrayBuffer, fileName: string) => { + try { + // Determine file type from extension + const isGif = fileName.toLowerCase().endsWith('.gif'); + const filters = isGif + ? [{ name: 'GIF Image', extensions: ['gif'] }] + : [{ name: 'MP4 Video', extensions: ['mp4'] }]; + const parentWindow = BrowserWindow.fromWebContents(event.sender) + const saveDialogOptions: SaveDialogOptions = { + title: isGif ? 'Save Exported GIF' : 'Save Exported Video', + defaultPath: path.join(app.getPath('downloads'), fileName), + filters, + properties: ['createDirectory', 'showOverwriteConfirmation'], + } + + const result = parentWindow + ? 
await dialog.showSaveDialog(parentWindow, saveDialogOptions) + : await dialog.showSaveDialog(saveDialogOptions) + + if (result.canceled || !result.filePath) { + return { + success: false, + canceled: true, + message: 'Export canceled' + }; + } + + await fs.writeFile(result.filePath, Buffer.from(videoData)); + + return { + success: true, + path: result.filePath, + message: 'Video exported successfully' + }; + } catch (error) { + console.error('Failed to save exported video:', error) + return { + success: false, + message: 'Failed to save exported video', + error: String(error) + } + } + }) + + ipcMain.handle('write-exported-video-to-path', async (_event, videoData: ArrayBuffer, outputPath: string) => { + try { + const resolvedPath = path.resolve(outputPath) + await fs.mkdir(path.dirname(resolvedPath), { recursive: true }); + await fs.writeFile(resolvedPath, Buffer.from(videoData)); + + return { + success: true, + path: outputPath, + message: 'Video exported successfully', + canceled: false, + }; + } catch (error) { + console.error('Failed to write exported video to path:', error) + return { + success: false, + message: 'Failed to write exported video', + canceled: false, + error: String(error) + } + } + }) + +} diff --git a/electron/ipc/register/permissions.ts b/electron/ipc/register/permissions.ts new file mode 100644 index 00000000..07057c96 --- /dev/null +++ b/electron/ipc/register/permissions.ts @@ -0,0 +1,87 @@ +import { ipcMain, shell, systemPreferences } from "electron"; +import { getMacPrivacySettingsUrl } from "../utils"; + +export function registerPermissionHandlers() { + ipcMain.handle('open-external-url', async (_, url: string) => { + try { + // Security: only allow http/https URLs to prevent file:// or custom protocol abuse + const parsed = new URL(url) + if (parsed.protocol !== 'https:' && parsed.protocol !== 'http:') { + return { success: false, error: `Blocked non-HTTP URL: ${parsed.protocol}` } + } + await shell.openExternal(url) + return { success: 
true } + } catch (error) { + console.error('Failed to open URL:', error) + return { success: false, error: String(error) } + } + }) + + ipcMain.handle('get-accessibility-permission-status', () => { + if (process.platform !== 'darwin') { + return { success: true, trusted: true, prompted: false } + } + + return { + success: true, + trusted: systemPreferences.isTrustedAccessibilityClient(false), + prompted: false, + } + }) + + ipcMain.handle('request-accessibility-permission', () => { + if (process.platform !== 'darwin') { + return { success: true, trusted: true, prompted: false } + } + + return { + success: true, + trusted: systemPreferences.isTrustedAccessibilityClient(true), + prompted: true, + } + }) + + ipcMain.handle('get-screen-recording-permission-status', () => { + if (process.platform !== 'darwin') { + return { success: true, status: 'granted' } + } + + try { + return { + success: true, + status: systemPreferences.getMediaAccessStatus('screen'), + } + } catch (error) { + console.error('Failed to get screen recording permission status:', error) + return { success: false, status: 'unknown', error: String(error) } + } + }) + + ipcMain.handle('open-screen-recording-preferences', async () => { + if (process.platform !== 'darwin') { + return { success: true } + } + + try { + await shell.openExternal(getMacPrivacySettingsUrl('screen')) + return { success: true } + } catch (error) { + console.error('Failed to open Screen Recording preferences:', error) + return { success: false, error: String(error) } + } + }) + + ipcMain.handle('open-accessibility-preferences', async () => { + if (process.platform !== 'darwin') { + return { success: true } + } + + try { + await shell.openExternal(getMacPrivacySettingsUrl('accessibility')) + return { success: true } + } catch (error) { + console.error('Failed to open Accessibility preferences:', error) + return { success: false, error: String(error) } + } + }) +} diff --git a/electron/ipc/register/project.ts 
b/electron/ipc/register/project.ts new file mode 100644 index 00000000..e5028802 --- /dev/null +++ b/electron/ipc/register/project.ts @@ -0,0 +1,371 @@ +import { constants as fsConstants } from "node:fs"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { dialog, ipcMain, shell } from "electron"; +import { RECORDINGS_DIR } from "../../appPaths"; +import { + PROJECT_FILE_EXTENSION, + LEGACY_PROJECT_FILE_EXTENSIONS, +} from "../constants"; +import { + currentProjectPath, + setCurrentProjectPath, + currentVideoPath, + setCurrentVideoPath, + currentRecordingSession, + setCurrentRecordingSession, +} from "../state"; +import { normalizeVideoSourcePath } from "../utils"; +import { replaceApprovedSessionLocalReadPaths } from "../project/manager"; +import { + getTelemetryPathForVideo, + isAutoRecordingPath, + getRecordingsDir, + approveUserPath, +} from "../utils"; +import { + getProjectsDir, + persistRecordingsDirectorySetting, + saveProjectThumbnail, + rememberRecentProject, + listProjectLibraryEntries, + loadProjectFromPath, + isTrustedProjectPath, +} from "../project/manager"; +import { persistRecordingSessionManifest, resolveRecordingSession } from "../project/session"; + +function normalizeRecordingTimeOffsetMs(value: unknown): number { + return typeof value === "number" && Number.isFinite(value) ? 
Math.round(value) : 0; +} + +export function registerProjectHandlers() { + ipcMain.handle('reveal-in-folder', async (_, filePath: string) => { + try { + // shell.showItemInFolder doesn't return a value, it throws on error + shell.showItemInFolder(filePath); + return { success: true }; + } catch (error) { + console.error(`Error revealing item in folder: ${filePath}`, error); + // Fallback to open the directory if revealing the item fails + // This might happen if the file was moved or deleted after export, + // or if the path is somehow invalid for showItemInFolder + try { + const openPathResult = await shell.openPath(path.dirname(filePath)); + if (openPathResult) { + // openPath returned an error message + return { success: false, error: openPathResult }; + } + return { success: true, message: 'Could not reveal item, but opened directory.' }; + } catch (openError) { + console.error(`Error opening directory: ${path.dirname(filePath)}`, openError); + return { success: false, error: String(error) }; + } + } + }); + + ipcMain.handle('open-recordings-folder', async () => { + try { + const recordingsDir = await getRecordingsDir(); + const openPathResult = await shell.openPath(recordingsDir); + if (openPathResult) { + return { success: false, error: openPathResult, message: 'Failed to open recordings folder.' }; + } + + return { success: true }; + } catch (error) { + console.error('Failed to open recordings folder:', error); + return { success: false, error: String(error), message: 'Failed to open recordings folder.' 
}; + } + }); + + ipcMain.handle('get-recordings-directory', async () => { + try { + const recordingsDir = await getRecordingsDir() + return { + success: true, + path: recordingsDir, + isDefault: recordingsDir === RECORDINGS_DIR, + } + } catch (error) { + return { + success: false, + path: RECORDINGS_DIR, + isDefault: true, + error: String(error), + } + } + }) + + ipcMain.handle('choose-recordings-directory', async () => { + try { + const current = await getRecordingsDir() + const result = await dialog.showOpenDialog({ + title: 'Choose recordings folder', + defaultPath: current, + properties: ['openDirectory', 'createDirectory', 'promptToCreate'], + }) + + if (result.canceled || result.filePaths.length === 0) { + return { success: false, canceled: true, path: current } + } + + const selectedPath = path.resolve(result.filePaths[0]) + await fs.mkdir(selectedPath, { recursive: true }) + await fs.access(selectedPath, fsConstants.W_OK) + await persistRecordingsDirectorySetting(selectedPath) + + return { success: true, path: selectedPath, isDefault: selectedPath === RECORDINGS_DIR } + } catch (error) { + return { success: false, error: String(error), message: 'Failed to set recordings folder' } + } + }) + + ipcMain.handle('save-project-file', async (_, projectData: unknown, suggestedName?: string, existingProjectPath?: string, thumbnailDataUrl?: string | null) => { + try { + const projectsDir = await getProjectsDir() + const trustedExistingProjectPath = isTrustedProjectPath(existingProjectPath) + ? 
existingProjectPath + : null + + if (trustedExistingProjectPath) { + await fs.writeFile(trustedExistingProjectPath, JSON.stringify(projectData, null, 2), 'utf-8') + setCurrentProjectPath(trustedExistingProjectPath) + await saveProjectThumbnail(trustedExistingProjectPath, thumbnailDataUrl) + await rememberRecentProject(trustedExistingProjectPath) + return { + success: true, + path: trustedExistingProjectPath, + message: 'Project saved successfully' + } + } + + const safeName = (suggestedName || `project-${Date.now()}`).replace(/[^a-zA-Z0-9-_]/g, '_') + const defaultName = safeName.endsWith(`.${PROJECT_FILE_EXTENSION}`) + ? safeName + : `${safeName}.${PROJECT_FILE_EXTENSION}` + + const result = await dialog.showSaveDialog({ + title: 'Save Recordly Project', + defaultPath: path.join(projectsDir, defaultName), + filters: [ + { name: 'Recordly Project', extensions: [PROJECT_FILE_EXTENSION] }, + { name: 'JSON', extensions: ['json'] } + ], + properties: ['createDirectory', 'showOverwriteConfirmation'] + }) + + if (result.canceled || !result.filePath) { + return { + success: false, + canceled: true, + message: 'Save project canceled' + } + } + + await fs.writeFile(result.filePath, JSON.stringify(projectData, null, 2), 'utf-8') + setCurrentProjectPath(result.filePath) + await saveProjectThumbnail(result.filePath, thumbnailDataUrl) + await rememberRecentProject(result.filePath) + + return { + success: true, + path: result.filePath, + message: 'Project saved successfully' + } + } catch (error) { + console.error('Failed to save project file:', error) + return { + success: false, + message: 'Failed to save project file', + error: String(error) + } + } + }) + + ipcMain.handle('load-project-file', async () => { + try { + const projectsDir = await getProjectsDir() + const result = await dialog.showOpenDialog({ + title: 'Open Recordly Project', + defaultPath: projectsDir, + filters: [ + { name: 'Recordly Project', extensions: [PROJECT_FILE_EXTENSION, 
...LEGACY_PROJECT_FILE_EXTENSIONS] }, + { name: 'JSON', extensions: ['json'] }, + { name: 'All Files', extensions: ['*'] } + ], + properties: ['openFile'] + }) + + if (result.canceled || result.filePaths.length === 0) { + return { success: false, canceled: true, message: 'Open project canceled' } + } + + return await loadProjectFromPath(result.filePaths[0]) + } catch (error) { + console.error('Failed to load project file:', error) + return { + success: false, + message: 'Failed to load project file', + error: String(error) + } + } + }) + + ipcMain.handle('load-current-project-file', async () => { + try { + if (!currentProjectPath) { + return { success: false, message: 'No active project' } + } + + return await loadProjectFromPath(currentProjectPath) + } catch (error) { + console.error('Failed to load current project file:', error) + return { + success: false, + message: 'Failed to load current project file', + error: String(error), + } + } + }) + + ipcMain.handle('get-projects-directory', async () => { + try { + return { + success: true, + path: await getProjectsDir(), + } + } catch (error) { + return { + success: false, + error: String(error), + } + } + }) + + ipcMain.handle('list-project-files', async () => { + try { + const library = await listProjectLibraryEntries() + return { + success: true, + projectsDir: library.projectsDir, + entries: library.entries, + } + } catch (error) { + return { + success: false, + projectsDir: null, + entries: [], + error: String(error), + } + } + }) + + ipcMain.handle('open-project-file-at-path', async (_, filePath: string) => { + try { + return await loadProjectFromPath(filePath) + } catch (error) { + console.error('Failed to open project file at path:', error) + return { + success: false, + message: 'Failed to open project file', + error: String(error), + } + } + }) + + ipcMain.handle('open-projects-directory', async () => { + try { + const projectsDir = await getProjectsDir() + const openPathResult = await 
shell.openPath(projectsDir) + if (openPathResult) { + return { success: false, error: openPathResult, message: 'Failed to open projects folder.' } + } + + return { success: true, path: projectsDir } + } catch (error) { + console.error('Failed to open projects folder:', error) + return { success: false, error: String(error), message: 'Failed to open projects folder.' } + } + }) + ipcMain.handle('set-current-video-path', async (_, path: string) => { + setCurrentVideoPath(normalizeVideoSourcePath(path) ?? path) + approveUserPath(currentVideoPath) + const resolvedSession = await resolveRecordingSession(currentVideoPath) + ?? { + videoPath: currentVideoPath!, + webcamPath: null, + timeOffsetMs: 0, + } + + setCurrentRecordingSession(resolvedSession) + await replaceApprovedSessionLocalReadPaths([ + resolvedSession.videoPath, + resolvedSession.webcamPath, + ]) + + if (resolvedSession.webcamPath) { + await persistRecordingSessionManifest(resolvedSession) + } + + setCurrentProjectPath(null) + return { success: true, webcamPath: resolvedSession.webcamPath ?? null } + }) + + ipcMain.handle('set-current-recording-session', async (_, session: { videoPath: string; webcamPath?: string | null; timeOffsetMs?: number }) => { + const normalizedVideoPath = normalizeVideoSourcePath(session.videoPath) ?? session.videoPath + setCurrentVideoPath(normalizedVideoPath) + setCurrentRecordingSession({ + videoPath: normalizedVideoPath, + webcamPath: normalizeVideoSourcePath(session.webcamPath ?? null), + timeOffsetMs: normalizeRecordingTimeOffsetMs(session.timeOffsetMs), + }); + await replaceApprovedSessionLocalReadPaths([ + currentRecordingSession!.videoPath, + currentRecordingSession!.webcamPath, + ]) + setCurrentProjectPath(null) + await persistRecordingSessionManifest(currentRecordingSession!) 
+ return { success: true } + }) + + ipcMain.handle('get-current-recording-session', () => { + if (!currentRecordingSession) { + return { success: false } + } + + return { + success: true, + session: currentRecordingSession, + } + }) + + ipcMain.handle('get-current-video-path', () => { + return currentVideoPath ? { success: true, path: currentVideoPath } : { success: false }; + }); + + ipcMain.handle('clear-current-video-path', () => { + setCurrentVideoPath(null); + setCurrentRecordingSession(null); + return { success: true }; + }); + + ipcMain.handle('delete-recording-file', async (_, filePath: string) => { + try { + if (!filePath || !isAutoRecordingPath(filePath)) { + return { success: false, error: 'Only auto-generated recordings can be deleted' }; + } + await fs.unlink(filePath); + // Also delete the cursor telemetry sidecar if it exists + const telemetryPath = getTelemetryPathForVideo(filePath); + await fs.unlink(telemetryPath).catch(() => {}); + if (currentVideoPath === filePath) { + setCurrentVideoPath(null); + setCurrentRecordingSession(null); + } + return { success: true }; + } catch (error) { + return { success: false, error: String(error) }; + } + }); + +} diff --git a/electron/ipc/register/recording.ts b/electron/ipc/register/recording.ts new file mode 100644 index 00000000..5be6495a --- /dev/null +++ b/electron/ipc/register/recording.ts @@ -0,0 +1,1159 @@ +import type { ChildProcessWithoutNullStreams } from "node:child_process"; +import { execFile, spawn } from "node:child_process"; +import fs from "node:fs/promises"; +import path from "node:path"; +import { promisify } from "node:util"; +import { app, BrowserWindow, desktopCapturer, dialog, ipcMain, shell, systemPreferences } from "electron"; +import { showCursor } from "../../cursorHider"; +import { ALLOW_RECORDLY_WINDOW_CAPTURE, CURSOR_SAMPLE_INTERVAL_MS } from "../constants"; +import type { SelectedSource, NativeMacRecordingOptions, PauseSegment, CursorTelemetryPoint } from "../types"; +import { + 
selectedSource, + nativeScreenRecordingActive, + setNativeScreenRecordingActive, + currentVideoPath, + nativeCaptureProcess, + setNativeCaptureProcess, + nativeCaptureOutputBuffer, + setNativeCaptureOutputBuffer, + nativeCaptureTargetPath, + setNativeCaptureTargetPath, + setNativeCaptureStopRequested, + nativeCaptureSystemAudioPath, + setNativeCaptureSystemAudioPath, + nativeCaptureMicrophonePath, + setNativeCaptureMicrophonePath, + nativeCapturePaused, + setNativeCapturePaused, + windowsCaptureProcess, + setWindowsCaptureProcess, + windowsCaptureTargetPath, + setWindowsCaptureTargetPath, + windowsNativeCaptureActive, + setWindowsNativeCaptureActive, + setWindowsCaptureStopRequested, + windowsCapturePaused, + setWindowsCapturePaused, + windowsSystemAudioPath, + setWindowsSystemAudioPath, + windowsMicAudioPath, + setWindowsMicAudioPath, + windowsPendingVideoPath, + setWindowsPendingVideoPath, + lastNativeCaptureDiagnostics, + ffmpegScreenRecordingActive, + setFfmpegScreenRecordingActive, + ffmpegCaptureProcess, + setFfmpegCaptureProcess, + ffmpegCaptureOutputBuffer, + setFfmpegCaptureOutputBuffer, + ffmpegCaptureTargetPath, + setFfmpegCaptureTargetPath, + cachedSystemCursorAssets, + setCachedSystemCursorAssets, + cachedSystemCursorAssetsSourceMtimeMs, + setCachedSystemCursorAssetsSourceMtimeMs, + setCursorCaptureInterval, + setCursorCaptureStartTimeMs, + setActiveCursorSamples, + setPendingCursorSamples, + setIsCursorCaptureActive, + setLastLeftClick, + setLinuxCursorScreenPoint, + windowsCaptureOutputBuffer, + setWindowsCaptureOutputBuffer, +} from "../state"; +import { + getRecordingsDir, + getScreen, + getMacPrivacySettingsUrl, + moveFileWithOverwrite, + parseWindowId, + normalizeVideoSourcePath, + getTelemetryPathForVideo, +} from "../utils"; +import { + ensureSwiftHelperBinary, + getSystemCursorHelperSourcePath, + getSystemCursorHelperBinaryPath, + getNativeCaptureHelperBinaryPath, + ensureNativeCaptureHelperBinary, + getWindowsCaptureExePath, +} from 
"../paths/binaries"; +import { getFfmpegBinaryPath } from "../ffmpeg/binary"; +import { + recordNativeCaptureDiagnostics, + getFileSizeIfPresent, + getCompanionAudioFallbackPaths, +} from "../recording/diagnostics"; +import { rememberApprovedLocalReadPath } from "../project/manager"; +import { + isNativeWindowsCaptureAvailable, + waitForWindowsCaptureStart, + waitForWindowsCaptureStop, + attachWindowsCaptureLifecycle, + muxNativeWindowsVideoWithAudio, +} from "../recording/windows"; +import { + waitForNativeCaptureStart, + waitForNativeCaptureStop, + muxNativeMacRecordingWithAudio, + attachNativeCaptureLifecycle, + finalizeStoredVideo, + recoverNativeMacCaptureOutput, +} from "../recording/mac"; +import { + buildFfmpegCaptureArgs, + waitForFfmpegCaptureStart, + waitForFfmpegCaptureStop, + getDisplayBoundsForSource, +} from "../recording/ffmpeg"; +import { resolveWindowsCaptureDisplay } from "../windowsCaptureSelection"; +import { + clamp, + stopCursorCapture, + sampleCursorPoint, + snapshotCursorTelemetryForPersistence, +} from "../cursor/telemetry"; +import { + startWindowBoundsCapture, + stopWindowBoundsCapture, +} from "../cursor/bounds"; +import { startInteractionCapture, stopInteractionCapture } from "../cursor/interaction"; +import { stopNativeCursorMonitor, startNativeCursorMonitor } from "../cursor/monitor"; + +const execFileAsync = promisify(execFile); + +async function getSystemCursorAssets() { + if (process.platform !== "darwin") { + setCachedSystemCursorAssets({}); + setCachedSystemCursorAssetsSourceMtimeMs(null); + return cachedSystemCursorAssets ?? 
{}; + } + const sourcePath = getSystemCursorHelperSourcePath(); + const sourceStat = await fs.stat(sourcePath); + if (cachedSystemCursorAssets && cachedSystemCursorAssetsSourceMtimeMs === sourceStat.mtimeMs) { + return cachedSystemCursorAssets; + } + const binaryPath = await ensureSwiftHelperBinary( + sourcePath, + getSystemCursorHelperBinaryPath(), + "system cursor helper", + "recordly-system-cursors", + ); + const { stdout } = await execFileAsync(binaryPath, [], { timeout: 15000, maxBuffer: 20 * 1024 * 1024 }); + const parsed = JSON.parse(stdout) as Record>; + const result = Object.fromEntries( + Object.entries(parsed).filter(([, asset]) => + typeof asset?.dataUrl === "string" && + typeof asset?.hotspotX === "number" && + typeof asset?.hotspotY === "number" && + typeof asset?.width === "number" && + typeof asset?.height === "number" + ), + ) as Record; + setCachedSystemCursorAssets(result); + setCachedSystemCursorAssetsSourceMtimeMs(sourceStat.mtimeMs); + return result; +} + +function normalizeDesktopSourceName(value: string) { + return value.trim().replace(/\s+/g, " ").toLowerCase(); +} + +export function registerRecordingHandlers( + onRecordingStateChange?: (recording: boolean, sourceName: string) => void, +) { + ipcMain.handle('start-native-screen-recording', async (_, source: SelectedSource, options?: NativeMacRecordingOptions) => { + // Windows native capture path + if (process.platform === 'win32') { + const windowsCaptureAvailable = await isNativeWindowsCaptureAvailable() + if (!windowsCaptureAvailable) { + return { success: false, message: 'Native Windows capture is not available on this system.' 
} + } + + if (windowsCaptureProcess && !windowsNativeCaptureActive) { + try { windowsCaptureProcess.kill() } catch { /* ignore */ } + setWindowsCaptureProcess(null) + setWindowsCaptureTargetPath(null) + setWindowsCaptureStopRequested(false) + } + + if (windowsCaptureProcess) { + return { success: false, message: 'A native Windows screen recording is already active.' } + } + + let wcProc: ChildProcessWithoutNullStreams | null = null + try { + const exePath = getWindowsCaptureExePath() + const recordingsDir = await getRecordingsDir() + const timestamp = Date.now() + const outputPath = path.join(recordingsDir, `recording-${timestamp}.mp4`) + const displayBounds = source?.id?.startsWith('window:') ? null : getDisplayBoundsForSource(source) + + const config: Record = { + outputPath, + fps: 60, + } + + if (options?.capturesSystemAudio) { + const audioPath = path.join(recordingsDir, `recording-${timestamp}.system.wav`) + config.captureSystemAudio = true + config.audioOutputPath = audioPath + setWindowsSystemAudioPath(audioPath) + } + + if (options?.capturesMicrophone) { + const micPath = path.join(recordingsDir, `recording-${timestamp}.mic.wav`) + config.captureMic = true + config.micOutputPath = micPath + if (options.microphoneLabel) { + config.micDeviceName = options.microphoneLabel + } + setWindowsMicAudioPath(micPath) + } + + const windowId = parseWindowId(source?.id) + if (windowId && source?.id?.startsWith('window:')) { + config.windowHandle = windowId + } else { + const resolvedDisplay = resolveWindowsCaptureDisplay( + source, + getScreen().getAllDisplays(), + getScreen().getPrimaryDisplay(), + ) + config.displayId = resolvedDisplay.displayId + + // Monitor handle IDs can drift across Electron/Windows capture boundaries, + // so also provide display bounds for a coordinate-based native fallback. 
+ config.displayX = Math.round(resolvedDisplay.bounds.x) + config.displayY = Math.round(resolvedDisplay.bounds.y) + config.displayW = Math.round(resolvedDisplay.bounds.width) + config.displayH = Math.round(resolvedDisplay.bounds.height) + } + + recordNativeCaptureDiagnostics({ + backend: 'windows-wgc', + phase: 'start', + sourceId: source?.id ?? null, + sourceType: source?.sourceType ?? 'unknown', + displayId: typeof config.displayId === 'number' ? config.displayId : null, + displayBounds, + windowHandle: typeof config.windowHandle === 'number' ? config.windowHandle : null, + helperPath: exePath, + outputPath, + systemAudioPath: windowsSystemAudioPath, + microphonePath: windowsMicAudioPath, + }) + + setWindowsCaptureOutputBuffer('') + setWindowsCaptureTargetPath(outputPath) + setWindowsCaptureStopRequested(false) + setWindowsCapturePaused(false) + wcProc = spawn(exePath, [JSON.stringify(config)], { + cwd: recordingsDir, + stdio: ['pipe', 'pipe', 'pipe'], + }) + setWindowsCaptureProcess(wcProc) + attachWindowsCaptureLifecycle(wcProc) + + wcProc.stdout.on('data', (chunk: Buffer) => { + setWindowsCaptureOutputBuffer(windowsCaptureOutputBuffer + chunk.toString()) + }) + wcProc.stderr.on('data', (chunk: Buffer) => { + setWindowsCaptureOutputBuffer(windowsCaptureOutputBuffer + chunk.toString()) + }) + + await waitForWindowsCaptureStart(wcProc) + setWindowsNativeCaptureActive(true) + setNativeScreenRecordingActive(true) + recordNativeCaptureDiagnostics({ + backend: 'windows-wgc', + phase: 'start', + sourceId: source?.id ?? null, + sourceType: source?.sourceType ?? 'unknown', + displayId: typeof config.displayId === 'number' ? config.displayId : null, + displayBounds, + windowHandle: typeof config.windowHandle === 'number' ? 
config.windowHandle : null, + helperPath: exePath, + outputPath, + systemAudioPath: windowsSystemAudioPath, + microphonePath: windowsMicAudioPath, + processOutput: windowsCaptureOutputBuffer.trim() || undefined, + }) + return { success: true } + } catch (error) { + recordNativeCaptureDiagnostics({ + backend: 'windows-wgc', + phase: 'start', + sourceId: source?.id ?? null, + sourceType: source?.sourceType ?? 'unknown', + helperPath: windowsCaptureTargetPath ? getWindowsCaptureExePath() : null, + outputPath: windowsCaptureTargetPath, + systemAudioPath: windowsSystemAudioPath, + microphonePath: windowsMicAudioPath, + processOutput: windowsCaptureOutputBuffer.trim() || undefined, + error: String(error), + }) + console.error('Failed to start native Windows capture:', error) + try { if (wcProc) wcProc.kill() } catch { /* ignore */ } + setWindowsNativeCaptureActive(false) + setNativeScreenRecordingActive(false) + setWindowsCaptureProcess(null) + setWindowsCaptureTargetPath(null) + setWindowsCaptureStopRequested(false) + setWindowsCapturePaused(false) + return { + success: false, + message: 'Failed to start native Windows capture', + error: String(error), + } + } + } + + if (process.platform !== 'darwin') { + return { success: false, message: 'Native screen recording is only available on macOS.' } + } + + if (nativeCaptureProcess && !nativeScreenRecordingActive) { + try { + nativeCaptureProcess.kill() + } catch { + // ignore stale helper cleanup failures + } + setNativeCaptureProcess(null) + setNativeCaptureTargetPath(null) + setNativeCaptureStopRequested(false) + } + + if (nativeCaptureProcess) { + return { success: false, message: 'A native screen recording is already active.' 
} + } + + let captProc: ChildProcessWithoutNullStreams | null = null + try { + const recordingsDir = await getRecordingsDir() + + // Warm up TCC: trigger an Electron-level screen capture API call so macOS + // activates the screen-recording grant for this process tree before the + // native helper binary spawns and calls SCStream.startCapture(). + try { + await desktopCapturer.getSources({ types: ['screen'], thumbnailSize: { width: 1, height: 1 } }) + } catch { + // non-fatal – the helper will report its own TCC status + } + + // Ensure microphone TCC is granted for this process tree when mic capture + // is requested, so the child helper inherits the grant. + if (options?.capturesMicrophone) { + const micStatus = systemPreferences.getMediaAccessStatus('microphone') + if (micStatus !== 'granted') { + await systemPreferences.askForMediaAccess('microphone') + } + } + + const appName = normalizeDesktopSourceName(String(source?.appName ?? '')) + const ownAppName = normalizeDesktopSourceName(app.getName()) + if ( + !ALLOW_RECORDLY_WINDOW_CAPTURE + && + source?.id?.startsWith('window:') + && appName + && (appName === ownAppName || appName === 'recordly') + ) { + return { success: false, message: 'Cannot record Recordly windows. Please select another app window.' } + } + + const helperPath = await ensureNativeCaptureHelperBinary() + const timestamp = Date.now() + const outputPath = path.join(recordingsDir, `recording-${timestamp}.mp4`) + const capturesSystemAudio = Boolean(options?.capturesSystemAudio) + const capturesMicrophone = Boolean(options?.capturesMicrophone) + const systemAudioOutputPath = capturesSystemAudio + ? path.join(recordingsDir, `recording-${timestamp}.system.m4a`) + : null + const microphoneOutputPath = capturesMicrophone + ? 
path.join(recordingsDir, `recording-${timestamp}.mic.m4a`) + : null + const config: Record = { + fps: 60, + outputPath, + capturesSystemAudio, + capturesMicrophone, + } + + if (options?.microphoneDeviceId) { + config.microphoneDeviceId = options.microphoneDeviceId + } + + if (options?.microphoneLabel) { + config.microphoneLabel = options.microphoneLabel + } + + if (systemAudioOutputPath) { + config.systemAudioOutputPath = systemAudioOutputPath + } + + if (microphoneOutputPath) { + config.microphoneOutputPath = microphoneOutputPath + } + + const windowId = parseWindowId(source?.id) + const screenId = Number(source?.display_id) + + if (Number.isFinite(windowId) && windowId && source?.id?.startsWith('window:')) { + config.windowId = windowId + } else if (Number.isFinite(screenId) && screenId > 0) { + config.displayId = screenId + } else { + config.displayId = Number(getScreen().getPrimaryDisplay().id) + } + + setNativeCaptureOutputBuffer('') + setNativeCaptureTargetPath(outputPath) + setNativeCaptureSystemAudioPath(systemAudioOutputPath) + setNativeCaptureMicrophonePath(microphoneOutputPath) + setNativeCaptureStopRequested(false) + setNativeCapturePaused(false) + captProc = spawn(helperPath, [JSON.stringify(config)], { + cwd: recordingsDir, + stdio: ['pipe', 'pipe', 'pipe'], + }) + setNativeCaptureProcess(captProc) + attachNativeCaptureLifecycle(captProc) + + captProc.stdout.on('data', (chunk: Buffer) => { + setNativeCaptureOutputBuffer(nativeCaptureOutputBuffer + chunk.toString()) + }) + captProc.stderr.on('data', (chunk: Buffer) => { + setNativeCaptureOutputBuffer(nativeCaptureOutputBuffer + chunk.toString()) + }) + + await waitForNativeCaptureStart(captProc) + setNativeScreenRecordingActive(true) + + // If the native helper reported MICROPHONE_CAPTURE_UNAVAILABLE, it started + // capture without microphone. Clear the mic path so the renderer can fall + // back to a browser-side sidecar recording for the microphone track. 
+ const micUnavailableNatively = nativeCaptureOutputBuffer.includes('MICROPHONE_CAPTURE_UNAVAILABLE') + if (micUnavailableNatively) { + setNativeCaptureMicrophonePath(null) + } + + recordNativeCaptureDiagnostics({ + backend: 'mac-screencapturekit', + phase: 'start', + sourceId: source?.id ?? null, + sourceType: source?.sourceType ?? 'unknown', + displayId: typeof config.displayId === 'number' ? config.displayId : null, + helperPath, + outputPath, + systemAudioPath: systemAudioOutputPath, + microphonePath: nativeCaptureMicrophonePath, + processOutput: nativeCaptureOutputBuffer.trim() || undefined, + }) + return { success: true, microphoneFallbackRequired: micUnavailableNatively } + } catch (error) { + console.error('Failed to start native ScreenCaptureKit recording:', error) + const errorStr = String(error) + + // Detect TCC (screen recording permission) errors and show a helpful dialog + if (errorStr.includes('declined TCC') || errorStr.includes('declined TCCs') || errorStr.includes('SCREEN_RECORDING_PERMISSION_DENIED')) { + const { response } = await dialog.showMessageBox({ + type: 'warning', + title: 'Screen Recording Permission Required', + message: 'Recordly needs screen recording permission to capture your screen.', + detail: 'Please open System Settings > Privacy & Security > Screen Recording, make sure Recordly is toggled ON, then try recording again.', + buttons: ['Open System Settings', 'Cancel'], + defaultId: 0, + cancelId: 1, + }) + if (response === 0) { + await shell.openExternal(getMacPrivacySettingsUrl('screen')) + } + try { if (captProc) captProc.kill() } catch { /* ignore */ } + setNativeScreenRecordingActive(false) + setNativeCaptureProcess(null) + setNativeCaptureTargetPath(null) + setNativeCaptureSystemAudioPath(null) + setNativeCaptureMicrophonePath(null) + setNativeCaptureStopRequested(false) + setNativeCapturePaused(false) + return { + success: false, + message: 'Screen recording permission not granted. 
Please allow access in System Settings and restart the app.', + userNotified: true, + } + } + + if (errorStr.includes('MICROPHONE_PERMISSION_DENIED')) { + const { response } = await dialog.showMessageBox({ + type: 'warning', + title: 'Microphone Permission Required', + message: 'Recordly needs microphone permission to record audio.', + detail: 'Please open System Settings > Privacy & Security > Microphone, make sure Recordly is toggled ON, then try recording again.', + buttons: ['Open System Settings', 'Cancel'], + defaultId: 0, + cancelId: 1, + }) + if (response === 0) { + await shell.openExternal(getMacPrivacySettingsUrl('microphone')) + } + try { if (captProc) captProc.kill() } catch { /* ignore */ } + setNativeScreenRecordingActive(false) + setNativeCaptureProcess(null) + setNativeCaptureTargetPath(null) + setNativeCaptureSystemAudioPath(null) + setNativeCaptureMicrophonePath(null) + setNativeCaptureStopRequested(false) + setNativeCapturePaused(false) + return { + success: false, + message: 'Microphone permission not granted. Please allow access in System Settings.', + userNotified: true, + } + } + + recordNativeCaptureDiagnostics({ + backend: 'mac-screencapturekit', + phase: 'start', + sourceId: source?.id ?? null, + sourceType: source?.sourceType ?? 
'unknown', + helperPath: getNativeCaptureHelperBinaryPath(), + outputPath: nativeCaptureTargetPath, + systemAudioPath: nativeCaptureSystemAudioPath, + microphonePath: nativeCaptureMicrophonePath, + processOutput: nativeCaptureOutputBuffer.trim() || undefined, + fileSizeBytes: await getFileSizeIfPresent(nativeCaptureTargetPath), + error: String(error), + }) + try { + if (captProc) captProc.kill() + } catch { + // ignore cleanup failures + } + setNativeScreenRecordingActive(false) + setNativeCaptureProcess(null) + setNativeCaptureTargetPath(null) + setNativeCaptureSystemAudioPath(null) + setNativeCaptureMicrophonePath(null) + setNativeCaptureStopRequested(false) + setNativeCapturePaused(false) + return { + success: false, + message: 'Failed to start native ScreenCaptureKit recording', + error: String(error), + } + } + }) + + ipcMain.handle('stop-native-screen-recording', async () => { + // Windows native capture stop path + if (process.platform === 'win32' && windowsNativeCaptureActive) { + try { + if (!windowsCaptureProcess) { + throw new Error('Native Windows capture process is not running') + } + + const proc = windowsCaptureProcess + const preferredVideoPath = windowsCaptureTargetPath + setWindowsCaptureStopRequested(true) + proc.stdin.write('stop\n') + const tempVideoPath = await waitForWindowsCaptureStop(proc) + setWindowsCaptureProcess(null) + setWindowsNativeCaptureActive(false) + setNativeScreenRecordingActive(false) + setWindowsCaptureTargetPath(null) + setWindowsCaptureStopRequested(false) + setWindowsCapturePaused(false) + + const finalVideoPath = preferredVideoPath ?? 
tempVideoPath + if (tempVideoPath !== finalVideoPath) { + await moveFileWithOverwrite(tempVideoPath, finalVideoPath) + } + + setWindowsPendingVideoPath(finalVideoPath) + recordNativeCaptureDiagnostics({ + backend: 'windows-wgc', + phase: 'stop', + outputPath: finalVideoPath, + systemAudioPath: windowsSystemAudioPath, + microphonePath: windowsMicAudioPath, + processOutput: windowsCaptureOutputBuffer.trim() || undefined, + fileSizeBytes: await getFileSizeIfPresent(finalVideoPath), + }) + return { success: true, path: finalVideoPath } + } catch (error) { + console.error('Failed to stop native Windows capture:', error) + const fallbackPath = windowsCaptureTargetPath + setWindowsNativeCaptureActive(false) + setNativeScreenRecordingActive(false) + setWindowsCaptureProcess(null) + setWindowsCaptureTargetPath(null) + setWindowsCaptureStopRequested(false) + setWindowsCapturePaused(false) + setWindowsSystemAudioPath(null) + setWindowsMicAudioPath(null) + setWindowsPendingVideoPath(null) + + if (fallbackPath) { + try { + await fs.access(fallbackPath) + setWindowsPendingVideoPath(fallbackPath) + recordNativeCaptureDiagnostics({ + backend: 'windows-wgc', + phase: 'stop', + outputPath: fallbackPath, + systemAudioPath: windowsSystemAudioPath, + microphonePath: windowsMicAudioPath, + processOutput: windowsCaptureOutputBuffer.trim() || undefined, + fileSizeBytes: await getFileSizeIfPresent(fallbackPath), + error: String(error), + }) + return { success: true, path: fallbackPath } + } catch { + // File doesn't exist + } + } + + recordNativeCaptureDiagnostics({ + backend: 'windows-wgc', + phase: 'stop', + outputPath: fallbackPath, + systemAudioPath: windowsSystemAudioPath, + microphonePath: windowsMicAudioPath, + processOutput: windowsCaptureOutputBuffer.trim() || undefined, + error: String(error), + }) + + return { + success: false, + message: 'Failed to stop native Windows capture', + error: String(error), + } + } + } + + if (process.platform !== 'darwin') { + return { success: 
false, message: 'Native screen recording is only available on macOS.' } + } + + if (!nativeScreenRecordingActive) { + const recovered = await recoverNativeMacCaptureOutput() + if (recovered) { + return recovered + } + + return { success: false, message: 'No native screen recording is active.' } + } + + try { + if (!nativeCaptureProcess) { + throw new Error('Native capture helper process is not running') + } + + const process = nativeCaptureProcess + const preferredVideoPath = nativeCaptureTargetPath + const preferredSystemAudioPath = nativeCaptureSystemAudioPath + const preferredMicrophonePath = nativeCaptureMicrophonePath + console.log('[stop-native] Audio paths — system:', preferredSystemAudioPath, 'mic:', preferredMicrophonePath) + setNativeCaptureStopRequested(true) + process.stdin.write('stop\n') + const tempVideoPath = await waitForNativeCaptureStop(process) + console.log('[stop-native] Helper stopped, tempVideoPath:', tempVideoPath) + setNativeCaptureProcess(null) + setNativeScreenRecordingActive(false) + setNativeCaptureTargetPath(null) + setNativeCaptureSystemAudioPath(null) + setNativeCaptureMicrophonePath(null) + setNativeCaptureStopRequested(false) + setNativeCapturePaused(false) + + const finalVideoPath = preferredVideoPath ?? 
tempVideoPath + if (tempVideoPath !== finalVideoPath) { + await moveFileWithOverwrite(tempVideoPath, finalVideoPath) + } + + if (preferredSystemAudioPath || preferredMicrophonePath) { + console.log('[stop-native] Attempting audio mux (merging separate tracks) into:', finalVideoPath) + try { + await muxNativeMacRecordingWithAudio(finalVideoPath, preferredSystemAudioPath, preferredMicrophonePath) + console.log('[stop-native] Audio mux completed successfully') + } catch (error) { + console.warn('[stop-native] Audio mux failed (video still has inline audio):', error) + } + } else { + console.log('[stop-native] No separate audio tracks to mux') + } + + return await finalizeStoredVideo(finalVideoPath) + } catch (error) { + console.error('Failed to stop native ScreenCaptureKit recording:', error) + const fallbackPath = nativeCaptureTargetPath + const fallbackSystemAudioPath = nativeCaptureSystemAudioPath + const fallbackMicrophonePath = nativeCaptureMicrophonePath + const fallbackFileSizeBytes = await getFileSizeIfPresent(fallbackPath) + setNativeScreenRecordingActive(false) + setNativeCaptureProcess(null) + setNativeCaptureTargetPath(null) + setNativeCaptureSystemAudioPath(null) + setNativeCaptureMicrophonePath(null) + setNativeCaptureStopRequested(false) + setNativeCapturePaused(false) + + recordNativeCaptureDiagnostics({ + backend: 'mac-screencapturekit', + phase: 'stop', + sourceId: lastNativeCaptureDiagnostics?.sourceId ?? null, + sourceType: lastNativeCaptureDiagnostics?.sourceType ?? 'unknown', + displayId: lastNativeCaptureDiagnostics?.displayId ?? null, + displayBounds: lastNativeCaptureDiagnostics?.displayBounds ?? null, + windowHandle: lastNativeCaptureDiagnostics?.windowHandle ?? null, + helperPath: lastNativeCaptureDiagnostics?.helperPath ?? 
null, + outputPath: fallbackPath, + systemAudioPath: fallbackSystemAudioPath, + microphonePath: fallbackMicrophonePath, + osRelease: lastNativeCaptureDiagnostics?.osRelease, + supported: lastNativeCaptureDiagnostics?.supported, + helperExists: lastNativeCaptureDiagnostics?.helperExists, + processOutput: nativeCaptureOutputBuffer.trim() || undefined, + fileSizeBytes: fallbackFileSizeBytes, + error: String(error), + }) + + // Try to recover: if the target file exists on disk, finalize with it + if (fallbackPath) { + try { + await fs.access(fallbackPath) + console.log('[stop-native-screen-recording] Recovering with fallback path:', fallbackPath) + if (fallbackSystemAudioPath || fallbackMicrophonePath) { + try { + await muxNativeMacRecordingWithAudio( + fallbackPath, + fallbackSystemAudioPath, + fallbackMicrophonePath, + ) + } catch (muxError) { + console.warn('Failed to mux recovered native macOS audio into capture:', muxError) + } + } + return await finalizeStoredVideo(fallbackPath) + } catch { + // File doesn't exist or isn't accessible + } + } + + const recovered = await recoverNativeMacCaptureOutput() + if (recovered) { + return recovered + } + + return { + success: false, + message: 'Failed to stop native ScreenCaptureKit recording', + error: String(error), + } + } + }) + + ipcMain.handle('recover-native-screen-recording', async () => { + if (process.platform !== 'darwin') { + return { success: false, message: 'Native screen recording recovery is only available on macOS.' } + } + + const recovered = await recoverNativeMacCaptureOutput() + if (recovered) { + return recovered + } + + return { + success: false, + message: 'No recoverable native macOS recording output was found.', + } + }) + + ipcMain.handle('pause-native-screen-recording', async () => { + if (process.platform === 'win32') { + if (!windowsNativeCaptureActive || !windowsCaptureProcess) { + return { success: false, message: 'No native Windows screen recording is active.' 
} + } + + if (windowsCapturePaused) { + return { success: true } + } + + try { + windowsCaptureProcess.stdin.write('pause\n') + setWindowsCapturePaused(true) + return { success: true } + } catch (error) { + return { success: false, message: 'Failed to pause native Windows capture', error: String(error) } + } + } + + if (process.platform !== 'darwin') { + return { success: false, message: 'Native screen recording is only available on macOS.' } + } + + if (!nativeScreenRecordingActive || !nativeCaptureProcess) { + return { success: false, message: 'No native screen recording is active.' } + } + + if (nativeCapturePaused) { + return { success: true } + } + + try { + nativeCaptureProcess.stdin.write('pause\n') + setNativeCapturePaused(true) + return { success: true } + } catch (error) { + return { success: false, message: 'Failed to pause native screen recording', error: String(error) } + } + }) + + ipcMain.handle('resume-native-screen-recording', async () => { + if (process.platform === 'win32') { + if (!windowsNativeCaptureActive || !windowsCaptureProcess) { + return { success: false, message: 'No native Windows screen recording is active.' } + } + + if (!windowsCapturePaused) { + return { success: true } + } + + try { + windowsCaptureProcess.stdin.write('resume\n') + setWindowsCapturePaused(false) + return { success: true } + } catch (error) { + return { success: false, message: 'Failed to resume native Windows capture', error: String(error) } + } + } + + if (process.platform !== 'darwin') { + return { success: false, message: 'Native screen recording is only available on macOS.' } + } + + if (!nativeScreenRecordingActive || !nativeCaptureProcess) { + return { success: false, message: 'No native screen recording is active.' 
} + } + + if (!nativeCapturePaused) { + return { success: true } + } + + try { + nativeCaptureProcess.stdin.write('resume\n') + setNativeCapturePaused(false) + return { success: true } + } catch (error) { + return { success: false, message: 'Failed to resume native screen recording', error: String(error) } + } + }) + + ipcMain.handle('get-system-cursor-assets', async () => { + try { + return { success: true, cursors: await getSystemCursorAssets() } + } catch (error) { + console.error('Failed to load system cursor assets:', error) + return { success: false, cursors: {}, error: String(error) } + } + }) + + ipcMain.handle('is-native-windows-capture-available', async () => { + return { available: await isNativeWindowsCaptureAvailable() } + }) + + ipcMain.handle('get-last-native-capture-diagnostics', async () => { + return { success: true, diagnostics: lastNativeCaptureDiagnostics } + }) + + ipcMain.handle('get-video-audio-fallback-paths', async (_event, videoPath: string) => { + if (!videoPath) { + return { success: true, paths: [] } + } + + try { + const paths = await getCompanionAudioFallbackPaths(videoPath) + await Promise.all([ + rememberApprovedLocalReadPath(videoPath), + ...paths.map((fallbackPath) => rememberApprovedLocalReadPath(fallbackPath)), + ]) + return { success: true, paths } + } catch (error) { + console.error('Failed to resolve companion audio fallback paths:', error) + return { success: false, paths: [], error: String(error) } + } + }) + + ipcMain.handle('mux-native-windows-recording', async (_event, pauseSegments?: PauseSegment[]) => { + const videoPath = windowsPendingVideoPath + setWindowsPendingVideoPath(null) + + if (!videoPath) { + return { success: false, message: 'No native Windows video pending for mux' } + } + + try { + if (windowsSystemAudioPath || windowsMicAudioPath) { + await muxNativeWindowsVideoWithAudio(videoPath, windowsSystemAudioPath, windowsMicAudioPath, pauseSegments ?? 
[]) + setWindowsSystemAudioPath(null) + setWindowsMicAudioPath(null) + } + + recordNativeCaptureDiagnostics({ + backend: 'windows-wgc', + phase: 'mux', + outputPath: videoPath, + fileSizeBytes: await getFileSizeIfPresent(videoPath), + }) + return await finalizeStoredVideo(videoPath) + } catch (error) { + console.error('Failed to mux native Windows recording:', error) + recordNativeCaptureDiagnostics({ + backend: 'windows-wgc', + phase: 'mux', + outputPath: videoPath, + systemAudioPath: windowsSystemAudioPath, + microphonePath: windowsMicAudioPath, + fileSizeBytes: await getFileSizeIfPresent(videoPath), + error: String(error), + }) + setWindowsSystemAudioPath(null) + setWindowsMicAudioPath(null) + try { + return await finalizeStoredVideo(videoPath) + } catch { + return { success: false, message: 'Failed to mux native Windows recording', error: String(error) } + } + } + }) + + ipcMain.handle('start-ffmpeg-recording', async (_, source: SelectedSource) => { + if (ffmpegCaptureProcess) { + return { success: false, message: 'An FFmpeg recording is already active.' 
} + } + + try { + const recordingsDir = await getRecordingsDir() + const ffmpegPath = getFfmpegBinaryPath() + const outputPath = path.join(recordingsDir, `recording-${Date.now()}.mp4`) + const args = await buildFfmpegCaptureArgs(source, outputPath) + + setFfmpegCaptureOutputBuffer('') + setFfmpegCaptureTargetPath(outputPath) + const ffProc = spawn(ffmpegPath, args, { + cwd: recordingsDir, + stdio: ['pipe', 'pipe', 'pipe'], + }) + setFfmpegCaptureProcess(ffProc) + + ffProc.stdout.on('data', (chunk: Buffer) => { + setFfmpegCaptureOutputBuffer(ffmpegCaptureOutputBuffer + chunk.toString()) + }) + ffProc.stderr.on('data', (chunk: Buffer) => { + setFfmpegCaptureOutputBuffer(ffmpegCaptureOutputBuffer + chunk.toString()) + }) + + await waitForFfmpegCaptureStart(ffProc) + setFfmpegScreenRecordingActive(true) + return { success: true } + } catch (error) { + console.error('Failed to start FFmpeg recording:', error) + setFfmpegScreenRecordingActive(false) + setFfmpegCaptureProcess(null) + setFfmpegCaptureTargetPath(null) + return { + success: false, + message: 'Failed to start FFmpeg recording', + error: String(error), + } + } + }) + + ipcMain.handle('stop-ffmpeg-recording', async () => { + if (!ffmpegScreenRecordingActive) { + return { success: false, message: 'No FFmpeg recording is active.' 
} + } + + try { + if (!ffmpegCaptureProcess || !ffmpegCaptureTargetPath) { + throw new Error('FFmpeg process is not running') + } + + const process = ffmpegCaptureProcess + const outputPath = ffmpegCaptureTargetPath + process.stdin.write('q\n') + const finalVideoPath = await waitForFfmpegCaptureStop(process, outputPath) + + setFfmpegCaptureProcess(null) + setFfmpegCaptureTargetPath(null) + setFfmpegScreenRecordingActive(false) + + return await finalizeStoredVideo(finalVideoPath) + } catch (error) { + console.error('Failed to stop FFmpeg recording:', error) + setFfmpegCaptureProcess(null) + setFfmpegCaptureTargetPath(null) + setFfmpegScreenRecordingActive(false) + return { + success: false, + message: 'Failed to stop FFmpeg recording', + error: String(error), + } + } + }) + + + + ipcMain.handle('store-microphone-sidecar', async (_, audioData: ArrayBuffer, videoPath: string) => { + try { + const baseName = videoPath.replace(/\.[^.]+$/, '') + const sidecarPath = `${baseName}.mic.webm` + await fs.writeFile(sidecarPath, Buffer.from(audioData)) + return { success: true, path: sidecarPath } + } catch (error) { + console.error('Failed to store microphone sidecar:', error) + return { success: false, error: String(error) } + } + }) + + ipcMain.handle('store-recorded-video', async (_, videoData: ArrayBuffer, fileName: string) => { + try { + const recordingsDir = await getRecordingsDir() + const videoPath = path.join(recordingsDir, fileName) + await fs.writeFile(videoPath, Buffer.from(videoData)) + return await finalizeStoredVideo(videoPath) + } catch (error) { + console.error('Failed to store video:', error) + return { + success: false, + message: 'Failed to store video', + error: String(error) + } + } + }) + + + + ipcMain.handle('get-recorded-video-path', async () => { + try { + const recordingsDir = await getRecordingsDir() + const files = await fs.readdir(recordingsDir) + const videoFiles = files.filter(file => /\.(webm|mov|mp4)$/i.test(file)) + + if (videoFiles.length === 
0) { + return { success: false, message: 'No recorded video found' } + } + + const latestVideo = videoFiles.sort().reverse()[0] + const videoPath = path.join(recordingsDir, latestVideo) + + return { success: true, path: videoPath } + } catch (error) { + console.error('Failed to get video path:', error) + return { success: false, message: 'Failed to get video path', error: String(error) } + } + }) + + ipcMain.handle('set-recording-state', (_, recording: boolean) => { + if (recording) { + stopCursorCapture() + stopInteractionCapture() + startWindowBoundsCapture() + void startNativeCursorMonitor() + setIsCursorCaptureActive(true) + setActiveCursorSamples([]) + setPendingCursorSamples([]) + setCursorCaptureStartTimeMs(Date.now()) + setLinuxCursorScreenPoint(null) + setLastLeftClick(null) + sampleCursorPoint() + setCursorCaptureInterval(setInterval(sampleCursorPoint, CURSOR_SAMPLE_INTERVAL_MS)) + void startInteractionCapture() + } else { + setIsCursorCaptureActive(false) + stopCursorCapture() + stopInteractionCapture() + stopWindowBoundsCapture() + stopNativeCursorMonitor() + showCursor() + setLinuxCursorScreenPoint(null) + snapshotCursorTelemetryForPersistence() + setActiveCursorSamples([]) + } + + const source = selectedSource || { name: 'Screen' } + BrowserWindow.getAllWindows().forEach((window) => { + if (!window.isDestroyed()) { + window.webContents.send('recording-state-changed', { + recording, + sourceName: source.name, + }) + } + }) + + if (onRecordingStateChange) { + onRecordingStateChange(recording, source.name) + } + }) + + ipcMain.handle('get-cursor-telemetry', async (_, videoPath?: string) => { + const targetVideoPath = normalizeVideoSourcePath(videoPath ?? 
currentVideoPath) + if (!targetVideoPath) { + return { success: true, samples: [] } + } + + const telemetryPath = getTelemetryPathForVideo(targetVideoPath) + try { + const content = await fs.readFile(telemetryPath, 'utf-8') + const parsed = JSON.parse(content) + const rawSamples = Array.isArray(parsed) + ? parsed + : (Array.isArray(parsed?.samples) ? parsed.samples : []) + + const samples: CursorTelemetryPoint[] = rawSamples + .filter((sample: unknown) => Boolean(sample && typeof sample === 'object')) + .map((sample: unknown) => { + const point = sample as Partial + return { + timeMs: typeof point.timeMs === 'number' && Number.isFinite(point.timeMs) ? Math.max(0, point.timeMs) : 0, + cx: typeof point.cx === 'number' && Number.isFinite(point.cx) ? clamp(point.cx, 0, 1) : 0.5, + cy: typeof point.cy === 'number' && Number.isFinite(point.cy) ? clamp(point.cy, 0, 1) : 0.5, + interactionType: point.interactionType === 'click' + || point.interactionType === 'double-click' + || point.interactionType === 'right-click' + || point.interactionType === 'middle-click' + || point.interactionType === 'move' + || point.interactionType === 'mouseup' + ? point.interactionType + : undefined, + cursorType: point.cursorType === 'arrow' + || point.cursorType === 'text' + || point.cursorType === 'pointer' + || point.cursorType === 'crosshair' + || point.cursorType === 'open-hand' + || point.cursorType === 'closed-hand' + || point.cursorType === 'resize-ew' + || point.cursorType === 'resize-ns' + || point.cursorType === 'not-allowed' + ? 
point.cursorType + : undefined, + } + }) + .sort((a: CursorTelemetryPoint, b: CursorTelemetryPoint) => a.timeMs - b.timeMs) + + return { success: true, samples } + } catch (error) { + const nodeError = error as NodeJS.ErrnoException + if (nodeError.code === 'ENOENT') { + return { success: true, samples: [] } + } + console.error('Failed to load cursor telemetry:', error) + return { success: false, message: 'Failed to load cursor telemetry', error: String(error), samples: [] } + } + }) + + +} diff --git a/electron/ipc/register/settings.ts b/electron/ipc/register/settings.ts new file mode 100644 index 00000000..9d323ac5 --- /dev/null +++ b/electron/ipc/register/settings.ts @@ -0,0 +1,195 @@ +import fs from "node:fs/promises"; +import { app, ipcMain } from "electron"; +import { hideCursor } from "../../cursorHider"; +import { closeCountdownWindow, createCountdownWindow, getCountdownWindow } from "../../windows"; +import { + SHORTCUTS_FILE, + RECORDINGS_SETTINGS_FILE, + COUNTDOWN_SETTINGS_FILE, +} from "../constants"; +import { + countdownTimer, + setCountdownTimer, + countdownCancelled, + setCountdownCancelled, + countdownInProgress, + setCountdownInProgress, + countdownRemaining, + setCountdownRemaining, +} from "../state"; + +export function registerSettingsHandlers() { + ipcMain.handle('app:getVersion', () => { + return app.getVersion() + }) + + ipcMain.handle('get-platform', () => { + return process.platform; + }); + + // --------------------------------------------------------------------------- + // Cursor hiding for the browser-capture fallback. + // The IPC promise resolves only after the cursor hide attempt completes. 
+ // --------------------------------------------------------------------------- + ipcMain.handle('hide-cursor', () => { + if (process.platform !== 'win32') { + return { success: true } + } + + return { success: hideCursor() } + }) + + ipcMain.handle('get-shortcuts', async () => { + try { + const data = await fs.readFile(SHORTCUTS_FILE, 'utf-8'); + return JSON.parse(data); + } catch { + return null; + } + }); + + ipcMain.handle('save-shortcuts', async (_, shortcuts: unknown) => { + try { + await fs.writeFile(SHORTCUTS_FILE, JSON.stringify(shortcuts, null, 2), 'utf-8'); + return { success: true }; + } catch (error) { + console.error('Failed to save shortcuts:', error); + return { success: false, error: String(error) }; + } + }); + + // --------------------------------------------------------------------------- + // Countdown timer before recording + // --------------------------------------------------------------------------- + ipcMain.handle('get-recording-preferences', async () => { + try { + const content = await fs.readFile(RECORDINGS_SETTINGS_FILE, 'utf-8') + const parsed = JSON.parse(content) as Record + return { + success: true, + microphoneEnabled: parsed.microphoneEnabled === true, + microphoneDeviceId: typeof parsed.microphoneDeviceId === 'string' ? 
parsed.microphoneDeviceId : undefined, + systemAudioEnabled: parsed.systemAudioEnabled !== false, + } + } catch { + return { success: true, microphoneEnabled: false, microphoneDeviceId: undefined, systemAudioEnabled: true } + } + }) + + ipcMain.handle('set-recording-preferences', async (_, prefs: { microphoneEnabled?: boolean; microphoneDeviceId?: string; systemAudioEnabled?: boolean }) => { + try { + let existing: Record = {} + try { + const content = await fs.readFile(RECORDINGS_SETTINGS_FILE, 'utf-8') + existing = JSON.parse(content) as Record + } catch { + // file doesn't exist yet + } + const merged = { ...existing, ...prefs } + await fs.writeFile(RECORDINGS_SETTINGS_FILE, JSON.stringify(merged, null, 2), 'utf-8') + return { success: true } + } catch (error) { + console.error('Failed to save recording preferences:', error) + return { success: false, error: String(error) } + } + }) + + ipcMain.handle('get-countdown-delay', async () => { + try { + const content = await fs.readFile(COUNTDOWN_SETTINGS_FILE, 'utf-8') + const parsed = JSON.parse(content) as { delay?: number } + return { success: true, delay: parsed.delay ?? 
3 } + } catch { + return { success: true, delay: 3 } + } + }) + + ipcMain.handle('set-countdown-delay', async (_, delay: number) => { + try { + await fs.writeFile(COUNTDOWN_SETTINGS_FILE, JSON.stringify({ delay }, null, 2), 'utf-8') + return { success: true } + } catch (error) { + console.error('Failed to save countdown delay:', error) + return { success: false, error: String(error) } + } + }) + + ipcMain.handle('start-countdown', async (_, seconds: number) => { + if (countdownInProgress) { + return { success: false, error: 'Countdown already in progress' } + } + + setCountdownInProgress(true) + setCountdownCancelled(false) + setCountdownRemaining(seconds) + + const countdownWin = createCountdownWindow() + + if (countdownWin.webContents.isLoadingMainFrame()) { + await new Promise((resolve) => { + countdownWin.webContents.once('did-finish-load', () => { + resolve() + }) + }) + } + + return new Promise<{ success: boolean; cancelled?: boolean }>((resolve) => { + let remaining = seconds + setCountdownRemaining(remaining) + + countdownWin.webContents.send('countdown-tick', remaining) + + setCountdownTimer(setInterval(() => { + if (countdownCancelled) { + if (countdownTimer) { + clearInterval(countdownTimer) + setCountdownTimer(null) + } + closeCountdownWindow() + setCountdownInProgress(false) + setCountdownRemaining(null) + resolve({ success: false, cancelled: true }) + return + } + + remaining-- + setCountdownRemaining(remaining) + + if (remaining <= 0) { + if (countdownTimer) { + clearInterval(countdownTimer) + setCountdownTimer(null) + } + closeCountdownWindow() + setCountdownInProgress(false) + setCountdownRemaining(null) + resolve({ success: true }) + } else { + const win = getCountdownWindow() + if (win && !win.isDestroyed()) { + win.webContents.send('countdown-tick', remaining) + } + } + }, 1000)) + }) + }) + + ipcMain.handle('cancel-countdown', () => { + setCountdownCancelled(true) + setCountdownInProgress(false) + setCountdownRemaining(null) + if 
(countdownTimer) { + clearInterval(countdownTimer) + setCountdownTimer(null) + } + closeCountdownWindow() + return { success: true } + }) + + ipcMain.handle('get-active-countdown', () => { + return { + success: true, + seconds: countdownInProgress ? countdownRemaining : null, + } + }) +} diff --git a/electron/ipc/register/sources.ts b/electron/ipc/register/sources.ts new file mode 100644 index 00000000..81eb82da --- /dev/null +++ b/electron/ipc/register/sources.ts @@ -0,0 +1,448 @@ +import { execFile } from "node:child_process"; +import { promisify } from "node:util"; +import { app, BrowserWindow, desktopCapturer, ipcMain } from "electron"; +import { ALLOW_RECORDLY_WINDOW_CAPTURE } from "../constants"; +import { selectedSource, setSelectedSource } from "../state"; +import type { SelectedSource } from "../types"; +import { getScreen, parseWindowId } from "../utils"; +import { getDisplayBoundsForSource } from "../recording/ffmpeg"; +import { + getNativeMacWindowSources, + resolveMacWindowBounds, + resolveWindowsWindowBounds, + resolveLinuxWindowBounds, + stopWindowBoundsCapture, +} from "../cursor/bounds"; + +const execFileAsync = promisify(execFile); + +function normalizeDesktopSourceName(value: string) { + return value.trim().replace(/\s+/g, " ").toLowerCase(); +} + +function hasUsableSourceThumbnail( + thumbnail: + | { + isEmpty: () => boolean; + getSize: () => { width: number; height: number }; + } + | null + | undefined, +) { + if (!thumbnail || thumbnail.isEmpty()) return false; + const size = thumbnail.getSize(); + return size.width > 1 && size.height > 1; +} + +function broadcastSelectedSourceChange() { + for (const window of BrowserWindow.getAllWindows()) { + if (!window.isDestroyed()) { + window.webContents.send("selected-source-changed", selectedSource); + } + } +} + +export function registerSourceHandlers({ + createEditorWindow, + createSourceSelectorWindow, + getSourceSelectorWindow, +}: { + createEditorWindow: () => void; + createSourceSelectorWindow: 
() => BrowserWindow; + getSourceSelectorWindow: () => BrowserWindow | null; +}) { + ipcMain.handle("get-sources", async (_, opts) => { + const includeScreens = Array.isArray(opts?.types) ? opts.types.includes("screen") : true; + const includeWindows = Array.isArray(opts?.types) ? opts.types.includes("window") : true; + const electronTypes = [ + ...(includeScreens ? ["screen" as const] : []), + ...(includeWindows ? ["window" as const] : []), + ]; + const electronSources = + electronTypes.length > 0 + ? await desktopCapturer + .getSources({ + ...opts, + types: electronTypes, + }) + .catch((error) => { + console.warn( + "desktopCapturer.getSources failed (screen recording permission may be missing):", + error, + ); + return []; + }) + : []; + const ownWindowNames = new Set( + [ + app.getName(), + "Recordly", + ...BrowserWindow.getAllWindows().flatMap((win) => { + const title = win.getTitle().trim(); + return title ? [title] : []; + }), + ] + .map((name) => normalizeDesktopSourceName(name)) + .filter(Boolean), + ); + const ownAppName = normalizeDesktopSourceName(app.getName()); + + const displays = includeScreens + ? [...getScreen().getAllDisplays()].sort( + (left, right) => + left.bounds.x - right.bounds.x || + left.bounds.y - right.bounds.y || + left.id - right.id, + ) + : []; + const primaryDisplayId = includeScreens ? String(getScreen().getPrimaryDisplay().id) : ""; + const electronScreenSourcesByDisplayId = new Map( + electronSources + .filter((source) => source.id.startsWith("screen:")) + .map((source) => [String(source.display_id ?? ""), source] as const), + ); + + const screenSources = displays.map((display, index) => { + const displayId = String(display.id); + const matchedSource = electronScreenSourcesByDisplayId.get(displayId); + const displayName = + displayId === primaryDisplayId + ? `Screen ${index + 1} (Primary)` + : `Screen ${index + 1}`; + + return { + id: matchedSource?.id ?? 
`screen:fallback:${displayId}`, + name: displayName, + originalName: matchedSource?.name ?? displayName, + display_id: displayId, + thumbnail: matchedSource?.thumbnail ? matchedSource.thumbnail.toDataURL() : null, + appIcon: matchedSource?.appIcon ? matchedSource.appIcon.toDataURL() : null, + sourceType: "screen" as const, + }; + }); + + if (process.platform !== "darwin" || !includeWindows) { + const windowSources = electronSources + .filter((source) => source.id.startsWith("window:")) + .filter((source) => hasUsableSourceThumbnail(source.thumbnail)) + .filter((source) => { + const normalizedName = normalizeDesktopSourceName(source.name); + if (!normalizedName) { + return true; + } + + if (ALLOW_RECORDLY_WINDOW_CAPTURE && normalizedName.includes("recordly")) { + return true; + } + + for (const ownName of ownWindowNames) { + if (!ownName) continue; + if (normalizedName === ownName) { + return false; + } + } + + return true; + }) + .map((source) => ({ + id: source.id, + name: source.name, + originalName: source.name, + display_id: source.display_id, + thumbnail: source.thumbnail ? source.thumbnail.toDataURL() : null, + appIcon: source.appIcon ? source.appIcon.toDataURL() : null, + sourceType: "window" as const, + })); + + return [...screenSources, ...windowSources]; + } + + try { + const nativeWindowSources = await getNativeMacWindowSources(); + const electronWindowSourceMap = new Map( + electronSources + .filter((source) => source.id.startsWith("window:")) + .map((source) => [source.id, source] as const), + ); + + const mergedWindowSources = nativeWindowSources + .filter((source) => { + const normalizedWindowName = normalizeDesktopSourceName( + source.windowTitle ?? source.name, + ); + const normalizedAppName = normalizeDesktopSourceName(source.appName ?? 
""); + + if ( + !ALLOW_RECORDLY_WINDOW_CAPTURE && + normalizedAppName && + normalizedAppName === ownAppName + ) { + return false; + } + + if ( + ALLOW_RECORDLY_WINDOW_CAPTURE && + (normalizedAppName === "recordly" || + normalizedWindowName?.includes("recordly")) + ) { + return true; + } + + if (!normalizedWindowName) { + return true; + } + + for (const ownName of ownWindowNames) { + if (!ownName) continue; + if (normalizedWindowName === ownName) { + return false; + } + } + + return true; + }) + .map((source) => { + const electronWindowSource = electronWindowSourceMap.get(source.id); + return { + id: source.id, + name: source.name, + originalName: source.name, + display_id: source.display_id ?? electronWindowSource?.display_id ?? "", + thumbnail: electronWindowSource?.thumbnail + ? electronWindowSource.thumbnail.toDataURL() + : null, + appIcon: + source.appIcon ?? + (electronWindowSource?.appIcon + ? electronWindowSource.appIcon.toDataURL() + : null), + appName: source.appName, + windowTitle: source.windowTitle, + sourceType: "window" as const, + }; + }); + + return [...screenSources, ...mergedWindowSources]; + } catch (error) { + console.warn("Falling back to Electron window enumeration on macOS:", error); + + const windowSources = electronSources + .filter((source) => source.id.startsWith("window:")) + .filter((source) => { + const normalizedName = normalizeDesktopSourceName(source.name); + if (!normalizedName) { + return true; + } + + if (ALLOW_RECORDLY_WINDOW_CAPTURE && normalizedName.includes("recordly")) { + return true; + } + + for (const ownName of ownWindowNames) { + if (!ownName) continue; + if ( + normalizedName === ownName || + normalizedName.includes(ownName) || + ownName.includes(normalizedName) + ) { + return false; + } + } + + return true; + }) + .map((source) => ({ + id: source.id, + name: source.name, + originalName: source.name, + display_id: source.display_id, + thumbnail: source.thumbnail ? 
source.thumbnail.toDataURL() : null, + appIcon: source.appIcon ? source.appIcon.toDataURL() : null, + sourceType: "window" as const, + })); + + return [...screenSources, ...windowSources]; + } + }); + + ipcMain.handle("select-source", (_, source: SelectedSource) => { + setSelectedSource(source); + broadcastSelectedSourceChange(); + stopWindowBoundsCapture(); + const sourceSelectorWin = getSourceSelectorWindow(); + if (sourceSelectorWin) { + sourceSelectorWin.close(); + } + return selectedSource; + }); + + ipcMain.handle("show-source-highlight", async (_, source: SelectedSource) => { + try { + const isWindow = source.id?.startsWith("window:"); + const windowId = isWindow ? parseWindowId(source.id) : null; + + // ── 1. Bring window to front ── + if (isWindow && process.platform === "darwin") { + const appName = source.appName || source.name?.split(" — ")[0]?.trim(); + if (appName) { + try { + await execFileAsync( + "osascript", + ["-e", `tell application "${appName}" to activate`], + { timeout: 2000 }, + ); + await new Promise((resolve) => setTimeout(resolve, 350)); + } catch { + /* ignore */ + } + } + } else if (windowId && process.platform === "linux") { + try { + await execFileAsync("wmctrl", ["-i", "-a", `0x${windowId.toString(16)}`], { + timeout: 1500, + }); + } catch { + try { + await execFileAsync("xdotool", ["windowactivate", String(windowId)], { + timeout: 1500, + }); + } catch { + /* not available */ + } + } + await new Promise((resolve) => setTimeout(resolve, 250)); + } + + // ── 2. 
Resolve bounds ── + let bounds: { x: number; y: number; width: number; height: number } | null = null; + + if (source.id?.startsWith("screen:")) { + bounds = getDisplayBoundsForSource(source); + } else if (isWindow) { + if (process.platform === "darwin") { + bounds = await resolveMacWindowBounds(source); + } else if (process.platform === "win32") { + bounds = await resolveWindowsWindowBounds(source); + } else if (process.platform === "linux") { + bounds = await resolveLinuxWindowBounds(source); + } + } + + if (!bounds || bounds.width <= 0 || bounds.height <= 0) { + bounds = getDisplayBoundsForSource(source); + } + + // ── 3. Show traveling wave highlight ── + const pad = 6; + const highlightWin = new BrowserWindow({ + x: bounds.x - pad, + y: bounds.y - pad, + width: bounds.width + pad * 2, + height: bounds.height + pad * 2, + frame: false, + transparent: true, + alwaysOnTop: true, + skipTaskbar: true, + hasShadow: false, + resizable: false, + focusable: false, + webPreferences: { nodeIntegration: false, contextIsolation: true }, + }); + + highlightWin.setIgnoreMouseEvents(true); + + const html = ` + +
+
+` + + await highlightWin.loadURL(`data:text/html;charset=utf-8,${encodeURIComponent(html)}`) + + setTimeout(() => { + if (!highlightWin.isDestroyed()) highlightWin.close() + }, 1700) + + return { success: true } + } catch (error) { + console.error('Failed to show source highlight:', error) + return { success: false } + } + }) + + ipcMain.handle('get-selected-source', () => { + return selectedSource + }) + + ipcMain.handle('open-source-selector', () => { + const sourceSelectorWin = getSourceSelectorWindow() + if (sourceSelectorWin) { + sourceSelectorWin.focus() + return + } + createSourceSelectorWindow() + }) + ipcMain.handle('switch-to-editor', () => { + console.log('[switch-to-editor] Opening editor window') + const sourceSelectorWin = getSourceSelectorWindow() + if (sourceSelectorWin && !sourceSelectorWin.isDestroyed()) { + sourceSelectorWin.close() + } + createEditorWindow() + }) + +} diff --git a/electron/ipc/utils.ts b/electron/ipc/utils.ts index ccb28750..5b7e040e 100644 --- a/electron/ipc/utils.ts +++ b/electron/ipc/utils.ts @@ -6,6 +6,7 @@ import { app } from "electron"; import { RECORDINGS_DIR } from "../appPaths"; import { RECORDINGS_SETTINGS_FILE, AUTO_RECORDING_PREFIX } from "./constants"; import { + approvedLocalReadPaths, customRecordingsDir, setCustomRecordingsDir, recordingsDirLoaded, @@ -103,3 +104,21 @@ export async function getRecordingsDir() { await fs.mkdir(targetDir, { recursive: true }); return targetDir; } + +export function getMacPrivacySettingsUrl(pane: "screen" | "accessibility" | "microphone"): string { + if (pane === "screen") + return "x-apple.systempreferences:com.apple.preference.security?Privacy_ScreenCapture"; + if (pane === "microphone") + return "x-apple.systempreferences:com.apple.preference.security?Privacy_Microphone"; + return "x-apple.systempreferences:com.apple.preference.security?Privacy_Accessibility"; +} + +export function approveUserPath(filePath: string | null | undefined): void { + if (!filePath) return; + try { + 
approvedLocalReadPaths.add(path.resolve(filePath)); + } catch { + // Ignore invalid paths; later reads will surface the underlying error. + } +} + From 2ae0aa9a929ee34e154ee4ad4c357ec1a82637b5 Mon Sep 17 00:00:00 2001 From: webadderall <131426131+webadderall@users.noreply.github.com> Date: Fri, 17 Apr 2026 20:35:08 +1000 Subject: [PATCH 3/6] fix: address CodeRabbit review feedback - Remove dead helperExists local in recording/windows.ts - Hoist fs/promises import out of close handler in recording/ffmpeg.ts - Guard fs.readdir with mkdir in recording/prune.ts (ENOENT resilience) - Derive companion audio suffixes from COMPANION_AUDIO_LAYOUTS in prune.ts - Guard mousemove hook registration to Linux only in cursor/interaction.ts - Replace dynamic require('electron') with static import in cursor/monitor.ts - Wrap nodeRequire in try/catch in ffmpeg/binary.ts for fallback safety - Fix hardcoded timeOffsetMs: 0 in project/session.ts (use normalizer) - Fix isPathInsideDirectory to normalize candidatePath in project/manager.ts - Fix isAllowedLocalReadPath security: require path to be in allowlist (AND not OR) - Derive extension regex from constants in project/manager.ts - Consolidate duplicate Duration parsers in recording/diagnostics.ts - Refactor ensureReadableFile to use options object instead of description string - Make swiftc compilation async (execFile) in paths/binaries.ts - Add socket timeout to httpsGet in captions/whisper.ts --- electron/ipc/captions/generate.ts | 10 ++++----- electron/ipc/captions/whisper.ts | 5 ++++- electron/ipc/cursor/interaction.ts | 12 ++++++++--- electron/ipc/cursor/monitor.ts | 2 +- electron/ipc/ffmpeg/binary.ts | 16 ++++++++------ electron/ipc/paths/binaries.ts | 20 +++++++++++------- electron/ipc/project/manager.ts | 16 ++++++++------ electron/ipc/project/session.ts | 2 +- electron/ipc/recording/diagnostics.ts | 10 +++------ electron/ipc/recording/ffmpeg.ts | 2 +- electron/ipc/recording/prune.ts | 16 +++++++------- 
electron/ipc/recording/windows.ts | 9 +++----- .../recordly-native-cursor-monitor | Bin 95840 -> 95840 bytes .../recordly-screencapturekit-helper | Bin 189936 -> 189936 bytes .../bin/darwin-arm64/recordly-system-cursors | Bin 99752 -> 99752 bytes .../bin/darwin-arm64/recordly-window-list | Bin 116024 -> 116024 bytes .../darwin-x64/recordly-native-cursor-monitor | Bin 70568 -> 70568 bytes .../recordly-screencapturekit-helper | Bin 180456 -> 180456 bytes .../bin/darwin-x64/recordly-system-cursors | Bin 70360 -> 70360 bytes .../bin/darwin-x64/recordly-window-list | Bin 82424 -> 82424 bytes 20 files changed, 67 insertions(+), 53 deletions(-) diff --git a/electron/ipc/captions/generate.ts b/electron/ipc/captions/generate.ts index f74abf15..33c34584 100644 --- a/electron/ipc/captions/generate.ts +++ b/electron/ipc/captions/generate.ts @@ -12,9 +12,9 @@ import { resolveRecordingSession } from "../project/session"; const execFileAsync = promisify(execFile); -export async function ensureReadableFile(filePath: string, description: string) { +export async function ensureReadableFile(filePath: string, options?: { executable?: boolean }) { await fs.access(filePath, fsConstants.R_OK); - if (description === "whisper executable") { + if (options?.executable) { try { await fs.access(filePath, fsConstants.X_OK); } catch { @@ -113,7 +113,7 @@ export async function extractCaptionAudioSource(options: { for (const candidate of candidates) { try { - await ensureReadableFile(candidate.path, "video file"); + await ensureReadableFile(candidate.path); await execFileAsync( options.ffmpegPath, [ @@ -169,8 +169,8 @@ export async function generateAutoCaptionsFromVideo(options: { const whisperExecutablePath = await resolveWhisperExecutablePath(options.whisperExecutablePath); const whisperModelPath = path.resolve(options.whisperModelPath); - await ensureReadableFile(whisperExecutablePath, "whisper executable"); - await ensureReadableFile(whisperModelPath, "whisper model"); + await 
ensureReadableFile(whisperExecutablePath, { executable: true }); + await ensureReadableFile(whisperModelPath); const tempBase = path.join( app.getPath("temp"), diff --git a/electron/ipc/captions/whisper.ts b/electron/ipc/captions/whisper.ts index 70a97ede..c8e774c6 100644 --- a/electron/ipc/captions/whisper.ts +++ b/electron/ipc/captions/whisper.ts @@ -41,7 +41,7 @@ export function downloadFileWithProgress( ): Promise { const request = (currentUrl: string, redirectCount = 0): Promise => { return new Promise((resolve, reject) => { - const req = httpsGet(currentUrl, (response) => { + const req = httpsGet(currentUrl, { timeout: 30_000 }, (response) => { const statusCode = response.statusCode ?? 0; const location = response.headers.location; @@ -97,6 +97,9 @@ export function downloadFileWithProgress( }); req.on("error", reject); + req.on("timeout", () => { + req.destroy(new Error("Whisper model download timed out.")); + }); }); }; diff --git a/electron/ipc/cursor/interaction.ts b/electron/ipc/cursor/interaction.ts index 81b8bbdb..f5ae15ca 100644 --- a/electron/ipc/cursor/interaction.ts +++ b/electron/ipc/cursor/interaction.ts @@ -183,7 +183,9 @@ export async function startInteractionCapture() { hook.on("mousedown", onMouseDown); hook.on("mouseup", onMouseUp); - hook.on("mousemove", onMouseMove); + if (process.platform === "linux") { + hook.on("mousemove", onMouseMove); + } hook.start(); @@ -192,11 +194,15 @@ export async function startInteractionCapture() { if (typeof hook.off === "function") { hook.off("mousedown", onMouseDown); hook.off("mouseup", onMouseUp); - hook.off("mousemove", onMouseMove); + if (process.platform === "linux") { + hook.off("mousemove", onMouseMove); + } } else if (typeof hook.removeListener === "function") { hook.removeListener("mousedown", onMouseDown); hook.removeListener("mouseup", onMouseUp); - hook.removeListener("mousemove", onMouseMove); + if (process.platform === "linux") { + hook.removeListener("mousemove", onMouseMove); + } } } catch { 
// ignore listener cleanup errors diff --git a/electron/ipc/cursor/monitor.ts b/electron/ipc/cursor/monitor.ts index 4943e523..0c39909a 100644 --- a/electron/ipc/cursor/monitor.ts +++ b/electron/ipc/cursor/monitor.ts @@ -1,6 +1,7 @@ import { spawn } from "node:child_process"; import { constants as fsConstants } from "node:fs"; import fs from "node:fs/promises"; +import { BrowserWindow } from "electron"; import type { CursorVisualType } from "../types"; import { currentCursorVisualType, @@ -13,7 +14,6 @@ import { import { getCursorMonitorExePath, ensureNativeCursorMonitorBinary } from "../paths/binaries"; export function emitCursorStateChanged(cursorType: CursorVisualType) { - const { BrowserWindow } = require("electron") as typeof import("electron"); BrowserWindow.getAllWindows().forEach((window) => { if (!window.isDestroyed()) { window.webContents.send("cursor-state-changed", { cursorType }); diff --git a/electron/ipc/ffmpeg/binary.ts b/electron/ipc/ffmpeg/binary.ts index 095db5a8..c76acd40 100644 --- a/electron/ipc/ffmpeg/binary.ts +++ b/electron/ipc/ffmpeg/binary.ts @@ -6,13 +6,17 @@ import { app } from "electron"; const nodeRequire = createRequire(import.meta.url); export function loadFfmpegStatic(): string | null { - const moduleExports = nodeRequire("ffmpeg-static"); - if (typeof moduleExports === "string") { - return moduleExports; - } + try { + const moduleExports = nodeRequire("ffmpeg-static"); + if (typeof moduleExports === "string") { + return moduleExports; + } - if (typeof moduleExports?.default === "string") { - return moduleExports.default as string; + if (typeof moduleExports?.default === "string") { + return moduleExports.default as string; + } + } catch { + // ffmpeg-static not available; fall through to system FFmpeg } return null; diff --git a/electron/ipc/paths/binaries.ts b/electron/ipc/paths/binaries.ts index c43cb97c..45ec80da 100644 --- a/electron/ipc/paths/binaries.ts +++ b/electron/ipc/paths/binaries.ts @@ -1,13 +1,16 @@ -import { 
spawnSync } from "node:child_process"; +import { execFile } from "node:child_process"; import { existsSync, constants as fsConstants } from "node:fs"; import fs from "node:fs/promises"; import path from "node:path"; +import { promisify } from "node:util"; import { app } from "electron"; import { nativeHelperMigrationPromise, setNativeHelperMigrationPromise, } from "../state"; +const execFileAsync = promisify(execFile); + /** * Resolve a path within the app bundle, handling asar unpacking in production. * Files listed in asarUnpack are extracted to app.asar.unpacked/ and must be @@ -205,13 +208,14 @@ export async function ensureSwiftHelperBinary( return binaryPath; } - const result = spawnSync("swiftc", ["-O", sourcePath, "-o", binaryPath], { - encoding: "utf8", - timeout: 120000, - }); - - if (result.status !== 0) { - const details = [result.stderr, result.stdout].filter(Boolean).join("\n").trim(); + try { + await execFileAsync("swiftc", ["-O", sourcePath, "-o", binaryPath], { + encoding: "utf8", + timeout: 120000, + }); + } catch (error) { + const err = error as NodeJS.ErrnoException & { stdout?: string; stderr?: string }; + const details = [err.stderr, err.stdout].filter(Boolean).join("\n").trim(); throw new Error(details || `Failed to compile ${label}`); } diff --git a/electron/ipc/project/manager.ts b/electron/ipc/project/manager.ts index b3d09607..2db9e774 100644 --- a/electron/ipc/project/manager.ts +++ b/electron/ipc/project/manager.ts @@ -41,10 +41,11 @@ export function getAssetRootPath() { } export function isPathInsideDirectory(candidatePath: string, directoryPath: string) { + const normalizedCandidatePath = normalizePath(candidatePath); const normalizedDirectoryPath = normalizePath(directoryPath); return ( - candidatePath === normalizedDirectoryPath || - candidatePath.startsWith(`${normalizedDirectoryPath}${path.sep}`) + normalizedCandidatePath === normalizedDirectoryPath || + normalizedCandidatePath.startsWith(`${normalizedDirectoryPath}${path.sep}`) ); 
} @@ -52,9 +53,9 @@ export function isAllowedLocalReadPath(candidatePath: string) { const allowedPrefixes = [RECORDINGS_DIR, USER_DATA_PATH, getAssetRootPath(), app.getPath("temp")]; return ( - existsSync(candidatePath) || - allowedPrefixes.some((prefix) => isPathInsideDirectory(candidatePath, prefix)) || - approvedLocalReadPaths.has(candidatePath) + existsSync(candidatePath) && + (allowedPrefixes.some((prefix) => isPathInsideDirectory(candidatePath, prefix)) || + approvedLocalReadPaths.has(candidatePath)) ); } @@ -238,7 +239,10 @@ export async function buildProjectLibraryEntry( return { path: normalizedPath, - name: path.basename(normalizedPath).replace(/\.(recordly|openscreen)$/i, ""), + name: path.basename(normalizedPath).replace( + new RegExp(`\\.(${[PROJECT_FILE_EXTENSION, ...LEGACY_PROJECT_FILE_EXTENSIONS].join("|")})$`, "i"), + "", + ), updatedAt: stats.mtimeMs, thumbnailPath: thumbnailExists ? thumbnailPath : null, isCurrent: Boolean( diff --git a/electron/ipc/project/session.ts b/electron/ipc/project/session.ts index 429e7607..995f7193 100644 --- a/electron/ipc/project/session.ts +++ b/electron/ipc/project/session.ts @@ -65,7 +65,7 @@ export async function resolveRecordingSessionManifest( return { videoPath: normalizedVideoPath, webcamPath: null, - timeOffsetMs: 0, + timeOffsetMs: normalizeRecordingTimeOffsetMs(parsed.timeOffsetMs), }; } diff --git a/electron/ipc/recording/diagnostics.ts b/electron/ipc/recording/diagnostics.ts index 07bdfe13..9ce89a89 100644 --- a/electron/ipc/recording/diagnostics.ts +++ b/electron/ipc/recording/diagnostics.ts @@ -55,13 +55,9 @@ export async function probeMediaDurationSeconds(filePath: string): Promise Boolean(value)) @@ -74,14 +76,12 @@ export async function pruneAutoRecordings(exemptPaths: string[] = []) { await fs.rm(getTelemetryPathForVideo(entry.filePath), { force: true }); // Clean up companion audio files left from recording (macOS .m4a, Windows .wav) const base = entry.filePath.replace(/\.(mp4|mov|webm)$/i, ""); - 
for (const suffix of [ - ".system.m4a", - ".mic.m4a", - ".system.wav", - ".mic.wav", - ".mic.webm", - ".system.webm", - ]) { + const companionSuffixes = Array.from( + new Set( + COMPANION_AUDIO_LAYOUTS.flatMap((layout) => [layout.systemSuffix, layout.micSuffix]), + ), + ); + for (const suffix of companionSuffixes) { await fs.rm(base + suffix, { force: true }).catch(() => undefined); } } catch (error) { diff --git a/electron/ipc/recording/windows.ts b/electron/ipc/recording/windows.ts index 76355b66..32c39cb4 100644 --- a/electron/ipc/recording/windows.ts +++ b/electron/ipc/recording/windows.ts @@ -32,21 +32,18 @@ const execFileAsync = promisify(execFile); export async function isNativeWindowsCaptureAvailable(): Promise { if (process.platform !== "win32") return false; - const helperPath = getWindowsCaptureExePath(); const os = await import("node:os"); const [major, , build] = os.release().split(".").map(Number); const supported = major >= 10 && build >= 19041; - let helperExists = false; + if (!supported) return false; try { - await fs.access(helperPath, fsConstants.X_OK); - helperExists = true; + await fs.access(getWindowsCaptureExePath(), fsConstants.X_OK); } catch { return false; } - void helperExists; - return supported; + return true; } export function waitForWindowsCaptureStart(proc: ChildProcessWithoutNullStreams) { diff --git a/electron/native/bin/darwin-arm64/recordly-native-cursor-monitor b/electron/native/bin/darwin-arm64/recordly-native-cursor-monitor index fdd0bd08f3aea74efde13723ae5e8a7c063990be..e0e478f11b0d8ac0ed5bcfbcd5fc4ea5dfac4e55 100755 GIT binary patch delta 147 zcmaFxhV{W4)(ty21;m1)p3bl~pRnNUiL@fO8>gC2aBe@r$;h9n*v-JezzD=3AoA-T z4-*3eb2$SO3j+fa5Hc{7PybQG=we@#nw(#hl2fUhmspZnma3avT2!20q??Nl#c!~HQafi-+%FoNJ;e%$D%!4WuPtVL%mg$K06=^;bZ|eiE7O!IJckRWaQ6O>}6nJU<6_i5czeF zhlzoKxsrj2g@J(y2pJeEr~fEobaBitNX;uwE=oa)@mqbJTcjW#yXx3e-qO3qYTK)e86PtN0Ha4Wr2qf` diff --git a/electron/native/bin/darwin-arm64/recordly-screencapturekit-helper 
b/electron/native/bin/darwin-arm64/recordly-screencapturekit-helper index ff4b8aa5a49ba34960122c50f9d514b61c35ba44..91b54570d68b316b5fd176d8496a340e8c8dfe79 100755 GIT binary patch delta 6049 zcmaKwdt6l27RUEKyc`5&1Qm?OAj*Iu4@FVI0L6F&2!hY+4HQVm2mz58HD?e|FeIkY zwl5O9CW78V2OX}8iH|-~nc~HZnDNL*SelS&zUthyIg3x;{HTVO?^lEC>;uTfft9(PJbBO%N|nh4pYg!tf!{I@{9?RyWK=`GxYJ`n)*p@ zoMaMH{a$br(e*_)QQrq{5xV~97U@IHg5?e9dZ62&*MZxGt|z)(`fhM#6Q$AS<9ua$ zKl5OD@+8T1#N1?k0l4eX9T;t$z6IPmbbIhj*6Cd>g5}$%;8B=KCAJ%~EsSP18h3N^ z)6_5VlNrLz50Cph7^Yeubr6~+=pVReQmTJL=|cG=9xmD=>e0Ve`Y`+)f#F8^c1XDr!9xP{nHHQ zAd5p5fM+-ox@V9Dwbd~~D)e4)4uy*AkKrG1(=GJ!(Q$K%7x4imBkg32{BdR)lD6AtfKY`D3Mv#rw^}I1A!nSo#`5rl21N{%!O_AofH*2;z(A zPlb3C`u-4)z}OdJp+6o^h!?;%?BD^h4r4wfKVp8j+k>xWF+=FcDA7%tOBl?B4vY`+ zBlLqHzKiixh`+=5<#N4ng8%NOiyzL;4xy3Mj#&?-N_x*Yc_>{==~i~w1~7xQa|vTq zPHpK$He61}Q&Scyr%IH^Vx2;iGv7A1%?intnoQ9MjRy>TlIGlXR zvVjUFknuE#kQ1zN78AFxVWYH-O(OIewdnxXbym6DT|mYK3ND|dBH+FPnz z?>tp%+DfCwj_x%XG?2x{*;998tUa|cqoY`o6ZP_pafhoz&sq`^PDsL1wcrQ~S2z(? z!>X3BoCvewGI5>E>Yb>suOp-~^7n z7Oq|o{s8CCEvQEn;{kg`QJf6%RyIRPN1Ay-n9dd{;ga>J2>EpxJo8sX5q>_jyCHKo zG}|$ql8#q|J^Hg?7T7j$oAF{95GVgz6!k5`73_tQ4l-K?&H*O#psx&C0-99!xEV;{IHiuc&Nkk=n3vq~QB0XLB~LP)n8+db%sL4HNwK|$CRouU{9dphj< zV7~x+YnLd#fZe|vRwmfXVE@{9-j5!nF27m z6nGT#nQH(IpwVnm01bu*a|fqX*1}~WlT8J=#zLn;W-3#0ab!Afma%3ow-}uUlFX)Y zQLt<-7g!CKUe-MgI@mGUbfD8LbUIKnQ*jx?bknI1wP#ISZm~Wt>C80{+NxM&Aau~O zf_Rxy@{OQ%w1Ra7bZ0*=x%mTKS-YGPr$Dr0*rUEl{J z`Dy6_dW@B7p;IHP;nK{Sxpc9+TnLjDfGC(Jmk<`oC6TGP{Ed}z$znBJDp)g@I(C=K zWhUD|U4&i-Hf{rTrZy}Rkoqzemk3tMC6U!|NoLJlwCpaIDkdugInF$}G_y!9txUzG zi1zm8(Hh5bdFx$DdBa*1QIB9JWR$)$ota;ak~F3^z+VKqh6$3o%APgWK@UY=}F zF}0zA%%K>XkFjZ7!q}o>I7tdtT1>;KA8P?qvkv|*<0rWC|EdBqq?LKfXemEMA*mZ) zpzAOTUT3+hq(2~sBl}|h5geycQVlte#zoX?sCQ6*Llys`Q{mr1KAJEJ3=0QihX(Ah5ZfoBCb>xk?J+L^FF_~v-+}!GWBwoKSss|RfM0P-R#sY6(flfS9 z5%bf~kHvupo|b{9WZ>x-cq$^8s+3m4@+03&-={&wy_;b@PS4Sa;%_4Vnf~8xGx)#0 z+UkT5>}P?Ei{Qf zV28KRPMXVfj{|)H>VdvA6d}C*Irxd7d@@{l{zA-y)hW`bdY>v+ 
zYX;tpudXM=*<4E!@m={Dau)JvxJ!6@9)mG3*?2!g0u9pn%mDBEwMzS5EE>WKR_OVT!TCY zxea+7@)P8l$RjsM6IzHIh@69*$XNrYbORbIF`)#x47mdNDDrmXGswGjWaiV60}SCQS3ZzFplkK8B?Fa_BY`Bmg3TEWYf)r41oc| zkmG^*gp-lqM%GHKA)C>t!h|a1)5!k<9s)1010T>@k$=SxYQG`x_K|KXyZ5C0%g9TC zovpNFp|8}Qjl8{|;reevqaG9XA@?Ak=4`Dc1K;W{BirLQwmZmP$R=e{eh9J)=OM7N zA_pU9aONM~R)aAFKFbVzh}+5;-niEpT`h%;S6yQuhd(ZSvwPNIB@kjhjR>aj&e{Kp zuVxoIg|N_p_v;CNeG8wEo|ci7mP1{cNE&?^1Tb-;L=9cccVeoV_)0P5U|b*Lxy=d-x!*@cxXV zKs(FL@Aw9t@^ouRnc1jYIlr;~O8HuI?a%Y}jA*>|CD0ePZPVz0&?VHf9D#{$|@AUc1<~6s@JwEqw{x3s5*=8RavhU&SAFM+v zb=Q7~yfSh0$SrY}EftjouT<@gA3VA}wLLS(!_nl?a9y*1(TmLCSI@+)xW<0*Eq^N^ z-p_B8Zf)ln=F=^NXv~EBMvtA@RhefiZZ~$%irJ>|&#XOrs<2hjCyT3UKNVkSmG<7M zDQ`x9m2Pmmd2iXCuWlagzIJ=!g0@2?pQ>J+_p{7z#^Y#Z>p|m_hZFCHUpaHH?63>F^u3U* z_-RMmzPq+>*>9?jv8y<6`u%b-B}(m7WHqb{*IH-#?6*6jPk=)-ndev<3y i=togAyJo$%OE`D?ZsVPA7XR|fHiL1_EnzQ(+xNeNtPxZI delta 6067 zcmaKwc~lfv7RKKznpH(wP}!6Q)CLg*AuK`!5|4m3n~)I<2@oNIQA9-pN~W7lfgoZU zeW*zUmk25ou+=t3Gl;K2R?JPON3zgT?ff6Fp}K6+GF6RO_1C}l1b9E za)V&Gy;5>b(6!g5f;$e~L%cbB#%Y_uZFiA!H_0ToT}x%b@_cv6tuc_?d~GbaR$h`D zBbmfXTL*3$x}NB!X}iI#M%M@3YHg@tu>2IdF6f@pt_QaZT~~Cwv>o8uOp#{$3$E8j z>tz%y&-RvFd(6$&7JyrU?%-?{+BR@o(0zbMvPJ7?94xQ*#l0|;TGZ>780!r!3}ltG zv2yCB4XnrCaLnbu==_Hruo0+-_UVKy8mfI?Ihh{QRw}*a1s>}w3P@W84c)hhkgmpK zx=YF_MpU7#J8Y*@c=QQ0RA=k8iwbmv?)a1yGANts=M62g`~qolq>etF+GvB5$Lbp5 zT#SWdqXy#cxhffSnU=gufG+n_l?DQ}(+y9rqAWp)V_j@1XlvPAc<{>1(Kr+S&5XWI04{`rEX|jx!gv>yHI{44f4}mxU z{UC^Mq3;jzMD%?io`A6@#3Ke`H$o->?8E{Wi0d%sQ}R9LC%iTIWh~O1j-ELjho{3@ z06X#C^`vdi^1OjPdL3y5ImWa$X-hk{vgv5p*0IHk>Ny3;i3z=_*Qhux1Ou ze0GCNJfm`INpG={a_UB{Sg4#T$2=M(O+bhN;kTgfHwpP<7%D4~)8B^mj+9bbAVtMW zMo@yb9#=Cedg>SpuE>eW(0VAv2a78;A}3rnh-yf`~OYuUV|>FHS>lFUoZ&RM=R z{iDnrj}@6|>5HZ6!N2;&R{S)Bwx$#GA=cE~keadRF?5pan-d7hqsPq%34=2! 
zHBT4{ce5}eq{FQ)Zaxv-fG>;dc(!8<_4FJH>B-q)ByQfkyu2LY4>&bA65%F9d3g|0 zS5ufjE)$BNn#sn}?WTz^7{(3Oyh|7bH#@qDCkd|L6Ty!Er6;%}8c(?`id?f$cMquW zBkLba$I}j`aG>tvAM!IOFDD{>e)!U4=uiZloK1w^;S9=)i{r}>9N1C^>M^SRXQ|&g z)UE+#7Bd7peVYRfGdG@nCe1q{nrfs(o@e4f2(m;$y1oOrxh62vi=MN!)}Qo$Z6sfpoiaB5he zlG>T5AnZAUgF!CT!f-k``&q0D%w5wdia)VMF4S%yjA3hexDMRutPw(5sz2>Qzcum7 zxdjbjv$!LQKCmUgRt?)B*uHNU#YeD>>k!3w*z#dJqCen8_t41V-(QOPp`uv(wkX=K z5Jgp~C@x$pid{9Lm?DVc=RTt70%I}Ju*yVH+>s3Pq`~?iSC%V^=T?ehDb#!pH41&B zH#Ih(73_d7^`;iA(-$tz3?}!Zel&ASQQHhgo13gBoNGMR>#esxH#w^1X6RE-YA5&8Z+x~YR%fis6WkNHsSOKTFW}Z z2c|HMpdV9DRuDn2(Q_>@&Bp< zGNk+T3@0gnnL<);_$QXv{|f)&a{bs*(&xx?kV7!P73VoGshV6x;}+@z)c>Fw!~cQ2 z@$f%sII=D3V4v}SVh?0r)IpXOptd0I;lQ9i!Iip?lP2Dcd)$M2G7GsI$IZq|c_vtw zi@F-eZE(B>IS$9S^L}dBvrX7|DK?JAf>T(Kf#YwXE_RkCbiuqZ{0g38eFN6nVZJp^ zcnZf$krSa_%@6HHZ1g=Ar&7^ z<49qCNZgJMiZEdsPBFdhj zUYt?5UKF2;{3rW=KbyhtdCjl+Q{HF5eK0<#gzp@D%EA8){3_6XRQP?AeuGmgs3%Qg z>nmt9+^{<;=wi65#IB{uRAkL-=^a|k)>qQ7aL+1fa$nGAP&^g%1*kvz(o}@-@!!Eu z1Lce1%JVZY5AIIk)Iy)Rjw+X{2OpQO{*{oiMjDcakI((c8e}IpPx$z1ufjksB0eJ*+ z&H$@5#1@SrOi&L9v42{K@P=j27 zT#I}FxgNO{c`x#h$W6%I$SuetilhZyM_vG|=HKaUG>S0c9`fhNzak$+?m~7hmKydV z??EPTO;SRLM~O6Uf}Dpu3i+!Nxcl)1IH7R>6I_t5BKso$f*gcAdbKpc3}jd2DCA`1 zB;-8g50EQX<5{{8ja`_KgM0;9gB-O+ny?ai1#&g=7s#8D_aoOMA4mQh@-TI&)bJo0 ztB}7#ZbCkZ{147%8Z!7c>h1s&;)!pk{yrknQos*Fj_zvaFmCbC@t5ITn~NI2(Bpa+SntQjSIwCe$KdME(ZY93E%~->F?e zmU&1g*>mK*lO?wLRLXBhP6Zx2R6{a6rST=m^#csw{~9#D$Alfoy~r(`hiS;*>${7{ zF8JE*K5`JUL4}kbgY3ZB9PX^h!N~cX`FptKV3@;en!$H-)tuo8yh872Cak{d7!5i6 za^fGW{@X(ZLW}~5AcH5)fk*yT?9?4$`rEIbuigLrRPLUUzA!s|De>KR)?<%j&N{lx zWQtM3LdRJfe|b0A!L8s}`NV56X|?ezvRzQ9TjJK7pS)3#wXHBYeVuXcGVhz0EI-x7 zx3~JPZ0z~hS-IQ!8Q!y+GcWIHdU8T_y|G7~w7~Sma8Ki}H+*MTxuN%1+?31BYe)EJ z?hK0!-g2U|{N&1apL)$QiGJ6#vOivx`FNXSf^+ZDfZmPCW~#f_*W5_WK2ufq;-{V; z*0x?JFOy65~$?gZcgx2N_)_3FqcI};Jv$a_U zhE+d|{G(~P`y|U;*B8@wIGA_mKCpFp!rVKA5OuKX7q10>)4E+y?0ZsE{Bha5#z5UO zZ(~JX|ACMHH8J2u#+=i2=kCsF?+NRh7P4c_7yo{_!g6B5i_S@_)wdT$r5tGrO6tD2 
zcG7_@ofaQFbm|Q`Fz(9^)u9au{-I7@E~RBlKfFL=?|$RsHus=W%~xxGd352#vRbb- z3w%5(?yH^@&8FMg;|@5QTY9!1DVi4L^Ybj_>4V*GuImz7w+zV%tFAd!krT9a%Qq*E z)E~92d0D#8Pe!iJ`|(7f{B6II&@-K%mS&xE*IXE)%PJgqU;WU1e!%_)v-hI+mCNO^ zn}nT9a^Ajqc&=RDxgb8nymn$Lx%fPPC;OVyM0SR+x$)Kx98h#wiy!Q zK@zh&vhAmD$zkX33Mq=8cR1Bs=`VH6IOg75{oG-vrl`90O!AeS_It=|nzzD=3AoA-T z4-*3e^8^Ma76t|;AY@=Dn6A;p=weZnnw(#hl2fT$Tv=R_nyZ^!T2!20RLl?-Tzyk- piCS;uoz%*(q=cU{MW@%l7M(YzS253G0z3Pi?aP`NnV5kF0RVv&PE1y5FfcFyAp=A1bc=(GE~Z7P$@xVoIhDHQnRzMs<+?eU#U%`e@jV@B&OFT? lwp$|3{aCt1YFX&wtWcc;HzkWZuRd%H*uME7V=FVz6abTpEx-T( delta 136 zcmdnd#lEA9eFF!Vz{9*|*9mtm9<30lP(2{v;o2<0wOxXX@&7hOWd;TYMj!?Okze3vX_!*d37#NsBntoXp}92L6Y)o<4bK mwA<)~+5WT%Dt9XqjIMqE%y5I-{;=MBqc7syHy&hcWd;CLn=f(z diff --git a/electron/native/bin/darwin-x64/recordly-native-cursor-monitor b/electron/native/bin/darwin-x64/recordly-native-cursor-monitor index 58bdf488fcec54b0ce0fff90796124b14b7d881e..d577b1a6fe148c5f17883a13c42d75441b112b2b 100755 GIT binary patch delta 35 rcmZ3noMpvwmJJ!40(b5o^D~}g-9GE1cxK1;!iUWjoZBln84b7qCl?Pv delta 35 rcmZ3noMpvwmJJ!40=E5|?k=)0GFaypb2MEo-=(>Nb9)6RqX8EH2Hp)j diff --git a/electron/native/bin/darwin-x64/recordly-screencapturekit-helper b/electron/native/bin/darwin-x64/recordly-screencapturekit-helper index 831b869056ecb3705c6db369a80f1f3cc6c682d3..3696e45dfb7f9cccc0cb63bf9d5b5a4c1c01cb1c 100755 GIT binary patch delta 5423 zcmZ{o4OCRs7RS%NFan7-BbGucgOW%pA1P%<3it(xrGOup7Nk7E5C=31pE-kq55INY z>`1FuR<4y;tEgLzsc!{KD=>X&noCVrSX$WfnLf?0=kvaO=Kg14v!1ioo&Wv)&p!L? 
zbI#12JGZ^Mb9;5?#ulO~y_Lo}qS8o^5TZ`wH-1IwG1^GS+ZmQy-N|y_)+^YJ&9dAb zmMLQOW_C})4TXDBcLa@eobku1YA??3GM$C}kf{FC-VafEnK4RDt+9fhx9(I{% zhF&qelEREL!-qytz7hFkTnNoD(kJgF@*16{wi5L=4o90hu$m$`CWwJrZXnAM|MbH6xb_flmVCfi2alKF8k98G6LVeyRB?~dH zixBBb<>aAsxl8+fR!9?vq{_)D6dk$y4$ICN)c$4-7GjrNn?n6!Ja;r7;dAw&7?U8x zyWvoID1{=rgop)FZ`CNTf{um_0R2lyW4U9b5y`z*QZniNNar5tT-Xmm0 zR~FBjUpc$1Dq+E_%HrbkSw*geFH{ykU%D`1QR(dB`QE1PNDQ;j9#|SM?z0;Q}$_!b0+BB7Z z2EE`@?N@msogSpfS2JkvgZ)&8s)bp_b22NRsv3m0hfAx<%Tt4DaWXf95*&@c zTIpJ#ZQhL;^mIr}?Ec$4Ovfv=-83gLUNlWJh0o8(F4@$>Hi%DOlJVKREty$DqzKg+s+>)m8^`P8NGtIqn!gzYXrcU$aJazh}?{qDn z(q?bNc-j@3Tyu(p`EdSdnxpwB;bSWwEqoklGtFCk4CQs5&Bq!(_Io$w(|a_b>DX;^ z&|sc%BFDQ-vuU|$=B(uUH*sv+ZS!Jp(>$MGn(ZS@Gj;;!&o|8((@nFkf_Z^yrY$nf zK{ckinfuK4uAM>=L6julD&Q~PX4%T3i3jAr3#h*q+D}GJr~Z^C)2DOmFj=akNp4iK zS$?kMpggUlMTR}ivPC8<*&z#*Y>?GTT4aNgBzZ{5GEpRWR{XNS)n9T)+;$E zo0XJE$4rWrO$8KeTNA{4>Sg{zlb&_t@P-u9rBI5MH%jO!dA)>s%ED4=q(bSK%U$jA zvAOi3b}>tSJeQ8r8kt^3J>-_>$wj-Rql^;iq_W=R>(FT>c3C~oH-3YXY4VT~m%O3G zBV)>03S}0Hbe2;mxu~4_hHd5FP9f~VBdMIC=@Z$k+M87Sg>st9^DUT9Zki~M&*$-6 z-Y^%%*?FX74^5>Na;1kmt(6T*>Se2vMtM%jK^ay}(b~nwENe43sOC zq{#*)g|bzNN1ju%MTRY7X_E0u+GLgzAxo9?lq;3Q$_6D#vQ^1Mc}|H-hAn4VCF7OU z%Pb{ZWvP-zxl+kq*`TCNwki?woD!!Dt6@o#@k)4slz3#R67Hwuplqn2#E^?4)LOat z>lH6UYspTXWn?WkuaJ)^=`IUuDN(yPLawT%CnIY!t(8^3_Ch%MuwEoD)l!^$JDP_~ z{&REgSgTulfWPQ2{vPBBj6ZP~KgXBX?iTZ`le?%>^yG3Q_&7AiDri|iLG7a~e{F!D zKF;zt1^D%tz~6no+i9%^zi&swwtx;_!T&74zs@&|>LdGgfPVsoejDJQ`+vThQ(V9d z0%sP58Jq}6lyH}seTah22E>1h{DCWx!g+XnySNb0p%1={R|EV`m_cCtW$ba_7Cbuo z&MolYz6#%#s+Jz3NzLCbtjan9?Ch$00RMnPFa!TU+c8uB0A4UB^yL$OT2Z^O3iFR+ z7uMd}#kR?AtAn+-{=KvdYwtQVly7uZq1IM(B(j#egRPaZ3v1>4E89^(Z{+do2X0jQ zFMP=6Vu?S&Oge)uN9q+>cb|m-!?K%d!4fTIJ)c-cM zH8&CeA_go&KPM)z3Oox1jPh}WUEGGz4S}m=%xZdblK;Ay_PCY5Z7&bQ|5hY-+Ry|BCew?!1rn7HbQ9f2pedKkQ#&RmCV(@z)W@KUA4?&|ANe zuI0M@f4&o!3em^r5wU63pT&G|8h8nKEO;e28yv(}4%L4ecntVC@M`c}@P`(=MJ0@% z5#Rw|0oQ>CdaME72TuU+2EPpc3j7XuKlo$tci?~e*zFM~V4Olg8~8l<7jRIuRlpT+ zGWaGq7u+q~TEPnN0PrSo2KWQTZktEs!e~XnH1IFrLh!xItO82Fy})JQc(4mR9_#^^ 
zfj5KqGP~8Dy#eDC0=9y$g13WvEVl}11iuX40k+jx?M>i1a0~b__$XM^^6#B0;3pXO z)>;YL!HM9j;E~{4;0kaj{!Wt+VmY`g_;YYC@L_OY@GrIakPU)CE35*Bf)l`L;BDY1 z!3V+l;A`ON;I4nO@|A#lgUi8l+%Oiw_#V6zd{3Q~U+@ zUxO!F>=tKWxDap?ya61siW7Dg!vBOW0lXJa>?VQBa;+JcgSUWpG56{05d-in?;CK5 zk9q%3!>B>PZSW3o)M`!;?h*cnfe(S((Vhmr1)d4E<8fY<;x2q;1=oX96|1Lu-?GuQ Wn-KmdeHRq-Eq0DK<~REK=>Gr${>-lc delta 5437 zcmZ{o4OCTC7RS%N@DLE~y)Y3)ec%V8@|D<-X?&;wij63KU|5=vVv2xN;$ZKgVvwdd zx7^rR*=jywTGNo%S&zt7u+&jXCmbDOniwIq#9_@)`JKa^ecst`VQ-CZt^5A>_domW zv(Gv2-FM%;Tru!+#lZTL0M&X6q}e7Wg8(5!l}2y$ny80sNw!0&%B}EI?jgOH+=L9} zHYigh=*{Hzuzo2P$AZY{eL*sax5EJ?N|cB9yB()W_v&F*5Z zw_EQbHlAzVSM zcxEaD56ZnOtRBCUiR13Fw$r)%s$82a#A=Ls2quO04CyZ=nVKzxE2-JU^$&p&yP+t* zaO0|y{QSZgC975zl|EOJ|7<~N%<~0#`5RPk&$}z>XYjU9V1F;XJPpRXtdGE83#`S_ z$uK!;>R2Jl;Ao%_X+kVqS*ERvl&9M9@r0{|mLIM(8cUNQE-rz>pDIcd3s38T|B7&?b`DltQd>8V20pEFIm zN;wZuVQmWc+(>#nb=E4YzJe`Hh<>?;x=@vSLD%bwG z@R5IfX$J+ymm7J1DC;gh{O10)H(rkq|{@Dlq#%~(u8NE zG~g{MH5mQ`$-9^##fimI8gRc9J2p#6$8JToT%wcsu#`AlDJ2oBr8MCOQkEhthtXJ{ z1HP7BKD4JU;%l1AZv}kskLd+446Ox_2(vJw08T&=+SXDpJ3hP?UeLrGJhT?t-~gtq zgAhFUEEK^hw5@|!=#kc2bXDq-V#kX05Q(kp!5{0_gB{l38EM!H!4Lb^Lj)Gdv+;!x zK|^E|(hvu+m;{>&!Jp;`g)kb9VY6&Ghuwv+0g^Fi12|v>{(S?D=X3=WL8O&Nig&^i z*oW0l>hT8FNvXpYDNWcdr4s`xV6-Nti6D@IA_#SAG{ERZq*tED7hos?v3k+K52r8qHQ8_DY!C8Z8Cq#VWqDNR@{BSZ)R_vA%hXLD3=3|r;S{ErbSRjS^N$JG8?GQUaB*|avBKl@Up?@V{Eh$_=KYYql!rmtFaby!@;*<^RUZuj2{)!RXD{EM2mZp#zrux{_wsN3Ki@$E_i-ZcnN8#wT=7cu=sjlk3Ej`i zoxbK3f1dMuuf%N1L*rXTpI3(%x~-MIAKmQT{gD~UGw_c0;dt*YNSk%{7ISNEB!Bj$Tss5ALE&f zU~c9;|B|(xXPnOc7qj2Zx`uTOw{PTp1+1x@*D;#EpS9c}kpu2$P2$3Ca(f8x^(UO5 zjQxM{K$p3&T5ey;eiV=M3~Q!Rhj^6-`kp(E;{oiP;A3uohX)$W2|V8p`7cg>h61_J zmw3Q(?ia`Y>&zRu|3WthTg6}=FoFYrh2cBltt`)VbN(YLf9Ek8hQ{$+FgLNk;<)lX zJGF!TxHjeA=K6M{ZI}8n-35F6!{)fNT3q4F>GyU`#39qXVai*!pT4G%fAuau(JSsj z;#<`2QTP8iN->fAAIOg*z0FEDM(S>?W;eva9c6|ke1$BuLf@;y+)nCB)vo0&c3Hq+5RK>z8|P!q{?`IQdvCxP^CksYyV!jvB2T^ zD<1oz5Th(kk&vwZI%YG^XD(-6%v{a9jM;~-9J2o!=Gn|8%sZJkGyhSsLu_T^0teJE 
zUuCXkp5#;m9ATcve2RGsa~tzJ%wI7dV*Z->FK%`?#T7O>IG~rghxt0QPlYPLH$|;b zJaZuP0_F(jV&+8V*O(VDzc1Ngaf+weXyJf0%-=8~YW5f=*XYaGo!2ySvuQN9?hip>?G%;^sKFMs^uG(9ftC-uF&oY0*EGp^u zP8RSz8~0VJ1UH#unf>THD`%X<9K>AAJd}AG^C;#InI|)!Wqy$Pn@awWO=kmkr~+m& z$1u-lewX=4=2qrB<{QjwnFqh5@@--s$^0VoY6lxT*!UasZsx!$m0%xpoa6wf@I2|7 z?q*sVx_-$OTEZ-^XOcHFXEP&n6|J@ZGz zqXs%fB0t&tjCqrrY5%*}c$EYCm`^ef-$@CAoWk>Ha3b?fZckq0?WDp diff --git a/electron/native/bin/darwin-x64/recordly-system-cursors b/electron/native/bin/darwin-x64/recordly-system-cursors index 2c53fd6830cd5954465399d110ee2da5014ee7ca..545613624b4254e34bceb3c2de9606dba61fad60 100755 GIT binary patch delta 35 rcmcbyl;y@!mJJ!40&n!)n3dd(>Sx<6_I`Hp-K6FU&g~VPj0v0o4c-n` delta 35 rcmcbyl;y@!mJJ!40!dm2j~r67kmz%Nz@!Ct-kh;CJ+Se(vRWd?wa{h>uI& Date: Fri, 17 Apr 2026 20:56:50 +1000 Subject: [PATCH 4/6] fix: revert overly strict isAllowedLocalReadPath to OR logic The AND gate broke access to user-selected files outside the allowlist (wallpapers, user videos, etc). Keep isPathInsideDirectory normalization fix, revert the existsSync AND guard back to the original OR behavior. 
--- electron/ipc/project/manager.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/electron/ipc/project/manager.ts b/electron/ipc/project/manager.ts index 2db9e774..faabccfe 100644 --- a/electron/ipc/project/manager.ts +++ b/electron/ipc/project/manager.ts @@ -53,9 +53,9 @@ export function isAllowedLocalReadPath(candidatePath: string) { const allowedPrefixes = [RECORDINGS_DIR, USER_DATA_PATH, getAssetRootPath(), app.getPath("temp")]; return ( - existsSync(candidatePath) && - (allowedPrefixes.some((prefix) => isPathInsideDirectory(candidatePath, prefix)) || - approvedLocalReadPaths.has(candidatePath)) + existsSync(candidatePath) || + allowedPrefixes.some((prefix) => isPathInsideDirectory(candidatePath, prefix)) || + approvedLocalReadPaths.has(candidatePath) ); } From 0b3169ffff9657e133e9032afd32494cba9c6171 Mon Sep 17 00:00:00 2001 From: webadderall <131426131+webadderall@users.noreply.github.com> Date: Fri, 17 Apr 2026 21:11:58 +1000 Subject: [PATCH 5/6] fix: address ipc review follow-ups --- electron/ipc/cursor/bounds.ts | 45 ++++++++++++++++------------- electron/ipc/cursor/interaction.ts | 4 +-- electron/ipc/cursor/monitor.ts | 33 +++++++++++++++------ electron/ipc/cursor/telemetry.ts | 4 --- electron/ipc/export/native-video.ts | 8 +++-- electron/ipc/project/manager.ts | 15 +++++----- electron/ipc/recording/ffmpeg.ts | 9 +++--- electron/ipc/recording/mac.ts | 5 ++-- electron/ipc/recording/windows.ts | 5 ++-- electron/ipc/register/project.ts | 15 ++++++---- electron/ipc/register/recording.ts | 26 +++++++++++------ electron/ipc/register/sources.ts | 33 +++++++++++++++++---- electron/ipc/state.ts | 4 +-- 13 files changed, 131 insertions(+), 75 deletions(-) diff --git a/electron/ipc/cursor/bounds.ts b/electron/ipc/cursor/bounds.ts index 6cf638db..02fb3c75 100644 --- a/electron/ipc/cursor/bounds.ts +++ b/electron/ipc/cursor/bounds.ts @@ -29,29 +29,33 @@ export async function getNativeMacWindowSources(options?: { maxAgeMs?: number }) return 
cachedNativeMacWindowSources; } - const binaryPath = await ensureNativeWindowListBinary(); - const { stdout } = await execFileAsync(binaryPath, [], { - timeout: 30000, - maxBuffer: 10 * 1024 * 1024, - }); - - const parsed = JSON.parse(stdout); - if (!Array.isArray(parsed)) { - return [] as NativeMacWindowSource[]; - } + try { + const binaryPath = await ensureNativeWindowListBinary(); + const { stdout } = await execFileAsync(binaryPath, [], { + timeout: 30000, + maxBuffer: 10 * 1024 * 1024, + }); - const entries = parsed.filter((entry: unknown): entry is NativeMacWindowSource => { - if (!entry || typeof entry !== "object") { - return false; + const parsed = JSON.parse(stdout); + if (!Array.isArray(parsed)) { + return [] as NativeMacWindowSource[]; } - const candidate = entry as Partial; - return typeof candidate.id === "string" && typeof candidate.name === "string"; - }); + const entries = parsed.filter((entry: unknown): entry is NativeMacWindowSource => { + if (!entry || typeof entry !== "object") { + return false; + } - setCachedNativeMacWindowSources(entries); - setCachedNativeMacWindowSourcesAtMs(now); - return entries; + const candidate = entry as Partial; + return typeof candidate.id === "string" && typeof candidate.name === "string"; + }); + + setCachedNativeMacWindowSources(entries); + setCachedNativeMacWindowSourcesAtMs(now); + return entries; + } catch { + return cachedNativeMacWindowSources ?? 
([] as NativeMacWindowSource[]); + } } export function getWindowBoundsFromNativeSource( @@ -180,8 +184,9 @@ export async function resolveWindowsWindowBounds(source: SelectedSource): Promis "if ($windowId) {", " $handle = [Int64]$windowId", "}", + "$escapedWindowTitle = if ($windowTitle) { [WildcardPattern]::Escape($windowTitle) } else { $null }", "if ($handle -le 0 -and $windowTitle) {", - ' $matchingProcess = Get-Process | Where-Object { $_.MainWindowTitle -eq $windowTitle -or $_.MainWindowTitle -like "*$windowTitle*" } | Select-Object -First 1', + ' $matchingProcess = Get-Process | Where-Object { $_.MainWindowTitle -eq $windowTitle -or ($escapedWindowTitle -and $_.MainWindowTitle -like "*$escapedWindowTitle*") } | Select-Object -First 1', " if ($matchingProcess) {", " $handle = $matchingProcess.MainWindowHandle.ToInt64()", " }", diff --git a/electron/ipc/cursor/interaction.ts b/electron/ipc/cursor/interaction.ts index f5ae15ca..e0b501e1 100644 --- a/electron/ipc/cursor/interaction.ts +++ b/electron/ipc/cursor/interaction.ts @@ -187,8 +187,6 @@ export async function startInteractionCapture() { hook.on("mousemove", onMouseMove); } - hook.start(); - setInteractionCaptureCleanup(() => { try { if (typeof hook.off === "function") { @@ -216,6 +214,8 @@ export async function startInteractionCapture() { // ignore hook shutdown errors } }); + + hook.start(); } catch (error) { if (!hasLoggedInteractionHookFailure) { setHasLoggedInteractionHookFailure(true); diff --git a/electron/ipc/cursor/monitor.ts b/electron/ipc/cursor/monitor.ts index 0c39909a..a076713e 100644 --- a/electron/ipc/cursor/monitor.ts +++ b/electron/ipc/cursor/monitor.ts @@ -112,20 +112,35 @@ export async function startNativeCursorMonitor() { } setNativeCursorMonitorProcess(proc as Parameters[0]); - - proc.once("error", (error) => { - console.warn("Native cursor monitor process error:", error); + const spawned = proc; + if (!spawned) { setNativeCursorMonitorProcess(null); - 
setNativeCursorMonitorOutputBuffer(""); setCurrentCursorVisualType("arrow"); + return; + } + + spawned.once("error", (error) => { + console.warn("Native cursor monitor process error:", error); + if (nativeCursorMonitorProcess === spawned) { + setNativeCursorMonitorProcess(null); + setNativeCursorMonitorOutputBuffer(""); + setCurrentCursorVisualType("arrow"); + } }); - if (proc.stdout) proc.stdout.on("data", handleCursorMonitorStdout); + if (spawned.stdout) spawned.stdout.on("data", handleCursorMonitorStdout); + if (spawned.stderr) { + spawned.stderr.on("data", () => { + // Drain stderr so helper logging cannot block the process. + }); + } - proc.once("close", () => { - setNativeCursorMonitorProcess(null); - setNativeCursorMonitorOutputBuffer(""); - setCurrentCursorVisualType("arrow"); + spawned.once("close", () => { + if (nativeCursorMonitorProcess === spawned) { + setNativeCursorMonitorProcess(null); + setNativeCursorMonitorOutputBuffer(""); + setCurrentCursorVisualType("arrow"); + } }); } catch (error) { console.warn("Failed to start native cursor monitor:", error); diff --git a/electron/ipc/cursor/telemetry.ts b/electron/ipc/cursor/telemetry.ts index f7eb5eaf..51b901cd 100644 --- a/electron/ipc/cursor/telemetry.ts +++ b/electron/ipc/cursor/telemetry.ts @@ -117,10 +117,6 @@ export function pushCursorSample( export function sampleCursorPoint() { const point = getNormalizedCursorPoint(); - if (!point) { - return; - } - pushCursorSample(point.cx, point.cy, Date.now() - cursorCaptureStartTimeMs, "move"); } diff --git a/electron/ipc/export/native-video.ts b/electron/ipc/export/native-video.ts index ccfe1486..9b2472bf 100644 --- a/electron/ipc/export/native-video.ts +++ b/electron/ipc/export/native-video.ts @@ -326,7 +326,10 @@ export async function resolveNativeVideoEncoder( ffmpegPath: string, encodingMode: NativeExportEncodingMode, ) { - if (cachedNativeVideoEncoder?.ffmpegPath === ffmpegPath) { + if ( + cachedNativeVideoEncoder?.ffmpegPath === ffmpegPath && + 
cachedNativeVideoEncoder?.encodingMode === encodingMode + ) { return cachedNativeVideoEncoder.encoderName; } @@ -341,7 +344,7 @@ export async function resolveNativeVideoEncoder( } if (await probeNativeVideoEncoder(ffmpegPath, encoderName, encodingMode)) { - setCachedNativeVideoEncoder({ ffmpegPath, encoderName }); + setCachedNativeVideoEncoder({ ffmpegPath, encodingMode, encoderName }); return encoderName; } } @@ -451,7 +454,6 @@ export async function muxExportedVideoAudioBuffer( } finally { await Promise.allSettled([ removeTemporaryExportFile(tempVideoPath), - removeTemporaryExportFile(`${tempVideoPath}.muxed.mp4`), removeTemporaryExportFile( path.join( path.dirname(tempVideoPath), diff --git a/electron/ipc/project/manager.ts b/electron/ipc/project/manager.ts index faabccfe..9859db0b 100644 --- a/electron/ipc/project/manager.ts +++ b/electron/ipc/project/manager.ts @@ -51,11 +51,12 @@ export function isPathInsideDirectory(candidatePath: string, directoryPath: stri export function isAllowedLocalReadPath(candidatePath: string) { const allowedPrefixes = [RECORDINGS_DIR, USER_DATA_PATH, getAssetRootPath(), app.getPath("temp")]; + const normalizedCandidatePath = normalizePath(candidatePath); return ( - existsSync(candidatePath) || - allowedPrefixes.some((prefix) => isPathInsideDirectory(candidatePath, prefix)) || - approvedLocalReadPaths.has(candidatePath) + existsSync(normalizedCandidatePath) || + allowedPrefixes.some((prefix) => isPathInsideDirectory(normalizedCandidatePath, prefix)) || + approvedLocalReadPaths.has(normalizedCandidatePath) ); } @@ -308,9 +309,6 @@ export async function loadProjectFromPath(projectPath: string) { message: mediaSources.message, }; } - - setCurrentProjectPath(normalizedPath); - setCurrentVideoPath(mediaSources.videoPath); const projectObj = project as Record; const editorObj = projectObj?.editor as Record | undefined; const audioTracks = editorObj?.audioTracks as { sourcePath?: unknown }[] | undefined; @@ -326,12 +324,15 @@ export async 
function loadProjectFromPath(projectPath: string) { } } await replaceApprovedSessionLocalReadPaths(approvedProjectPaths); + await rememberRecentProject(normalizedPath); + + setCurrentProjectPath(normalizedPath); + setCurrentVideoPath(mediaSources.videoPath); setCurrentRecordingSession({ videoPath: mediaSources.videoPath, webcamPath: mediaSources.webcamPath, timeOffsetMs: 0, } as RecordingSessionData); - await rememberRecentProject(normalizedPath); return { success: true, diff --git a/electron/ipc/recording/ffmpeg.ts b/electron/ipc/recording/ffmpeg.ts index 39dbc5dc..2d486d16 100644 --- a/electron/ipc/recording/ffmpeg.ts +++ b/electron/ipc/recording/ffmpeg.ts @@ -4,7 +4,7 @@ import type { SelectedSource } from "../types"; import { ffmpegCaptureOutputBuffer, } from "../state"; -import { getScreen } from "../utils"; +import { getScreen, parseWindowId } from "../utils"; import { resolveWindowsCaptureDisplay } from "../windowsCaptureSelection"; import { resolveLinuxWindowBounds } from "../cursor/bounds"; @@ -32,12 +32,13 @@ export async function buildFfmpegCaptureArgs(source: SelectedSource, outputPath: if (process.platform === "win32") { if (source?.id?.startsWith("window:")) { + const windowId = parseWindowId(source.id); const windowTitle = typeof source.windowTitle === "string" ? source.windowTitle.trim() : source.name.trim(); - if (!windowTitle) { - throw new Error("Missing window title for FFmpeg window capture"); + if (!windowId && !windowTitle) { + throw new Error("Missing window identifier for FFmpeg window capture"); } return [ @@ -49,7 +50,7 @@ export async function buildFfmpegCaptureArgs(source: SelectedSource, outputPath: "-draw_mouse", "0", "-i", - `title=${windowTitle}`, + windowId ? 
`hwnd=${windowId}` : `title=${windowTitle}`, ...commonOutputArgs, ]; } diff --git a/electron/ipc/recording/mac.ts b/electron/ipc/recording/mac.ts index b7cb8747..d340b8e1 100644 --- a/electron/ipc/recording/mac.ts +++ b/electron/ipc/recording/mac.ts @@ -52,9 +52,10 @@ export function waitForNativeCaptureStart(process: ChildProcessWithoutNullStream reject(new Error("Timed out waiting for ScreenCaptureKit recorder to start")); }, 12000); + let stdoutBuffer = ""; const onStdout = (chunk: Buffer) => { - const text = chunk.toString(); - if (text.includes("Recording started")) { + stdoutBuffer += chunk.toString(); + if (stdoutBuffer.includes("Recording started")) { cleanup(); resolve(); } diff --git a/electron/ipc/recording/windows.ts b/electron/ipc/recording/windows.ts index 32c39cb4..312f31e1 100644 --- a/electron/ipc/recording/windows.ts +++ b/electron/ipc/recording/windows.ts @@ -53,9 +53,10 @@ export function waitForWindowsCaptureStart(proc: ChildProcessWithoutNullStreams) reject(new Error("Timed out waiting for native Windows capture to start")); }, 12000); + let stdoutBuffer = ""; const onStdout = (chunk: Buffer) => { - const text = chunk.toString(); - if (text.includes("Recording started")) { + stdoutBuffer += chunk.toString(); + if (stdoutBuffer.includes("Recording started")) { cleanup(); resolve(); } diff --git a/electron/ipc/register/project.ts b/electron/ipc/register/project.ts index e5028802..2e9836d9 100644 --- a/electron/ipc/register/project.ts +++ b/electron/ipc/register/project.ts @@ -16,7 +16,7 @@ import { setCurrentRecordingSession, } from "../state"; import { normalizeVideoSourcePath } from "../utils"; -import { replaceApprovedSessionLocalReadPaths } from "../project/manager"; +import { isPathInsideDirectory, replaceApprovedSessionLocalReadPaths } from "../project/manager"; import { getTelemetryPathForVideo, isAutoRecordingPath, @@ -351,14 +351,19 @@ export function registerProjectHandlers() { ipcMain.handle('delete-recording-file', async (_, 
filePath: string) => { try { - if (!filePath || !isAutoRecordingPath(filePath)) { + if (!filePath) { return { success: false, error: 'Only auto-generated recordings can be deleted' }; } - await fs.unlink(filePath); + const resolvedPath = await fs.realpath(filePath).catch(() => path.resolve(filePath)); + const recordingsDir = await getRecordingsDir(); + if (!isPathInsideDirectory(resolvedPath, recordingsDir) || !isAutoRecordingPath(resolvedPath)) { + return { success: false, error: 'Only auto-generated recordings can be deleted' }; + } + await fs.unlink(resolvedPath); // Also delete the cursor telemetry sidecar if it exists - const telemetryPath = getTelemetryPathForVideo(filePath); + const telemetryPath = getTelemetryPathForVideo(resolvedPath); await fs.unlink(telemetryPath).catch(() => {}); - if (currentVideoPath === filePath) { + if (currentVideoPath === resolvedPath) { setCurrentVideoPath(null); setCurrentRecordingSession(null); } diff --git a/electron/ipc/register/recording.ts b/electron/ipc/register/recording.ts index 5be6495a..83d4b27b 100644 --- a/electron/ipc/register/recording.ts +++ b/electron/ipc/register/recording.ts @@ -1040,17 +1040,25 @@ export function registerRecordingHandlers( ipcMain.handle('get-recorded-video-path', async () => { try { const recordingsDir = await getRecordingsDir() - const files = await fs.readdir(recordingsDir) - const videoFiles = files.filter(file => /\.(webm|mov|mp4)$/i.test(file)) - - if (videoFiles.length === 0) { + const entries = await fs.readdir(recordingsDir, { withFileTypes: true }) + const candidates = await Promise.all( + entries + .filter((entry) => entry.isFile() && /^recording-\d+\.(webm|mov|mp4)$/i.test(entry.name)) + .map(async (entry) => { + const fullPath = path.join(recordingsDir, entry.name) + const stat = await fs.stat(fullPath).catch(() => null) + return stat ? 
{ path: fullPath, mtimeMs: stat.mtimeMs } : null + }), + ) + const latestVideo = candidates + .filter((candidate): candidate is { path: string; mtimeMs: number } => candidate !== null) + .sort((left, right) => right.mtimeMs - left.mtimeMs)[0] + + if (!latestVideo) { return { success: false, message: 'No recorded video found' } } - - const latestVideo = videoFiles.sort().reverse()[0] - const videoPath = path.join(recordingsDir, latestVideo) - - return { success: true, path: videoPath } + + return { success: true, path: latestVideo.path } } catch (error) { console.error('Failed to get video path:', error) return { success: false, message: 'Failed to get video path', error: String(error) } diff --git a/electron/ipc/register/sources.ts b/electron/ipc/register/sources.ts index 81eb82da..cb24341b 100644 --- a/electron/ipc/register/sources.ts +++ b/electron/ipc/register/sources.ts @@ -283,12 +283,23 @@ export function registerSourceHandlers({ // ── 1. Bring window to front ── if (isWindow && process.platform === "darwin") { - const appName = source.appName || source.name?.split(" — ")[0]?.trim(); + const rawAppName = source.appName || source.name?.split(" — ")[0]?.trim(); + const appName = + rawAppName && /^[\w .&()+'-]{1,64}$/.test(rawAppName) ? 
rawAppName : null; if (appName) { try { await execFileAsync( "osascript", - ["-e", `tell application "${appName}" to activate`], + [ + "-e", + "on run argv", + "-e", + "tell application (item 1 of argv) to activate", + "-e", + "end run", + "--", + appName, + ], { timeout: 2000 }, ); await new Promise((resolve) => setTimeout(resolve, 350)); @@ -332,13 +343,23 @@ export function registerSourceHandlers({ bounds = getDisplayBoundsForSource(source); } + if (!bounds || bounds.width <= 0 || bounds.height <= 0) { + const primaryBounds = getScreen().getPrimaryDisplay().bounds; + if (primaryBounds.width <= 0 || primaryBounds.height <= 0) { + return { success: false }; + } + bounds = primaryBounds; + } + + const resolvedBounds = bounds; + // ── 3. Show traveling wave highlight ── const pad = 6; const highlightWin = new BrowserWindow({ - x: bounds.x - pad, - y: bounds.y - pad, - width: bounds.width + pad * 2, - height: bounds.height + pad * 2, + x: resolvedBounds.x - pad, + y: resolvedBounds.y - pad, + width: resolvedBounds.width + pad * 2, + height: resolvedBounds.height + pad * 2, frame: false, transparent: true, alwaysOnTop: true, diff --git a/electron/ipc/state.ts b/electron/ipc/state.ts index 75b83644..7fa98176 100644 --- a/electron/ipc/state.ts +++ b/electron/ipc/state.ts @@ -90,7 +90,7 @@ export let cachedNativeMacWindowSources: import("./types").NativeMacWindowSource export let cachedNativeMacWindowSourcesAtMs = 0; // ── Native video export ─────────────────────────────────────────────────────── -export let cachedNativeVideoEncoder: { ffmpegPath: string; encoderName: string } | null = null; +export let cachedNativeVideoEncoder: { ffmpegPath: string; encodingMode: string; encoderName: string } | null = null; // ── Native helper migration ─────────────────────────────────────────────────── export let nativeHelperMigrationPromise: Promise | null = null; @@ -164,6 +164,6 @@ export function setWindowBoundsCaptureInterval(v: NodeJS.Timeout | null) { windo export function 
setCachedNativeMacWindowSources(v: import("./types").NativeMacWindowSource[] | null) { cachedNativeMacWindowSources = v; } export function setCachedNativeMacWindowSourcesAtMs(v: number) { cachedNativeMacWindowSourcesAtMs = v; } -export function setCachedNativeVideoEncoder(v: { ffmpegPath: string; encoderName: string } | null) { cachedNativeVideoEncoder = v; } +export function setCachedNativeVideoEncoder(v: { ffmpegPath: string; encodingMode: string; encoderName: string } | null) { cachedNativeVideoEncoder = v; } export function setNativeHelperMigrationPromise(v: Promise | null) { nativeHelperMigrationPromise = v; } From 528a31b16f80625fcf54d2097589c5f7bc3f4998 Mon Sep 17 00:00:00 2001 From: webadderall <131426131+webadderall@users.noreply.github.com> Date: Fri, 17 Apr 2026 21:25:13 +1000 Subject: [PATCH 6/6] fix: clean up ipc edge cases --- electron/ipc/cursor/interaction.ts | 2 ++ electron/ipc/project/manager.ts | 13 +++++++++++-- electron/ipc/project/session.ts | 7 +++++-- electron/ipc/register/project.ts | 8 ++++++-- electron/ipc/register/recording.ts | 5 +++++ electron/ipc/register/sources.ts | 9 ++++++++- 6 files changed, 37 insertions(+), 7 deletions(-) diff --git a/electron/ipc/cursor/interaction.ts b/electron/ipc/cursor/interaction.ts index e0b501e1..37cfc633 100644 --- a/electron/ipc/cursor/interaction.ts +++ b/electron/ipc/cursor/interaction.ts @@ -97,6 +97,8 @@ export async function startInteractionCapture() { return; } + stopInteractionCapture(); + try { const hook = loadUiohookModule(); console.log( diff --git a/electron/ipc/project/manager.ts b/electron/ipc/project/manager.ts index 9859db0b..48541e8e 100644 --- a/electron/ipc/project/manager.ts +++ b/electron/ipc/project/manager.ts @@ -298,8 +298,17 @@ export async function listProjectLibraryEntries() { export async function loadProjectFromPath(projectPath: string) { const normalizedPath = normalizePath(projectPath); - const content = await fs.readFile(normalizedPath, "utf-8"); - const project = 
JSON.parse(content); + let project: unknown; + try { + const content = await fs.readFile(normalizedPath, "utf-8"); + project = JSON.parse(content); + } catch (error) { + return { + success: false, + canceled: false, + message: `Failed to read project file: ${error instanceof Error ? error.message : String(error)}`, + }; + } const mediaSources = await resolveProjectMediaSources(project); if (!mediaSources.success) { diff --git a/electron/ipc/project/session.ts b/electron/ipc/project/session.ts index 995f7193..2f0001c1 100644 --- a/electron/ipc/project/session.ts +++ b/electron/ipc/project/session.ts @@ -70,11 +70,14 @@ export async function resolveRecordingSessionManifest( } const webcamPath = path.join(path.dirname(normalizedVideoPath), webcamFileName); - await fs.access(webcamPath, fsConstants.F_OK); + const webcamExists = await fs + .access(webcamPath, fsConstants.F_OK) + .then(() => true) + .catch(() => false); return { videoPath: normalizedVideoPath, - webcamPath, + webcamPath: webcamExists ? 
webcamPath : null, timeOffsetMs: normalizeRecordingTimeOffsetMs(parsed.timeOffsetMs), }; } catch { diff --git a/electron/ipc/register/project.ts b/electron/ipc/register/project.ts index 2e9836d9..cf9b6208 100644 --- a/electron/ipc/register/project.ts +++ b/electron/ipc/register/project.ts @@ -355,7 +355,8 @@ export function registerProjectHandlers() { return { success: false, error: 'Only auto-generated recordings can be deleted' }; } const resolvedPath = await fs.realpath(filePath).catch(() => path.resolve(filePath)); - const recordingsDir = await getRecordingsDir(); + const recordingsDirRaw = await getRecordingsDir(); + const recordingsDir = await fs.realpath(recordingsDirRaw).catch(() => path.resolve(recordingsDirRaw)); if (!isPathInsideDirectory(resolvedPath, recordingsDir) || !isAutoRecordingPath(resolvedPath)) { return { success: false, error: 'Only auto-generated recordings can be deleted' }; } @@ -363,7 +364,10 @@ export function registerProjectHandlers() { // Also delete the cursor telemetry sidecar if it exists const telemetryPath = getTelemetryPathForVideo(resolvedPath); await fs.unlink(telemetryPath).catch(() => {}); - if (currentVideoPath === resolvedPath) { + const currentResolved = currentVideoPath + ? 
await fs.realpath(currentVideoPath).catch(() => currentVideoPath) + : null; + if (currentResolved === resolvedPath) { setCurrentVideoPath(null); setCurrentRecordingSession(null); } diff --git a/electron/ipc/register/recording.ts b/electron/ipc/register/recording.ts index 83d4b27b..c1551058 100644 --- a/electron/ipc/register/recording.ts +++ b/electron/ipc/register/recording.ts @@ -994,6 +994,11 @@ export function registerRecordingHandlers( return await finalizeStoredVideo(finalVideoPath) } catch (error) { console.error('Failed to stop FFmpeg recording:', error) + try { + ffmpegCaptureProcess?.kill() + } catch { + // ignore cleanup failures + } setFfmpegCaptureProcess(null) setFfmpegCaptureTargetPath(null) setFfmpegScreenRecordingActive(false) diff --git a/electron/ipc/register/sources.ts b/electron/ipc/register/sources.ts index cb24341b..84004fd0 100644 --- a/electron/ipc/register/sources.ts +++ b/electron/ipc/register/sources.ts @@ -432,7 +432,14 @@ body{background:transparent;overflow:hidden;width:100vw;height:100vh}
` - await highlightWin.loadURL(`data:text/html;charset=utf-8,${encodeURIComponent(html)}`) + try { + await highlightWin.loadURL(`data:text/html;charset=utf-8,${encodeURIComponent(html)}`) + } catch (loadError) { + if (!highlightWin.isDestroyed()) { + highlightWin.close() + } + throw loadError + } setTimeout(() => { if (!highlightWin.isDestroyed()) highlightWin.close()