Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions electron/ipc/recording/windowsFallbacks.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import { describe, expect, it } from "vitest";

import { shouldUseWindowsBrowserMicrophoneFallback } from "./windowsFallbacks";

describe("shouldUseWindowsBrowserMicrophoneFallback", () => {
  // Representative helper output containing the WASAPI init failure warning.
  const wasapiFailureOutput =
    "WARNING: Failed to initialize WASAPI mic capture\nRecording started";

  it("returns true when native Windows mic initialization fails", () => {
    const fallbackRequired = shouldUseWindowsBrowserMicrophoneFallback(
      wasapiFailureOutput,
      { capturesMicrophone: true },
    );

    expect(fallbackRequired).toBe(true);
  });

  it("returns false when microphone capture was not requested", () => {
    const fallbackRequired = shouldUseWindowsBrowserMicrophoneFallback(
      wasapiFailureOutput,
      { capturesMicrophone: false },
    );

    expect(fallbackRequired).toBe(false);
  });
});
11 changes: 11 additions & 0 deletions electron/ipc/recording/windowsFallbacks.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
// Warning line emitted by the native Windows capture helper when WASAPI
// microphone initialization fails. Matched verbatim against the helper's
// combined stdout/stderr output.
const WINDOWS_MIC_CAPTURE_INIT_WARNING = "WARNING: Failed to initialize WASAPI mic capture";

/**
 * Decides whether microphone audio should be captured via the browser
 * (getUserMedia fallback) instead of the native Windows capture helper.
 *
 * @param captureOutput - Combined stdout/stderr text collected from the
 *   native Windows capture helper process so far.
 * @param options - Recording options; `capturesMicrophone` indicates whether
 *   microphone capture was requested for this recording.
 * @returns `true` only when microphone capture was requested AND the helper
 *   output contains the WASAPI microphone initialization failure warning.
 */
export function shouldUseWindowsBrowserMicrophoneFallback(
  captureOutput: string,
  options?: { capturesMicrophone?: boolean },
): boolean {
  return (
    Boolean(options?.capturesMicrophone) &&
    captureOutput.includes(WINDOWS_MIC_CAPTURE_INIT_WARNING)
  );
}
42 changes: 28 additions & 14 deletions electron/ipc/register/recording.ts
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,7 @@ import {
attachWindowsCaptureLifecycle,
muxNativeWindowsVideoWithAudio,
} from "../recording/windows";
import { shouldUseWindowsBrowserMicrophoneFallback } from "../recording/windowsFallbacks";
import {
waitForNativeCaptureStart,
waitForNativeCaptureStop,
Expand Down Expand Up @@ -189,6 +190,9 @@ export function registerRecordingHandlers(
const recordingsDir = await getRecordingsDir()
const timestamp = Date.now()
const outputPath = path.join(recordingsDir, `recording-${timestamp}.mp4`)
let captureOutput = ''
let systemAudioPath: string | null = null
let microphonePath: string | null = null
const displayBounds = source?.id?.startsWith('window:') ? null : getDisplayBoundsForSource(source)

const config: Record<string, unknown> = {
Expand All @@ -197,20 +201,20 @@ export function registerRecordingHandlers(
}

if (options?.capturesSystemAudio) {
const audioPath = path.join(recordingsDir, `recording-${timestamp}.system.wav`)
systemAudioPath = path.join(recordingsDir, `recording-${timestamp}.system.wav`)
config.captureSystemAudio = true
config.audioOutputPath = audioPath
setWindowsSystemAudioPath(audioPath)
config.audioOutputPath = systemAudioPath
setWindowsSystemAudioPath(systemAudioPath)
}

if (options?.capturesMicrophone) {
const micPath = path.join(recordingsDir, `recording-${timestamp}.mic.wav`)
microphonePath = path.join(recordingsDir, `recording-${timestamp}.mic.wav`)
config.captureMic = true
config.micOutputPath = micPath
config.micOutputPath = microphonePath
if (options.microphoneLabel) {
config.micDeviceName = options.microphoneLabel
}
setWindowsMicAudioPath(micPath)
setWindowsMicAudioPath(microphonePath)
}
Comment on lines 210 to 218
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

Fallback may leave an orphan .mic.wav on disk.

Line 213 hands config.micOutputPath = microphonePath to the spawned native helper before it attempts WASAPI init. If the helper creates (or truncates) the file handle prior to failing init — even to 0 bytes — the fallback path on 278-281 only nulls the in-memory state and doesn't unlink the file. Downstream mux / sidecar discovery code that scans the recordings directory by filename pattern could then pick up an empty .mic.wav alongside the real browser-captured sidecar.

Consider best-effort removal of the file when fallback is required, and/or only persisting setWindowsMicAudioPath after a successful start.

🧹 Proposed cleanup on fallback
         if (microphoneFallbackRequired) {
+          const orphanMicPath = microphonePath
           microphonePath = null
           setWindowsMicAudioPath(null)
+          if (orphanMicPath) {
+            void fs.unlink(orphanMicPath).catch(() => { /* best-effort */ })
+          }
         }

Also applies to: 274-281

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@electron/ipc/register/recording.ts` around lines 210 - 218, The current code
sets config.micOutputPath and calls setWindowsMicAudioPath before the native
helper successfully starts, which can leave an empty .mic.wav file if WASAPI
init fails; change the flow so setWindowsMicAudioPath (and persisting
micOutputPath) only happens after the helper reports successful start, or if you
keep the current order, perform best-effort cleanup in the fallback path by
checking for and unlinking the microphonePath when handling the mic-init failure
(the code that nulls config.captureMic/config.micOutputPath). Update the logic
around config.micOutputPath, setWindowsMicAudioPath, and the fallback cleanup
block so any created microphonePath file is removed when fallback occurs.


const windowId = parseWindowId(source?.id)
Expand Down Expand Up @@ -242,8 +246,8 @@ export function registerRecordingHandlers(
windowHandle: typeof config.windowHandle === 'number' ? config.windowHandle : null,
helperPath: exePath,
outputPath,
systemAudioPath: windowsSystemAudioPath,
microphonePath: windowsMicAudioPath,
systemAudioPath,
microphonePath,
})

setWindowsCaptureOutputBuffer('')
Expand All @@ -258,13 +262,23 @@ export function registerRecordingHandlers(
attachWindowsCaptureLifecycle(wcProc)

wcProc.stdout.on('data', (chunk: Buffer) => {
setWindowsCaptureOutputBuffer(windowsCaptureOutputBuffer + chunk.toString())
captureOutput += chunk.toString()
setWindowsCaptureOutputBuffer(captureOutput)
})
wcProc.stderr.on('data', (chunk: Buffer) => {
setWindowsCaptureOutputBuffer(windowsCaptureOutputBuffer + chunk.toString())
captureOutput += chunk.toString()
setWindowsCaptureOutputBuffer(captureOutput)
})

await waitForWindowsCaptureStart(wcProc)
const microphoneFallbackRequired = shouldUseWindowsBrowserMicrophoneFallback(
captureOutput,
options,
)
if (microphoneFallbackRequired) {
microphonePath = null
setWindowsMicAudioPath(null)
}
setWindowsNativeCaptureActive(true)
setNativeScreenRecordingActive(true)
recordNativeCaptureDiagnostics({
Expand All @@ -277,11 +291,11 @@ export function registerRecordingHandlers(
windowHandle: typeof config.windowHandle === 'number' ? config.windowHandle : null,
helperPath: exePath,
outputPath,
systemAudioPath: windowsSystemAudioPath,
microphonePath: windowsMicAudioPath,
processOutput: windowsCaptureOutputBuffer.trim() || undefined,
systemAudioPath,
microphonePath,
processOutput: captureOutput.trim() || undefined,
})
return { success: true }
return { success: true, microphoneFallbackRequired }
} catch (error) {
recordNativeCaptureDiagnostics({
backend: 'windows-wgc',
Expand Down
46 changes: 36 additions & 10 deletions src/components/video-editor/VideoEditor.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@ import {
VideoExporter,
} from "@/lib/exporter";
import { resolveMediaElementSource } from "@/lib/exporter/localMediaSource";
import { resolveSourceAudioFallbackPaths } from "@/lib/exporter/sourceAudioFallback";
import {
clampMediaTimeToDuration,
estimateCompanionAudioStartDelaySeconds,
Expand Down Expand Up @@ -237,6 +238,7 @@ async function writeSmokeExportReport(
}

const DEFAULT_MP4_EXPORT_FRAME_RATE: ExportMp4FrameRate = 30;
const SOURCE_AUDIO_FALLBACK_TOAST_ID = "source-audio-fallback-error";

function getEncodingModeBitrateMultiplier(encodingMode: ExportEncodingMode): number {
switch (encodingMode) {
Expand Down Expand Up @@ -1202,7 +1204,15 @@ export default function VideoEditor() {
() => videoSourcePath ?? (videoPath ? fromFileUrl(videoPath) : null),
[videoPath, videoSourcePath],
);
const hasSourceAudioFallback = sourceAudioFallbackPaths.length > 0;
const {
hasEmbeddedSourceAudio,
externalAudioPaths: previewSourceAudioFallbackPaths,
} = useMemo(
() => resolveSourceAudioFallbackPaths(currentSourcePath, sourceAudioFallbackPaths),
[currentSourcePath, sourceAudioFallbackPaths],
);
const shouldMutePreviewVideo =
!hasEmbeddedSourceAudio && previewSourceAudioFallbackPaths.length > 0;

useEffect(() => {
let cancelled = false;
Expand All @@ -1221,10 +1231,26 @@ export default function VideoEditor() {
if (cancelled) {
return;
}
setSourceAudioFallbackPaths(result.success ? (result.paths ?? []) : []);
} catch {
if (!result.success) {
setSourceAudioFallbackPaths([]);
toast.warning(
result.error
? `Could not load companion audio sources: ${summarizeErrorMessage(result.error)}`
: "Could not load companion audio sources. Playback and export may miss microphone audio.",
{ id: SOURCE_AUDIO_FALLBACK_TOAST_ID, duration: 10000 },
);
return;
}

toast.dismiss(SOURCE_AUDIO_FALLBACK_TOAST_ID);
setSourceAudioFallbackPaths(result.paths ?? []);
} catch (error) {
if (!cancelled) {
setSourceAudioFallbackPaths([]);
toast.warning(
`Could not load companion audio sources: ${summarizeErrorMessage(String(error))}`,
{ id: SOURCE_AUDIO_FALLBACK_TOAST_ID, duration: 10000 },
);
}
}
})();
Expand Down Expand Up @@ -3434,7 +3460,7 @@ export default function VideoEditor() {
useEffect(() => {
let cancelled = false;
const existing = sourceAudioElementsRef.current;
const currentIds = new Set(sourceAudioFallbackPaths);
const currentIds = new Set(previewSourceAudioFallbackPaths);

for (const [id, audio] of existing) {
if (!currentIds.has(id)) {
Expand All @@ -3447,7 +3473,7 @@ export default function VideoEditor() {
}
}

for (const audioPath of sourceAudioFallbackPaths) {
for (const audioPath of previewSourceAudioFallbackPaths) {
let audio = existing.get(audioPath);
if (!audio) {
audio = new Audio();
Expand Down Expand Up @@ -3483,14 +3509,14 @@ export default function VideoEditor() {
audio.volume = Math.max(0, Math.min(1, previewVolume));
}

if (sourceAudioFallbackPaths.length === 0) {
if (previewSourceAudioFallbackPaths.length === 0) {
lastSourceAudioSyncTimeRef.current = null;
}

return () => {
cancelled = true;
};
}, [previewVolume, sourceAudioFallbackPaths]);
}, [previewSourceAudioFallbackPaths, previewVolume]);

useEffect(() => {
return () => {
Expand Down Expand Up @@ -3558,7 +3584,7 @@ export default function VideoEditor() {
}, [isPlaying, currentTime, audioRegions, speedRegions]);

useEffect(() => {
if (sourceAudioFallbackPaths.length === 0) {
if (previewSourceAudioFallbackPaths.length === 0) {
lastSourceAudioSyncTimeRef.current = null;
return;
}
Expand Down Expand Up @@ -3610,7 +3636,7 @@ export default function VideoEditor() {
}

lastSourceAudioSyncTimeRef.current = currentTime;
}, [currentTime, duration, isPlaying, sourceAudioFallbackPaths, speedRegions]);
}, [currentTime, duration, isPlaying, previewSourceAudioFallbackPaths, speedRegions]);

const showExportSuccessToast = useCallback((filePath: string) => {
toast.success(`Exported successfully to ${filePath}`, {
Expand Down Expand Up @@ -5159,7 +5185,7 @@ export default function VideoEditor() {
cursorClickBounceDuration
}
cursorSway={cursorSway}
volume={hasSourceAudioFallback ? 0 : previewVolume}
volume={shouldMutePreviewVideo ? 0 : previewVolume}
/>
</div>
</div>
Expand Down
23 changes: 21 additions & 2 deletions src/components/video-editor/VideoPlayback.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -1458,7 +1458,11 @@ const VideoPlayback = forwardRef<VideoPlaybackRef, VideoPlaybackProps>(
blurFilter.resolution = app.renderer.resolution;
blurFilter.blur = 0;
const motionBlurFilter = new MotionBlurFilter([0, 0], 5, 0);
videoContainer.filters = [blurFilter, motionBlurFilter];
// Don't attach filters by default — the filter pipeline forces the video
// through an intermediate RenderTexture at renderer resolution, downsampling
// the native video and destroying detail. Filters are attached conditionally
// in the ticker only when zoom motion blur is actually active.
videoContainer.filters = null;
blurFilterRef.current = blurFilter;
motionBlurFilterRef.current = motionBlurFilter;

Expand Down Expand Up @@ -1543,6 +1547,7 @@ const VideoPlayback = forwardRef<VideoPlaybackRef, VideoPlaybackProps>(
const appliedTransform = applyZoomTransform({
cameraContainer,
blurFilter: blurFilterRef.current,
motionBlurFilter: motionBlurFilterRef.current,
stageSize: stageSizeRef.current,
baseMask: baseMaskRef.current,
zoomScale: state.scale,
Expand All @@ -1553,7 +1558,6 @@ const VideoPlayback = forwardRef<VideoPlaybackRef, VideoPlaybackProps>(
motionVector,
isPlaying: isPlayingRef.current,
motionBlurAmount: zoomMotionBlurRef.current,
motionBlurFilter: motionBlurFilterRef.current,
transformOverride: transform,
motionBlurState: motionBlurStateRef.current,
frameTimeMs: performance.now(),
Expand Down Expand Up @@ -1738,6 +1742,21 @@ const VideoPlayback = forwardRef<VideoPlaybackRef, VideoPlaybackProps>(
motionIntensity,
motionVector,
);

// Conditionally attach motion blur filter only when the camera is
// actually moving. When filters are attached, PixiJS routes the video
// through an intermediate RenderTexture at renderer resolution, which
// downsamples the native video and degrades preview quality.
// Hysteresis prevents flickering when motionIntensity oscillates near threshold.
const filtersActive = Array.isArray(videoContainer.filters) && videoContainer.filters.length > 0;
const cameraIsMoving = filtersActive ? motionIntensity > 0.002 : motionIntensity > 0.008;
const needsFilters = zoomMotionBlurRef.current > 0 && isPlayingRef.current && cameraIsMoving;
if (needsFilters && !filtersActive && motionBlurFilterRef.current) {
videoContainer.filters = [motionBlurFilterRef.current];
} else if (!needsFilters && filtersActive) {
videoContainer.filters = null;
}

applyWebcamBubbleLayout(animationStateRef.current.appliedScale || 1);

const timeMs = currentTimeRef.current;
Expand Down
44 changes: 43 additions & 1 deletion src/hooks/useScreenRecorder.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,10 @@ const WEBCAM_WIDTH = 1280;
const WEBCAM_HEIGHT = 720;
const WEBCAM_FRAME_RATE = 30;
const WEBCAM_SUFFIX = "-webcam";
const SOURCE_AUDIO_MUX_TOAST_ID = "recording-audio-mux-warning";
const MICROPHONE_FALLBACK_TOAST_ID = "recording-microphone-fallback";
const MICROPHONE_FALLBACK_ERROR_TOAST_ID = "recording-microphone-fallback-error";
const MICROPHONE_SIDECAR_ERROR_TOAST_ID = "recording-microphone-sidecar-error";
const LINUX_PORTAL_SOURCE: ProcessedDesktopSource = {
id: "screen:linux-portal",
name: "Linux Portal",
Expand Down Expand Up @@ -76,6 +80,14 @@ type UseScreenRecorderReturn = {
setCountdownDelay: (delay: number) => void;
};

function getErrorMessage(error: unknown) {
if (error instanceof Error && error.message) {
return error.message;
}

return String(error);
}

export function useScreenRecorder(): UseScreenRecorderReturn {
const [recording, setRecording] = useState(false);
const [paused, setPaused] = useState(false);
Expand Down Expand Up @@ -443,9 +455,22 @@ export function useScreenRecorder(): UseScreenRecorderReturn {

try {
const arrayBuffer = await micFallbackBlob.arrayBuffer();
await window.electronAPI.storeMicrophoneSidecar(arrayBuffer, finalPath);
const result = await window.electronAPI.storeMicrophoneSidecar(arrayBuffer, finalPath);
if (!result.success) {
const errorMessage =
result.error || "Failed to save the fallback microphone audio track";
console.warn("Failed to store microphone sidecar:", errorMessage);
toast.error(
`${errorMessage}. Recording was saved without the fallback microphone track.`,
{ id: MICROPHONE_SIDECAR_ERROR_TOAST_ID, duration: 10000 },
);
}
} catch (error) {
console.warn("Failed to store microphone sidecar:", error);
toast.error(
`${getErrorMessage(error)}. Recording was saved without the fallback microphone track.`,
{ id: MICROPHONE_SIDECAR_ERROR_TOAST_ID, duration: 10000 },
);
}
},
[],
Expand Down Expand Up @@ -676,6 +701,14 @@ export function useScreenRecorder(): UseScreenRecorderReturn {
await window.electronAPI.muxNativeWindowsRecording(pauseSegments);
if (!muxResult?.success) {
void logNativeCaptureDiagnostics("mux-native-windows-recording");
const warningMessage =
muxResult?.error ||
muxResult?.message ||
"Failed to finish the native Windows audio mux";
toast.warning(
`${warningMessage}. Recording was saved, but audio playback or export may be incomplete.`,
{ id: SOURCE_AUDIO_MUX_TOAST_ID, duration: 10000 },
);
}
finalPath = muxResult?.path ?? result.path;
}
Expand Down Expand Up @@ -967,6 +1000,11 @@ export function useScreenRecorder(): UseScreenRecorderReturn {
// When native mic capture is unavailable (macOS < 14), record mic
// via browser getUserMedia so it can be saved as a sidecar file.
if (nativeResult.microphoneFallbackRequired && microphoneEnabled) {
void logNativeCaptureDiagnostics("start-browser-microphone-fallback");
toast.warning(
"Native microphone capture is unavailable. Using browser microphone fallback for this recording.",
{ id: MICROPHONE_FALLBACK_TOAST_ID, duration: 8000 },
);
try {
const micStream = await navigator.mediaDevices.getUserMedia({
audio: microphoneDeviceId
Expand Down Expand Up @@ -996,6 +1034,10 @@ export function useScreenRecorder(): UseScreenRecorderReturn {
micFallbackRecorder.current = recorder;
} catch (micError) {
console.warn("Browser microphone fallback failed:", micError);
toast.error(
`${getErrorMessage(micError)}. Recording will continue without microphone audio.`,
{ id: MICROPHONE_FALLBACK_ERROR_TOAST_ID, duration: 10000 },
);
}
}

Expand Down
Loading