diff --git a/packages/producer/src/services/render/stages/assembleStage.ts b/packages/producer/src/services/render/stages/assembleStage.ts new file mode 100644 index 000000000..0697c5322 --- /dev/null +++ b/packages/producer/src/services/render/stages/assembleStage.ts @@ -0,0 +1,80 @@ +/** + * assembleStage — Stage 6 of `executeRenderJob`. Final mux + faststart. + * + * Skipped entirely for png-sequence (there's no container to mux; the + * frames were copied directly to `outputPath` by `encodeStage`). + * + * When the composition has audio, runs `muxVideoWithAudio(videoOnlyPath, + * audioOutputPath, outputPath)`. When it doesn't, runs + * `applyFaststart(videoOnlyPath, outputPath)` to move the `moov` atom to + * the front so the file plays from a partial download. + * + * Hard constraints preserved verbatim: + * - The "Assembling final video" `updateJobStatus` payload fires at + * 90% at the start of the stage. + * - "Audio muxing failed: " / "Faststart failed: " throw + * verbatim on the respective `success: false` results. + */ + +import { applyFaststart, muxVideoWithAudio } from "@hyperframes/engine"; +import { + updateJobStatus, + type ProgressCallback, + type RenderJob, +} from "../../renderOrchestrator.js"; + +export interface AssembleStageInput { + job: RenderJob; + /** Encoded video produced by `encodeStage` or `captureStreamingStage`. */ + videoOnlyPath: string; + /** Mixed audio path (only read when `hasAudio` is true). */ + audioOutputPath: string; + /** Final on-disk output. */ + outputPath: string; + hasAudio: boolean; + abortSignal: AbortSignal | undefined; + assertNotAborted: () => void; + onProgress?: ProgressCallback; +} + +export interface AssembleStageResult { + /** Wall-clock ms for the assemble phase. 
*/ + assembleMs: number; +} + +export async function runAssembleStage(input: AssembleStageInput): Promise<AssembleStageResult> { + const { + job, + videoOnlyPath, + audioOutputPath, + outputPath, + hasAudio, + abortSignal, + assertNotAborted, + onProgress, + } = input; + + const stage6Start = Date.now(); + updateJobStatus(job, "assembling", "Assembling final video", 90, onProgress); + + if (hasAudio) { + const muxResult = await muxVideoWithAudio( + videoOnlyPath, + audioOutputPath, + outputPath, + abortSignal, + ); + assertNotAborted(); + if (!muxResult.success) { + throw new Error(`Audio muxing failed: ${muxResult.error}`); + } + } else { + const faststartResult = await applyFaststart(videoOnlyPath, outputPath, abortSignal); + assertNotAborted(); + if (!faststartResult.success) { + throw new Error(`Faststart failed: ${faststartResult.error}`); + } + } + + return { assembleMs: Date.now() - stage6Start }; +} diff --git a/packages/producer/src/services/render/stages/encodeStage.ts b/packages/producer/src/services/render/stages/encodeStage.ts new file mode 100644 index 000000000..8e883103a --- /dev/null +++ b/packages/producer/src/services/render/stages/encodeStage.ts @@ -0,0 +1,174 @@ +/** + * encodeStage — Stage 5 of `executeRenderJob`. Two paths share the stage: + * + * 1. png-sequence: no encoder. Captured PNGs are renamed to + * `frame_NNNNNN.png` and copied to `outputPath`. Audio (if any) is + * written as an `audio.aac` sidecar. + * 2. mp4 / webm / mov: invokes `encodeFramesFromDir` (or the chunked- + * concat variant when `enableChunkedEncode` is on) to produce + * `videoOnlyPath`. The mux + faststart pass lives in `assembleStage`. + * + * Skipped entirely when the streaming-encode fusion path + * (`captureStreamingStage`) already produced `videoOnlyPath` — the + * sequencer gates the call on `!streamingHandled`. + * + * Hard constraints preserved verbatim: + * - The "Writing PNG sequence" / "Encoding video" `updateJobStatus` + * payload fires at 75% from inside the stage.
+ * - The png-sequence path throws "png-sequence output requested but no + * PNGs were captured to ..." if `framesDir` is empty. + * - The png-sequence audio sidecar is only written when + * `hasAudio && existsSync(audioOutputPath)`. + * - For encoded output, `enableChunkedEncode` selects + * `encodeFramesChunkedConcat` vs `encodeFramesFromDir` — same + * branch + same args. + * - `Encoding failed: ` throws on the encoder's + * `success: false`. + */ + +import { copyFileSync, existsSync, mkdirSync, readdirSync } from "node:fs"; +import { join } from "node:path"; +import { + encodeFramesChunkedConcat, + encodeFramesFromDir, + getEncoderPreset, +} from "@hyperframes/engine"; +import type { Fps } from "@hyperframes/core"; +import type { ProducerLogger } from "../../../logger.js"; +import { + updateJobStatus, + type ProgressCallback, + type RenderJob, +} from "../../renderOrchestrator.js"; + +export interface EncodeStageInput { + job: RenderJob; + log: ProducerLogger; + /** Output path: a directory for png-sequence, a file for everything else. */ + outputPath: string; + /** Where captured frames live on disk. */ + framesDir: string; + /** Encoded video output (ignored on the png-sequence path). */ + videoOnlyPath: string; + /** Output dimensions (post-deviceScaleFactor). */ + width: number; + height: number; + fps: Fps; + /** True when the output format requires an alpha channel; selects frame extension. */ + needsAlpha: boolean; + /** True iff the composition has audio. Drives the sidecar copy. */ + hasAudio: boolean; + /** Path to the mixed audio (only read when `hasAudio` is true). */ + audioOutputPath: string; + /** Mp4 vs png-sequence vs … gates the entire stage branch. */ + isPngSequence: boolean; + /** Encoder preset (codec, preset, pixelFormat, hdr). Only used on the non-png path. 
*/ + preset: ReturnType<typeof getEncoderPreset>; + effectiveQuality: number; + effectiveBitrate: string | undefined; + useGpu: boolean | undefined; + /** Producer config — enables the chunked-concat encoder when on. */ + enableChunkedEncode: boolean; + chunkedEncodeSize: number; + abortSignal: AbortSignal | undefined; + assertNotAborted: () => void; + onProgress?: ProgressCallback; +} + +export interface EncodeStageResult { + /** Wall-clock ms for the encode (or png-copy) phase. */ + encodeMs: number; +} + +export async function runEncodeStage(input: EncodeStageInput): Promise<EncodeStageResult> { + const { + job, + log, + outputPath, + framesDir, + videoOnlyPath, + width, + height, + fps, + needsAlpha, + hasAudio, + audioOutputPath, + isPngSequence, + preset, + effectiveQuality, + effectiveBitrate, + useGpu, + enableChunkedEncode, + chunkedEncodeSize, + abortSignal, + assertNotAborted, + onProgress, + } = input; + + const stage5Start = Date.now(); + + if (isPngSequence) { + // ── Stage 5 (png-sequence): copy captured PNGs to outputDir ────── + // No encoder, no mux, no faststart — captured frames already carry + // alpha and are the deliverable. We rename to `frame_NNNNNN.png` + // (zero-padded) so consumers (After Effects, Nuke, Fusion, ffmpeg + // image2 demuxer) can glob-import without surprises. + updateJobStatus(job, "encoding", "Writing PNG sequence", 75, onProgress); + if (!existsSync(outputPath)) mkdirSync(outputPath, { recursive: true }); + const captured = readdirSync(framesDir) + .filter((name) => name.endsWith(".png")) + .sort(); + if (captured.length === 0) { + throw new Error( + `[Render] png-sequence output requested but no PNGs were captured to ${framesDir}`, + ); + } + captured.forEach((name, i) => { + const dst = join(outputPath, `frame_${String(i + 1).padStart(6, "0")}.png`); + copyFileSync(join(framesDir, name), dst); + }); + if (hasAudio && existsSync(audioOutputPath)) { + // Sidecar audio for callers that need to re-mux later.
png-sequence + // has no container of its own, so this is the only place audio + // can land alongside the frames. + copyFileSync(audioOutputPath, join(outputPath, "audio.aac")); + log.info(`[Render] png-sequence: audio.aac sidecar written to ${outputPath}/audio.aac`); + } + return { encodeMs: Date.now() - stage5Start }; + } + + // ── Stage 5: Encode ─────────────────────────────────────────────── + updateJobStatus(job, "encoding", "Encoding video", 75, onProgress); + + const frameExt = needsAlpha ? "png" : "jpg"; + const framePattern = `frame_%06d.${frameExt}`; + const encoderOpts = { + fps, + width, + height, + codec: preset.codec, + preset: preset.preset, + quality: effectiveQuality, + bitrate: effectiveBitrate, + pixelFormat: preset.pixelFormat, + useGpu, + hdr: preset.hdr, + }; + const encodeResult = enableChunkedEncode + ? await encodeFramesChunkedConcat( + framesDir, + framePattern, + videoOnlyPath, + encoderOpts, + chunkedEncodeSize, + abortSignal, + ) + : await encodeFramesFromDir(framesDir, framePattern, videoOnlyPath, encoderOpts, abortSignal); + assertNotAborted(); + + if (!encodeResult.success) { + throw new Error(`Encoding failed: ${encodeResult.error}`); + } + + return { encodeMs: Date.now() - stage5Start }; +} diff --git a/packages/producer/src/services/renderOrchestrator.ts b/packages/producer/src/services/renderOrchestrator.ts index bbe9f538e..ccdffa765 100644 --- a/packages/producer/src/services/renderOrchestrator.ts +++ b/packages/producer/src/services/renderOrchestrator.ts @@ -45,10 +45,6 @@ import { type CaptureSession, type BeforeCaptureHook, createVideoFrameInjector, - encodeFramesFromDir, - encodeFramesChunkedConcat, - muxVideoWithAudio, - applyFaststart, getEncoderPreset, calculateOptimalWorkers, distributeFrames, @@ -86,6 +82,8 @@ import { runAudioStage } from "./render/stages/audioStage.js"; import { runCaptureStage } from "./render/stages/captureStage.js"; import { runCaptureStreamingStage } from 
"./render/stages/captureStreamingStage.js"; import { runCaptureHdrStage } from "./render/stages/captureHdrStage.js"; +import { runEncodeStage } from "./render/stages/encodeStage.js"; +import { runAssembleStage } from "./render/stages/assembleStage.js"; /** * Wrap a cleanup operation so it never throws, but logs any failure. @@ -2436,78 +2434,30 @@ export async function executeRenderJob( perfStages.captureMs = Date.now() - stage4Start; - if (isPngSequence) { - // ── Stage 5 (png-sequence): copy captured PNGs to outputDir ────── - // No encoder, no mux, no faststart — captured frames already carry - // alpha and are the deliverable. We rename to `frame_NNNNNN.png` - // (zero-padded) so consumers (After Effects, Nuke, Fusion, ffmpeg - // image2 demuxer) can globbed-import without surprises. - const stage5Start = Date.now(); - updateJobStatus(job, "encoding", "Writing PNG sequence", 75, onProgress); - if (!existsSync(outputPath)) mkdirSync(outputPath, { recursive: true }); - const captured = readdirSync(framesDir) - .filter((name) => name.endsWith(".png")) - .sort(); - if (captured.length === 0) { - throw new Error( - `[Render] png-sequence output requested but no PNGs were captured to ${framesDir}`, - ); - } - captured.forEach((name, i) => { - const dst = join(outputPath, `frame_${String(i + 1).padStart(6, "0")}.png`); - copyFileSync(join(framesDir, name), dst); - }); - if (hasAudio && existsSync(audioOutputPath)) { - // Sidecar audio for callers that need to re-mux later. png-sequence - // has no container of its own, so this is the only place audio - // can land alongside the frames. 
- copyFileSync(audioOutputPath, join(outputPath, "audio.aac")); - log.info(`[Render] png-sequence: audio.aac sidecar written to ${outputPath}/audio.aac`); - } - perfStages.encodeMs = Date.now() - stage5Start; - } else { - // ── Stage 5: Encode ─────────────────────────────────────────────── - const stage5Start = Date.now(); - updateJobStatus(job, "encoding", "Encoding video", 75, onProgress); - - const frameExt = needsAlpha ? "png" : "jpg"; - const framePattern = `frame_%06d.${frameExt}`; - const encoderOpts = { - fps: job.config.fps, - width, - height, - codec: preset.codec, - preset: preset.preset, - quality: effectiveQuality, - bitrate: effectiveBitrate, - pixelFormat: preset.pixelFormat, - useGpu: job.config.useGpu, - hdr: preset.hdr, - }; - const encodeResult = enableChunkedEncode - ? await encodeFramesChunkedConcat( - framesDir, - framePattern, - videoOnlyPath, - encoderOpts, - chunkedEncodeSize, - abortSignal, - ) - : await encodeFramesFromDir( - framesDir, - framePattern, - videoOnlyPath, - encoderOpts, - abortSignal, - ); - assertNotAborted(); - - if (!encodeResult.success) { - throw new Error(`Encoding failed: ${encodeResult.error}`); - } - - perfStages.encodeMs = Date.now() - stage5Start; - } + const encodeRes = await runEncodeStage({ + job, + log, + outputPath, + framesDir, + videoOnlyPath, + width, + height, + fps: job.config.fps, + needsAlpha, + hasAudio, + audioOutputPath, + isPngSequence, + preset, + effectiveQuality, + effectiveBitrate, + useGpu: job.config.useGpu, + enableChunkedEncode, + chunkedEncodeSize, + abortSignal, + assertNotAborted, + onProgress, + }); + perfStages.encodeMs = encodeRes.encodeMs; } } // end SDR capture paths block @@ -2528,29 +2478,17 @@ export async function executeRenderJob( // Skipped for png-sequence — there is no encoded video to mux/faststart. // The frames were copied directly to outputPath in Stage 5. 
if (!isPngSequence) { - const stage6Start = Date.now(); - updateJobStatus(job, "assembling", "Assembling final video", 90, onProgress); - - if (hasAudio) { - const muxResult = await muxVideoWithAudio( - videoOnlyPath, - audioOutputPath, - outputPath, - abortSignal, - ); - assertNotAborted(); - if (!muxResult.success) { - throw new Error(`Audio muxing failed: ${muxResult.error}`); - } - } else { - const faststartResult = await applyFaststart(videoOnlyPath, outputPath, abortSignal); - assertNotAborted(); - if (!faststartResult.success) { - throw new Error(`Faststart failed: ${faststartResult.error}`); - } - } - - perfStages.assembleMs = Date.now() - stage6Start; + const assembleRes = await runAssembleStage({ + job, + videoOnlyPath, + audioOutputPath, + outputPath, + hasAudio, + abortSignal, + assertNotAborted, + onProgress, + }); + perfStages.assembleMs = assembleRes.assembleMs; } // ── Complete ─────────────────────────────────────────────────────────