Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
80 changes: 80 additions & 0 deletions packages/producer/src/services/render/stages/assembleStage.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
/**
* assembleStage — Stage 6 of `executeRenderJob`. Final mux + faststart.
*
* Skipped entirely for png-sequence (there's no container to mux; the
* frames were copied directly to `outputPath` by `encodeStage`).
*
* When the composition has audio, runs `muxVideoWithAudio(videoOnlyPath,
* audioOutputPath, outputPath)`. When it doesn't, runs
* `applyFaststart(videoOnlyPath, outputPath)` to move the `moov` atom to
* the front so the file plays from a partial download.
*
* Hard constraints preserved verbatim:
* - The "Assembling final video" `updateJobStatus` payload fires at
* 90% at the start of the stage.
* - "Audio muxing failed: <err>" / "Faststart failed: <err>" throw
* verbatim on the respective `success: false` results.
*/

import { applyFaststart, muxVideoWithAudio } from "@hyperframes/engine";
import {
updateJobStatus,
type ProgressCallback,
type RenderJob,
} from "../../renderOrchestrator.js";

/** Inputs for `runAssembleStage` — paths, flags, and abort plumbing for the final mux. */
export interface AssembleStageInput {
  /** Job whose status is updated to "assembling" (90%) on stage entry. */
  job: RenderJob;
  /** Encoded video produced by `encodeStage` or `captureStreamingStage`. */
  videoOnlyPath: string;
  /** Mixed audio path (only read when `hasAudio` is true). */
  audioOutputPath: string;
  /** Final on-disk output. */
  outputPath: string;
  /** True iff the composition has audio; selects mux vs. faststart. */
  hasAudio: boolean;
  /** Forwarded to the engine call so an in-flight mux/faststart can be cancelled. */
  abortSignal: AbortSignal | undefined;
  /** Throws if the job was aborted; called after each awaited engine call. */
  assertNotAborted: () => void;
  /** Optional progress callback forwarded to `updateJobStatus`. */
  onProgress?: ProgressCallback;
}

/** Timing result returned by `runAssembleStage`. */
export interface AssembleStageResult {
  /** Wall-clock ms for the assemble phase. */
  assembleMs: number;
}

/**
 * Stage 6: produce the final deliverable from the encoded video.
 *
 * Emits the "Assembling final video" status update (90%) on entry, then
 * either muxes the video with the mixed audio (when `hasAudio`) or runs a
 * faststart pass to relocate the `moov` atom so the file plays from a
 * partial download. After each awaited engine call, `assertNotAborted`
 * re-checks cancellation before the result is inspected.
 *
 * @param input - Paths, flags, and abort plumbing for the final mux.
 * @returns Wall-clock duration of the assemble phase in milliseconds.
 * @throws Error with the verbatim "Audio muxing failed: …" or
 *   "Faststart failed: …" message when the engine reports `success: false`.
 */
export async function runAssembleStage(input: AssembleStageInput): Promise<AssembleStageResult> {
  const startedAt = Date.now();
  updateJobStatus(input.job, "assembling", "Assembling final video", 90, input.onProgress);

  if (input.hasAudio) {
    const result = await muxVideoWithAudio(
      input.videoOnlyPath,
      input.audioOutputPath,
      input.outputPath,
      input.abortSignal,
    );
    input.assertNotAborted();
    if (!result.success) {
      throw new Error(`Audio muxing failed: ${result.error}`);
    }
  } else {
    const result = await applyFaststart(input.videoOnlyPath, input.outputPath, input.abortSignal);
    input.assertNotAborted();
    if (!result.success) {
      throw new Error(`Faststart failed: ${result.error}`);
    }
  }

  return { assembleMs: Date.now() - startedAt };
}
174 changes: 174 additions & 0 deletions packages/producer/src/services/render/stages/encodeStage.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,174 @@
/**
* encodeStage — Stage 5 of `executeRenderJob`. Two paths share the stage:
*
* 1. png-sequence: no encoder. Captured PNGs are renamed to
* `frame_NNNNNN.png` and copied to `outputPath`. Audio (if any) is
* written as an `audio.aac` sidecar.
* 2. mp4 / webm / mov: invokes `encodeFramesFromDir` (or the chunked-
* concat variant when `enableChunkedEncode` is on) to produce
* `videoOnlyPath`. The mux + faststart pass lives in `assembleStage`.
*
* Skipped entirely when the streaming-encode fusion path
* (`captureStreamingStage`) already produced `videoOnlyPath` — the
* sequencer gates the call on `!streamingHandled`.
*
* Hard constraints preserved verbatim:
* - The "Writing PNG sequence" / "Encoding video" `updateJobStatus`
* payload fires at 75% from inside the stage.
* - The png-sequence path throws "png-sequence output requested but no
* PNGs were captured to ..." if `framesDir` is empty.
* - The png-sequence audio sidecar is only written when
* `hasAudio && existsSync(audioOutputPath)`.
* - For encoded output, `enableChunkedEncode` selects
* `encodeFramesChunkedConcat` vs `encodeFramesFromDir` — same
* branch + same args.
* - `Encoding failed: <err>` throws on the encoder's
* `success: false`.
*/

import { copyFileSync, existsSync, mkdirSync, readdirSync } from "node:fs";
import { join } from "node:path";
import {
encodeFramesChunkedConcat,
encodeFramesFromDir,
getEncoderPreset,
} from "@hyperframes/engine";
import type { Fps } from "@hyperframes/core";
import type { ProducerLogger } from "../../../logger.js";
import {
updateJobStatus,
type ProgressCallback,
type RenderJob,
} from "../../renderOrchestrator.js";

/** Inputs for `runEncodeStage` — capture locations, encoder settings, and abort plumbing. */
export interface EncodeStageInput {
  /** Job whose status is updated to "encoding" (75%) on stage entry. */
  job: RenderJob;
  /** Logger used for the png-sequence audio-sidecar info line. */
  log: ProducerLogger;
  /** Output path: a directory for png-sequence, a file for everything else. */
  outputPath: string;
  /** Where captured frames live on disk. */
  framesDir: string;
  /** Encoded video output (ignored on the png-sequence path). */
  videoOnlyPath: string;
  /** Output dimensions (post-deviceScaleFactor). */
  width: number;
  height: number;
  /** Composition frame rate, forwarded to the encoder. */
  fps: Fps;
  /** True when the output format requires an alpha channel; selects frame extension. */
  needsAlpha: boolean;
  /** True iff the composition has audio. Drives the sidecar copy. */
  hasAudio: boolean;
  /** Path to the mixed audio (only read when `hasAudio` is true). */
  audioOutputPath: string;
  /** Mp4 vs png-sequence vs … gates the entire stage branch. */
  isPngSequence: boolean;
  /** Encoder preset (codec, preset, pixelFormat, hdr). Only used on the non-png path. */
  preset: ReturnType<typeof getEncoderPreset>;
  /** Resolved quality value forwarded to the encoder as-is. */
  effectiveQuality: number;
  /** Resolved bitrate forwarded to the encoder; undefined presumably lets it default — confirm against encoder. */
  effectiveBitrate: string | undefined;
  /** GPU-encode flag forwarded to the encoder as-is. */
  useGpu: boolean | undefined;
  /** Producer config — enables the chunked-concat encoder when on. */
  enableChunkedEncode: boolean;
  /** Chunk size passed to `encodeFramesChunkedConcat` (only read when chunked encode is on). */
  chunkedEncodeSize: number;
  /** Forwarded to the encoder so an in-flight encode can be cancelled. */
  abortSignal: AbortSignal | undefined;
  /** Throws if the job was aborted; called after the awaited encode. */
  assertNotAborted: () => void;
  /** Optional progress callback forwarded to `updateJobStatus`. */
  onProgress?: ProgressCallback;
}

/** Timing result returned by `runEncodeStage`. */
export interface EncodeStageResult {
  /** Wall-clock ms for the encode (or png-copy) phase. */
  encodeMs: number;
}

/**
 * Stage 5: turn captured frames into the deliverable.
 *
 * png-sequence path: no encoder runs. Captured PNGs are renamed to a
 * zero-padded `frame_NNNNNN.png` scheme and copied into `outputPath`
 * (created if missing); when the composition has audio and the mixed
 * file exists, an `audio.aac` sidecar lands next to the frames.
 *
 * Encoded path (mp4 / webm / mov): builds the encoder options from the
 * preset and resolved quality/bitrate, then dispatches to
 * `encodeFramesChunkedConcat` or `encodeFramesFromDir` depending on
 * `enableChunkedEncode`.
 *
 * @param input - Capture locations, encoder settings, and abort plumbing.
 * @returns Wall-clock duration of the encode (or png-copy) phase in ms.
 * @throws Error when no PNGs were captured on the png-sequence path, or
 *   with the verbatim "Encoding failed: …" message when the encoder
 *   reports `success: false`.
 */
export async function runEncodeStage(input: EncodeStageInput): Promise<EncodeStageResult> {
  const startedAt = Date.now();

  if (input.isPngSequence) {
    // ── Stage 5 (png-sequence): copy captured PNGs to outputDir ──────
    // No encoder, no mux, no faststart — captured frames already carry
    // alpha and are the deliverable. We rename to `frame_NNNNNN.png`
    // (zero-padded) so consumers (After Effects, Nuke, Fusion, ffmpeg
    // image2 demuxer) can glob-import without surprises.
    updateJobStatus(input.job, "encoding", "Writing PNG sequence", 75, input.onProgress);
    if (!existsSync(input.outputPath)) {
      mkdirSync(input.outputPath, { recursive: true });
    }
    const pngNames = readdirSync(input.framesDir)
      .filter((entry) => entry.endsWith(".png"))
      .sort();
    if (pngNames.length === 0) {
      throw new Error(
        `[Render] png-sequence output requested but no PNGs were captured to ${input.framesDir}`,
      );
    }
    for (const [index, name] of pngNames.entries()) {
      const target = join(input.outputPath, `frame_${String(index + 1).padStart(6, "0")}.png`);
      copyFileSync(join(input.framesDir, name), target);
    }
    if (input.hasAudio && existsSync(input.audioOutputPath)) {
      // Sidecar audio for callers that need to re-mux later. png-sequence
      // has no container of its own, so a loose `audio.aac` next to the
      // frames is the only place audio can land.
      copyFileSync(input.audioOutputPath, join(input.outputPath, "audio.aac"));
      input.log.info(
        `[Render] png-sequence: audio.aac sidecar written to ${input.outputPath}/audio.aac`,
      );
    }
    return { encodeMs: Date.now() - startedAt };
  }

  // ── Stage 5: Encode ───────────────────────────────────────────────
  updateJobStatus(input.job, "encoding", "Encoding video", 75, input.onProgress);

  const extension = input.needsAlpha ? "png" : "jpg";
  const pattern = `frame_%06d.${extension}`;
  const options = {
    fps: input.fps,
    width: input.width,
    height: input.height,
    codec: input.preset.codec,
    preset: input.preset.preset,
    quality: input.effectiveQuality,
    bitrate: input.effectiveBitrate,
    pixelFormat: input.preset.pixelFormat,
    useGpu: input.useGpu,
    hdr: input.preset.hdr,
  };

  let result;
  if (input.enableChunkedEncode) {
    result = await encodeFramesChunkedConcat(
      input.framesDir,
      pattern,
      input.videoOnlyPath,
      options,
      input.chunkedEncodeSize,
      input.abortSignal,
    );
  } else {
    result = await encodeFramesFromDir(
      input.framesDir,
      pattern,
      input.videoOnlyPath,
      options,
      input.abortSignal,
    );
  }
  input.assertNotAborted();

  if (!result.success) {
    throw new Error(`Encoding failed: ${result.error}`);
  }

  return { encodeMs: Date.now() - startedAt };
}
Loading
Loading