1 change: 0 additions & 1 deletion Cargo.toml
@@ -70,7 +70,6 @@ dbg_macro = "deny"
let_underscore_future = "deny"
unchecked_duration_subtraction = "deny"
collapsible_if = "deny"
-manual_is_multiple_of = "deny"
clone_on_copy = "deny"
redundant_closure = "deny"
ptr_arg = "deny"
Binary file added apps/desktop/core
Binary file not shown.
64 changes: 52 additions & 12 deletions apps/desktop/src/routes/editor/Editor.tsx
@@ -1,7 +1,7 @@
import { Button } from "@cap/ui-solid";
import { NumberField } from "@kobalte/core/number-field";
import { trackDeep } from "@solid-primitives/deep";
-import { throttle } from "@solid-primitives/scheduled";
+import { debounce, throttle } from "@solid-primitives/scheduled";
import { makePersisted } from "@solid-primitives/storage";
import { createMutation } from "@tanstack/solid-query";
import { convertFileSrc } from "@tauri-apps/api/core";
@@ -13,6 +13,7 @@ import {
createSignal,
Match,
on,
+onCleanup,
Show,
Switch,
} from "solid-js";
@@ -85,19 +86,49 @@ function Inner() {
const { project, editorState, setEditorState } = useEditorContext();

createTauriEventListener(events.editorStateChanged, (payload) => {
-renderFrame.clear();
+renderFrameThrottled.clear();
setEditorState("playbackTime", payload.playhead_position / FPS);
});

-const renderFrame = throttle((time: number) => {
-if (!editorState.playing) {
-events.renderFrameEvent.emit({
-frame_number: Math.max(Math.floor(time * FPS), 0),
-fps: FPS,
-resolution_base: OUTPUT_SIZE,
-});
+let rafId: number | null = null;
+let pendingFrameTime: number | null = null;
+
+const emitFrame = (time: number) => {
+events.renderFrameEvent.emit({
+frame_number: Math.max(Math.floor(time * FPS), 0),
+fps: FPS,
+resolution_base: OUTPUT_SIZE,
+});
+};
+
+const renderFrameThrottled = throttle((time: number) => {
+if (editorState.playing) return;
+
+if (rafId !== null) {
+pendingFrameTime = time;
+return;
+}
+
+rafId = requestAnimationFrame(() => {
+rafId = null;
+const frameTime = pendingFrameTime ?? time;
+pendingFrameTime = null;
+emitFrame(frameTime);
+});
+}, 1000 / 30);
+
+const renderFrameDebounced = debounce((time: number) => {
+if (editorState.playing) return;
+emitFrame(time);
+}, 50);
+
+onCleanup(() => {
+if (rafId !== null) {
+cancelAnimationFrame(rafId);
}
-}, 1000 / FPS);
+renderFrameThrottled.clear();
+renderFrameDebounced.clear();
+});

const frameNumberToRender = createMemo(() => {
const preview = editorState.previewTime;
@@ -108,14 +139,23 @@ function Inner() {
createEffect(
on(frameNumberToRender, (number) => {
if (editorState.playing) return;
-renderFrame(number);
+renderFrameThrottled(number);
}),
);

+let lastProjectUpdateTime = 0;
createEffect(
on(
() => trackDeep(project),
-() => renderFrame(editorState.playbackTime),
+() => {
+const now = performance.now();
+if (now - lastProjectUpdateTime < 100) {
+renderFrameDebounced(editorState.playbackTime);
+} else {
+renderFrameThrottled(editorState.playbackTime);
+}
+lastProjectUpdateTime = now;
+},
),
);

97 changes: 58 additions & 39 deletions apps/desktop/src/routes/editor/Timeline/ClipTrack.tsx
@@ -10,6 +10,7 @@ import {
createRoot,
createSignal,
For,
+Index,
Match,
onCleanup,
Show,
@@ -188,7 +189,7 @@ export function ClipTrack(
systemAudioWaveforms,
} = useEditorContext();

-const { secsPerPixel, duration } = useTimelineContext();
+const { secsPerPixel, duration, isSegmentVisible } = useTimelineContext();

const segments = (): Array<TimelineSegment> =>
project.timeline?.segments ?? [{ start: 0, end: duration(), timescale: 1 }];
@@ -204,6 +205,21 @@
return offsets;
});

+const visibleSegmentIndices = createMemo(() => {
+const segs = segments();
+const offsets = segmentOffsets();
+const visible: number[] = [];
+for (let i = 0; i < segs.length; i++) {
+const seg = segs[i];
+const segStart = offsets[i];
+const segEnd = segStart + (seg.end - seg.start) / seg.timescale;
+if (isSegmentVisible(segStart, segEnd)) {
+visible.push(i);
+}
+}
+return visible;
+});

function onHandleReleased() {
const { transform } = editorState.timeline;

@@ -223,8 +239,10 @@
onMouseEnter={() => setEditorState("timeline", "hoveredTrack", "clip")}
onMouseLeave={() => setEditorState("timeline", "hoveredTrack", null)}
>
-<For each={segments()}>
-{(segment, i) => {
+<Index each={visibleSegmentIndices()}>
+{(segmentIndex) => {
+const i = segmentIndex;
+const segment = () => segments()[i()];
const [startHandleDrag, setStartHandleDrag] = createSignal<null | {
offset: number;
initialStart: number;
@@ -235,14 +253,15 @@
const relativeSegment = createMemo(() => {
const ds = startHandleDrag();
const offset = ds?.offset ?? 0;
+const seg = segment();

return {
start: Math.max(prevDuration() + offset, 0),
end:
prevDuration() +
-(offset + (segment.end - segment.start)) / segment.timescale,
-timescale: segment.timescale,
-recordingSegment: segment.recordingSegment,
+(offset + (seg.end - seg.start)) / seg.timescale,
+timescale: seg.timescale,
+recordingSegment: seg.recordingSegment,
};
});

@@ -269,9 +288,10 @@
const isSelected = createMemo(() => {
const selection = editorState.timeline.selection;
if (!selection || selection.type !== "clip") return false;
+const seg = segment();

const segmentIndex = project.timeline?.segments?.findIndex(
-(s) => s.start === segment.start && s.end === segment.end,
+(s) => s.start === seg.start && s.end === seg.end,
);

if (segmentIndex === undefined || segmentIndex === -1) return false;
@@ -283,7 +303,7 @@
if (project.audio.micVolumeDb && project.audio.micVolumeDb < -30)
return;

-const idx = segment.recordingSegment ?? i();
+const idx = segment().recordingSegment ?? i();
return micWaveforms()?.[idx] ?? [];
};

@@ -294,7 +314,7 @@
)
return;

-const idx = segment.recordingSegment ?? i();
+const idx = segment().recordingSegment ?? i();
return systemAudioWaveforms()?.[idx] ?? [];
};

@@ -401,8 +421,9 @@
if (editorState.timeline.interactMode === "split") {
const rect = e.currentTarget.getBoundingClientRect();
const fraction = (e.clientX - rect.left) / rect.width;
+const seg = segment();

-const splitTime = fraction * (segment.end - segment.start);
+const splitTime = fraction * (seg.end - seg.start);

projectActions.splitClipSegment(prevDuration() + splitTime);
} else {
@@ -486,41 +507,41 @@
}
}}
>
-{segment.timescale === 1 && (
+{segment().timescale === 1 && (
<WaveformCanvas
micWaveform={micWaveform()}
systemWaveform={systemAudioWaveform()}
-segment={segment}
+segment={segment()}
/>
)}

-<Markings segment={segment} prevDuration={prevDuration()} />
+<Markings segment={segment()} prevDuration={prevDuration()} />

<SegmentHandle
position="start"
class="opacity-0 group-hover:opacity-100"
onMouseDown={(downEvent) => {
if (split()) return;
+const seg = segment();

-const initialStart = segment.start;
+const initialStart = seg.start;
setStartHandleDrag({
offset: 0,
initialStart,
});

const maxSegmentDuration =
editorInstance.recordings.segments[
-segment.recordingSegment ?? 0
+seg.recordingSegment ?? 0
].display.duration;

const availableTimelineDuration =
editorInstance.recordingDuration -
segments().reduce(
-(acc, segment, segmentI) =>
+(acc, s, segmentI) =>
segmentI === i()
? acc
-: acc +
-(segment.end - segment.start) / segment.timescale,
+: acc + (s.end - s.start) / s.timescale,
0,
);

@@ -532,24 +553,23 @@
const prevSegment = segments()[i() - 1];
const prevSegmentIsSameClip =
prevSegment?.recordingSegment !== undefined
-? prevSegment.recordingSegment ===
-segment.recordingSegment
+? prevSegment.recordingSegment === seg.recordingSegment
: false;

function update(event: MouseEvent) {
const newStart =
initialStart +
(event.clientX - downEvent.clientX) *
secsPerPixel() *
-segment.timescale;
+seg.timescale;

const clampedStart = Math.min(
Math.max(
newStart,
prevSegmentIsSameClip ? prevSegment.end : 0,
-segment.end - maxDuration,
+seg.end - maxDuration,
),
-segment.end - 1,
+seg.end - 1,
);

setStartHandleDrag({
@@ -590,22 +610,23 @@
<SegmentContent class="relative justify-center items-center">
{(() => {
const ctx = useSegmentContext();
+const seg = segment();

return (
<Show when={ctx.width() > 100}>
<div class="flex flex-col gap-1 justify-center items-center text-xs whitespace-nowrap text-gray-12">
<span class="text-white/70">
{hasMultipleRecordingSegments()
-? `Clip ${segment.recordingSegment}`
+? `Clip ${seg.recordingSegment}`
: "Clip"}
</span>
<div class="flex gap-1 items-center text-md dark:text-gray-12 text-gray-1">
<IconLucideClock class="size-3.5" />{" "}
-{formatTime(segment.end - segment.start)}
-<Show when={segment.timescale !== 1}>
+{formatTime(seg.end - seg.start)}
+<Show when={seg.timescale !== 1}>
<div class="w-0.5" />
<IconLucideFastForward class="size-3" />
-{segment.timescale}x
+{seg.timescale}x
</Show>
</div>
</div>
@@ -617,37 +638,36 @@
position="end"
class="opacity-0 group-hover:opacity-100"
onMouseDown={(downEvent) => {
-const end = segment.end;
+const seg = segment();
+const end = seg.end;

if (split()) return;
const maxSegmentDuration =
editorInstance.recordings.segments[
-segment.recordingSegment ?? 0
+seg.recordingSegment ?? 0
].display.duration;

const availableTimelineDuration =
editorInstance.recordingDuration -
segments().reduce(
-(acc, segment, segmentI) =>
+(acc, s, segmentI) =>
segmentI === i()
? acc
-: acc +
-(segment.end - segment.start) / segment.timescale,
+: acc + (s.end - s.start) / s.timescale,
0,
);

const nextSegment = segments()[i() + 1];
const nextSegmentIsSameClip =
nextSegment?.recordingSegment !== undefined
-? nextSegment.recordingSegment ===
-segment.recordingSegment
+? nextSegment.recordingSegment === seg.recordingSegment
: false;

function update(event: MouseEvent) {
const deltaRecorded =
(event.clientX - downEvent.clientX) *
secsPerPixel() *
-segment.timescale;
+seg.timescale;
const newEnd = end + deltaRecorded;

setProject(
@@ -658,13 +678,12 @@
Math.max(
Math.min(
newEnd,
-// availableTimelineDuration is in timeline seconds; convert to recorded seconds
-end + availableTimelineDuration * segment.timescale,
+end + availableTimelineDuration * seg.timescale,
nextSegmentIsSameClip
? nextSegment.start
: maxSegmentDuration,
),
-segment.start + 1,
+seg.start + 1,
),
);
}
@@ -737,7 +756,7 @@
</>
);
}}
-</For>
+</Index>
</TrackRoot>
);
}