Skip to content

Commit

Permalink
Adds frame seeking hotkeys.
Browse files Browse the repository at this point in the history
Also, refactor the method for finding the right timestamp to start/end
at when a trim exists. The new method uses a binary search, so it should
be more efficient than a linear seek. I use this method to wrap around
in the frame seeking.

Tested using trimmed videos at different speeds, and everything works as
expected.

Updated the README with the new hotkeys.
  • Loading branch information
DavidXenakis committed Jan 24, 2024
1 parent 05209e1 commit dc87f85
Show file tree
Hide file tree
Showing 2 changed files with 160 additions and 75 deletions.
4 changes: 4 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -140,3 +140,7 @@ Enable the capturing of replays, adds the replay filters.
Disable the automatic next scene switching function.
* **Enable next scene**
Enable the automatic next scene switching function.
* **Previous frame**
Rewind the playback head one frame. Automatically pauses playback.
* **Next frame**
Advance the playback head one frame. Automatically pauses playback.
231 changes: 156 additions & 75 deletions replay-source.c
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
#include <obs-frontend-api.h>
#include <obs-scene.h>
#include "replay.h"
#include <inttypes.h>
#include <string.h>

#define VISIBILITY_ACTION_RESTART 0
Expand Down Expand Up @@ -93,6 +94,8 @@ struct replay_source {
obs_hotkey_id disable_next_scene_hotkey;
obs_hotkey_id next_scene_current_hotkey;
obs_hotkey_id next_scene_hotkey;
obs_hotkey_id next_frame_hotkey;
obs_hotkey_id prev_frame_hotkey;
uint64_t start_timestamp;
uint64_t previous_frame_timestamp;
uint64_t pause_timestamp;
Expand All @@ -107,6 +110,7 @@ struct replay_source {
bool restart;
bool active;
bool end;
bool stepped;
enum saving_status saving_status;

int replay_position;
Expand Down Expand Up @@ -553,23 +557,13 @@ static void replay_update_position(struct replay_source *context, bool lock)
context->start_timestamp = obs_get_video_frame_time();
context->backward = context->backward_start;
if (!context->backward && context->current_replay.trim_front != 0) {
if (context->speed_percent == 100.0f) {
context->start_timestamp -=
context->current_replay.trim_front;
} else {
context->start_timestamp -=
(uint64_t)(context->current_replay.trim_front *
100.0 / context->speed_percent);
}
context->start_timestamp -=
(uint64_t)(context->current_replay.trim_front *
100.0 / context->speed_percent);
} else if (context->backward && context->current_replay.trim_end != 0) {
if (context->speed_percent == 100.0f) {
context->start_timestamp -=
context->current_replay.trim_end;
} else {
context->start_timestamp -=
(uint64_t)(context->current_replay.trim_end *
100.0 / context->speed_percent);
}
context->start_timestamp -=
(uint64_t)(context->current_replay.trim_end *
100.0 / context->speed_percent);
}
context->pause_timestamp = 0;
if (context->backward && context->current_replay.video_frame_count) {
Expand Down Expand Up @@ -1386,6 +1380,40 @@ static void replay_retrieve(struct replay_source *context)
}
}

// Finds the index of the frame in the current replay whose timestamp is
// closest to the desired timestamp `ts`.
//
// data: a struct replay_source * (passed as void * to match callers).
// ts:   desired timestamp, in the same clock domain as the stored frames.
// le:   when true, return the nearest frame at-or-before ts ("less or
//       equal"); when false, return the nearest frame at-or-after ts.
//
// Returns a valid index into current_replay.video_frames, clamped to
// [0, video_frame_count - 1]. Returns 0 when the replay has no frames.
//
// Fixes over the previous version: guards against an empty frame array
// (which previously returned count - 1 == (uint64_t)-1), and removes the
// post-loop fall-through paths that could compute mid - 1 with mid == 0
// and underflow to a huge unsigned index.
uint64_t find_closest_frame(void *data, uint64_t ts, bool le) {
	struct replay_source *c = data;
	int64_t count = c->current_replay.video_frame_count;
	struct obs_source_frame **frames = c->current_replay.video_frames;

	if (count <= 0)
		return 0;
	// Clamp to the first/last frame when ts lies outside the replay.
	// (<= / >= also short-circuits the exact-boundary hits, which the
	// binary search below would resolve to the same indices.)
	if (ts <= c->current_replay.first_frame_timestamp)
		return 0;
	if (ts >= c->current_replay.last_frame_timestamp)
		return count - 1;

	// Lower-bound binary search: find the first index whose timestamp
	// is >= ts. The clamps above guarantee 0 < lo < count on exit.
	int64_t lo = 0, hi = count - 1;
	while (lo < hi) {
		int64_t mid = lo + (hi - lo) / 2;
		if (frames[mid]->timestamp < ts)
			lo = mid + 1;
		else
			hi = mid;
	}

	if (frames[lo]->timestamp == ts)
		return lo;
	// frames[lo] is the first frame strictly after ts, so frames[lo - 1]
	// is the last frame strictly before it; pick per the `le` policy.
	return le ? lo - 1 : lo;
}

static void replay_hotkey(void *data, obs_hotkey_id id, obs_hotkey_t *hotkey,
bool pressed)
{
Expand Down Expand Up @@ -1534,6 +1562,64 @@ static void replay_next_scene_hotkey(void *data, obs_hotkey_id id,
}
}

// Steps the playback head by `num_frames` frames in the requested
// direction, wrapping around the trimmed region when it would run past
// either end. Pauses playback if it was running, and flags the step so
// the tick handler re-emits the (now current) frame while paused.
//
// data:       a struct replay_source * (passed as void *).
// pressed:    hotkey state; only a press (true) performs a step.
// forward:    true steps toward the end, false toward the beginning.
// num_frames: how many frames to step.
void replay_step_frames(void *data, bool pressed, bool forward, uint64_t num_frames) {
	struct replay_source *c = data;
	if (!pressed || !c->current_replay.video_frame_count)
		return;

	const uint64_t pos = c->video_frame_position;
	// Playable boundaries of the trimmed replay, in frame timestamps.
	const uint64_t head_ts = c->current_replay.first_frame_timestamp +
				 c->current_replay.trim_front;
	const uint64_t tail_ts = c->current_replay.last_frame_timestamp -
				 c->current_replay.trim_end;

	uint64_t next_pos;
	if (forward) {
		// Wrap to the trimmed start when stepping past the trimmed end.
		const bool wrap =
			pos + num_frames >= c->current_replay.video_frame_count ||
			c->current_replay.video_frames[pos + num_frames]->timestamp >
				tail_ts;
		next_pos = wrap ? find_closest_frame(c, head_ts, false)
				: pos + num_frames;
	} else {
		// Wrap to the trimmed end when stepping before the trimmed start.
		const bool wrap =
			pos < num_frames ||
			c->current_replay.video_frames[pos - num_frames]->timestamp <
				head_ts;
		next_pos = wrap ? find_closest_frame(c, tail_ts, true)
				: pos - num_frames;
	}

	// Frame stepping implies paused playback.
	if (c->play) {
		c->play = false;
		c->pause_timestamp = obs_get_video_frame_time();
		obs_source_signal(c->source, "media_pause");
	}
	c->stepped = true;

	// Shift start_timestamp so the elapsed-time math stays consistent
	// with the new head position, scaled by the playback speed.
	const int64_t target_ts =
		c->current_replay.video_frames[next_pos]->timestamp;
	const int64_t current_ts = c->current_replay.video_frames[pos]->timestamp;
	int64_t shift =
		(int64_t)((current_ts - target_ts) * 100 / c->speed_percent);
	if (c->backward)
		shift = -shift;
	c->start_timestamp += shift;
	c->video_frame_position = next_pos;
}

// Hotkey callback: advance the playback head by exactly one frame
// (pauses playback as a side effect of stepping).
static void replay_next_frame_hotkey(void *data, obs_hotkey_id id,
				     obs_hotkey_t *hotkey, bool pressed)
{
	UNUSED_PARAMETER(hotkey);
	UNUSED_PARAMETER(id);
	replay_step_frames(data, pressed, /*forward=*/true, /*num_frames=*/1);
}

// Hotkey callback: rewind the playback head by exactly one frame
// (pauses playback as a side effect of stepping).
static void replay_prev_frame_hotkey(void *data, obs_hotkey_id id,
				     obs_hotkey_t *hotkey, bool pressed)
{
	UNUSED_PARAMETER(hotkey);
	UNUSED_PARAMETER(id);
	replay_step_frames(data, pressed, /*forward=*/false, /*num_frames=*/1);
}

static void replay_next_hotkey(void *data, obs_hotkey_id id,
obs_hotkey_t *hotkey, bool pressed)
{
Expand Down Expand Up @@ -2079,6 +2165,10 @@ static void replay_source_update(void *data, obs_data_t *settings)
true);
} else if (strcmp(execute_action, "SwitchToNextScene") == 0) {
replay_next_scene_hotkey(context, 0, NULL, true);
} else if (strcmp(execute_action, "NextFrame") == 0) {
replay_next_frame_hotkey(context, 0, NULL, true);
} else if (strcmp(execute_action, "PrevFrame") == 0) {
replay_prev_frame_hotkey(context, 0, NULL, true);
}
obs_data_erase(settings, SETTING_EXECUTE_ACTION);
}
Expand Down Expand Up @@ -2581,6 +2671,16 @@ static void *replay_source_create(obs_data_t *settings, obs_source_t *source)
obs_hotkey_register_source(source, "ReplaySource.NextScene",
obs_module_text("SwitchToNextScene"),
replay_next_scene_hotkey, context);

context->prev_frame_hotkey =
obs_hotkey_register_source(source, "ReplaySource.PrevFrame",
obs_module_text("PrevFrame"),
replay_prev_frame_hotkey, context);

context->next_frame_hotkey =
obs_hotkey_register_source(source, "ReplaySource.NextFrame",
obs_module_text("NextFrame"),
replay_next_frame_hotkey, context);

return context;
}
Expand Down Expand Up @@ -2993,6 +3093,17 @@ static void replay_source_tick(void *data, float seconds)
obs_source_output_video(context->source, f);
obs_source_frame_destroy(f);
}
if (context->stepped) {
context->stepped = false;
struct obs_source_frame *frame =
context->current_replay
.video_frames[context->video_frame_position];
uint64_t t = frame->timestamp;
frame->timestamp = os_timestamp;
obs_source_output_video(context->source, frame);
frame->timestamp = t;
replay_update_text(context);
}
pthread_mutex_unlock(&context->video_mutex);
return;
}
Expand All @@ -3004,9 +3115,10 @@ static void replay_source_tick(void *data, float seconds)

if (context->current_replay.video_frame_count) {
if (context->video_frame_position >=
context->current_replay.video_frame_count)
context->current_replay.video_frame_count) {
context->video_frame_position =
context->current_replay.video_frame_count - 1;
}
struct obs_source_frame *frame =
context->current_replay
.video_frames[context->video_frame_position];
Expand All @@ -3022,17 +3134,11 @@ static void replay_source_tick(void *data, float seconds)
context->start_timestamp = os_timestamp;
context->restart = false;
if (context->current_replay.trim_end != 0) {
if (context->speed_percent == 100.0f) {
context->start_timestamp -=
context->current_replay
.trim_end;
} else {
context->start_timestamp -=
(uint64_t)(context->current_replay
.trim_end *
100.0 /
context->speed_percent);
}
context->start_timestamp -=
(uint64_t)(context->current_replay
.trim_end *
100.0 /
context->speed_percent);

if (context->current_replay.trim_end <
0) {
Expand All @@ -3047,21 +3153,10 @@ static void replay_source_tick(void *data, float seconds)
&context->video_mutex);
return;
}
while (frame->timestamp >
context->current_replay
.last_frame_timestamp -
context->current_replay
.trim_end) {
if (context->video_frame_position ==
0) {
context->video_frame_position =
context->current_replay
.video_frame_count;
}
context->video_frame_position--;
frame = context->current_replay.video_frames
[context->video_frame_position];
}
uint64_t desired_ts = context->current_replay.last_frame_timestamp - context->current_replay.trim_end;
int64_t desired_frame_num = find_closest_frame(context, desired_ts, true);
frame = context->current_replay.video_frames
[desired_frame_num];
}
}

Expand All @@ -3075,7 +3170,7 @@ static void replay_source_tick(void *data, float seconds)
frame->timestamp) *
100.0 / context->speed_percent);

struct obs_source_frame *output_frame = NULL;
struct obs_source_frame *output_frame = frame;
while (context->play &&
video_duration >= source_duration) {
output_frame = frame;
Expand All @@ -3102,10 +3197,10 @@ static void replay_source_tick(void *data, float seconds)
100.0 /
context->speed_percent);
}
if (output_frame) {
replay_output_frame(context, output_frame);
} else if (context->video_frame_position == 0) {
if (context->video_frame_position == 0) {
replay_source_end_action(context);
} else {
replay_output_frame(context, output_frame);
}
} else {
if (context->restart) {
Expand All @@ -3116,17 +3211,11 @@ static void replay_source_tick(void *data, float seconds)
frame = context->current_replay.video_frames
[context->video_frame_position];
if (context->current_replay.trim_front != 0) {
if (context->speed_percent == 100.0f) {
context->start_timestamp -=
context->current_replay
.trim_front;
} else {
context->start_timestamp -=
(uint64_t)(context->current_replay
.trim_front *
100.0 /
context->speed_percent);
}
context->start_timestamp -=
(uint64_t)(context->current_replay
.trim_front *
100.0 /
context->speed_percent);
if (context->current_replay.trim_front <
0) {
uint64_t t = frame->timestamp;
Expand All @@ -3140,21 +3229,13 @@ static void replay_source_tick(void *data, float seconds)
&context->video_mutex);
return;
}
while (frame->timestamp <
context->current_replay
uint64_t desired_ts = context->current_replay
.first_frame_timestamp +
context->current_replay
.trim_front) {
context->video_frame_position++;
if (context->video_frame_position >=
context->current_replay
.video_frame_count) {
context->video_frame_position =
0;
}
frame = context->current_replay.video_frames
[context->video_frame_position];
}
.trim_front;
int64_t desired_frame_num = find_closest_frame(context, desired_ts, false);
frame = context->current_replay.video_frames
[desired_frame_num];
}
}
if (context->start_timestamp > os_timestamp) {
Expand Down Expand Up @@ -3277,7 +3358,7 @@ static void replay_source_tick(void *data, float seconds)
context->current_replay
.first_frame_timestamp) *
100.0 / context->speed_percent);
struct obs_source_frame *output_frame = NULL;
struct obs_source_frame *output_frame = frame;
while (context->play &&
video_duration >= source_duration) {
output_frame = frame;
Expand Down Expand Up @@ -3306,16 +3387,16 @@ static void replay_source_tick(void *data, float seconds)
100.0 /
context->speed_percent);
}
if (output_frame) {
replay_output_frame(context, output_frame);
} else if (context->video_frame_position >=
if (context->video_frame_position >=
context->current_replay.video_frame_count -
1) {
context->video_frame_position =
context->current_replay
.video_frame_count -
1;
replay_source_end_action(context);
} else {
replay_output_frame(context, output_frame);
}
}
} else if (context->current_replay.audio_frame_count) {
Expand Down

0 comments on commit dc87f85

Please sign in to comment.