Skip to content

Commit

Permalink
Improve logic for deciding when to paint the first video frame.
Browse files Browse the repository at this point in the history
Currently we paint the first frame if !(ts + dur < start_time); this
ends up painting frame N-1 if frame N has a timestamp of
exactly start_time.

Instead, only paint if our current timestamp is >= start_time, or if the
frame straddles the start time (ts < start_time < ts + duration).

Fixed: 1410446
Change-Id: I288e639964c054625d545763890616fe5d00a56f
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/4246659
Reviewed-by: Eugene Zemtsov <eugene@chromium.org>
Commit-Queue: Dale Curtis <dalecurtis@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1105919}
  • Loading branch information
dalecurtis authored and Chromium LUCI CQ committed Feb 15, 2023
1 parent 1f1ef0f commit 219f4fa
Show file tree
Hide file tree
Showing 7 changed files with 86 additions and 59 deletions.
12 changes: 11 additions & 1 deletion media/renderers/video_renderer_impl.cc
Original file line number Diff line number Diff line change
Expand Up @@ -606,6 +606,7 @@ void VideoRendererImpl::FrameReady(VideoDecoderStream::ReadResult result) {
const bool is_eos = frame->metadata().end_of_stream;
const bool is_before_start_time = !is_eos && IsBeforeStartTime(*frame);
const bool cant_read = !video_decoder_stream_->CanReadWithoutStalling();
const bool has_best_first_frame = !is_eos && HasBestFirstFrame(*frame);

if (is_eos) {
DCHECK(!received_end_of_stream_);
Expand Down Expand Up @@ -661,7 +662,7 @@ void VideoRendererImpl::FrameReady(VideoDecoderStream::ReadResult result) {
// frame metadata.
if (!sink_started_ && !painted_first_frame_ && algorithm_->frames_queued() &&
(received_end_of_stream_ || cant_read ||
(algorithm_->effective_frames_queued() && !is_before_start_time))) {
(algorithm_->effective_frames_queued() && has_best_first_frame))) {
scoped_refptr<VideoFrame> first_frame =
algorithm_->Render(base::TimeTicks(), base::TimeTicks(), nullptr);
CheckForMetadataChanges(first_frame->format(), first_frame->natural_size());
Expand Down Expand Up @@ -963,6 +964,15 @@ bool VideoRendererImpl::IsBeforeStartTime(const VideoFrame& frame) {
start_timestamp_;
}

bool VideoRendererImpl::HasBestFirstFrame(const VideoFrame& frame) {
  // The queued frame is the best candidate for first paint when its
  // presentation interval reaches `start_timestamp_`: either it begins at or
  // after the start time, or it begins earlier but its duration carries it
  // past the start time (i.e. the frame straddles `start_timestamp_`).
  const auto frame_start = frame.timestamp();
  if (frame_start >= start_timestamp_)
    return true;

  // When the frame carries no explicit duration, fall back to the average
  // duration observed from the decoder stream.
  const auto frame_duration = frame.metadata().frame_duration.value_or(
      last_decoder_stream_avg_duration_);
  return frame_start + frame_duration > start_timestamp_;
}

void VideoRendererImpl::RemoveFramesForUnderflowOrBackgroundRendering() {
// Nothing to do if frame dropping is disabled for testing or we have nothing.
if (!drop_frames_ || !algorithm_->frames_queued())
Expand Down
4 changes: 4 additions & 0 deletions media/renderers/video_renderer_impl.h
Original file line number Diff line number Diff line change
Expand Up @@ -183,6 +183,10 @@ class MEDIA_EXPORT VideoRendererImpl
// duration is before |start_timestamp_|.
bool IsBeforeStartTime(const VideoFrame& frame);

// Helper method for checking if we have the best possible first frame to
// paint in the queue.
bool HasBestFirstFrame(const VideoFrame& frame);

// Attempts to remove frames which are no longer effective for rendering when
// |buffering_state_| == BUFFERING_HAVE_NOTHING or |was_background_rendering_|
// is true. If the current media time as provided by |wall_clock_time_cb_| is
Expand Down
33 changes: 30 additions & 3 deletions media/renderers/video_renderer_impl_unittest.cc
Original file line number Diff line number Diff line change
Expand Up @@ -209,16 +209,17 @@ class VideoRendererImplTest : public testing::Test {
//
// Syntax:
// nn - Queue a decoder buffer with timestamp nn * 1000us
// nndmm - Queue a decoder buffer with timestamp nn * 1000us
// and mm * 1000us duration
// abort - Queue an aborted read
// error - Queue a decoder error
//
// Examples:
// A clip that is four frames long: "0 10 20 30"
// A clip that has a decode error: "60 70 error"
void QueueFrames(const std::string& str) {
for (const std::string& token :
base::SplitString(str, " ", base::TRIM_WHITESPACE,
base::SPLIT_WANT_ALL)) {
for (base::StringPiece token : base::SplitString(
str, " ", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL)) {
if (token == "abort") {
scoped_refptr<VideoFrame> null_frame;
QueueFrame(DecoderStatus::Codes::kAborted, null_frame);
Expand All @@ -231,12 +232,25 @@ class VideoRendererImplTest : public testing::Test {
continue;
}

auto ts_tokens = base::SplitStringPiece(token, "d", base::TRIM_WHITESPACE,
base::SPLIT_WANT_ALL);
if (ts_tokens.size() > 1) {
token = ts_tokens[0];
}

int timestamp_in_ms = 0;
if (base::StringToInt(token, &timestamp_in_ms)) {
gfx::Size natural_size = TestVideoConfig::NormalCodedSize();
scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
PIXEL_FORMAT_I420, natural_size, gfx::Rect(natural_size),
natural_size, base::Milliseconds(timestamp_in_ms));

int duration_in_ms = 0;
if (ts_tokens.size() > 1 &&
base::StringToInt(ts_tokens[1], &duration_in_ms)) {
frame->metadata().frame_duration = base::Milliseconds(duration_in_ms);
}

QueueFrame(DecoderStatus::Codes::kOk, frame);
continue;
}
Expand Down Expand Up @@ -453,6 +467,19 @@ TEST_F(VideoRendererImplTest, InitializeAndStartPlayingFrom) {
Destroy();
}

// Verifies that when queued frames carry explicit durations, starting
// playback at a timestamp that lands exactly on a frame boundary paints that
// frame (the one timestamped 10ms) rather than the preceding one.
TEST_F(VideoRendererImplTest, InitializeAndStartPlayingFromWithDuration) {
Initialize();
// Queue five frames at 0,10,20,30,40 ms, each with a 10ms duration
// (the "nndmm" syntax documented on QueueFrames()).
QueueFrames("0d10 10d10 20d10 30d10 40d10");
// Starting from 10ms must deliver the frame timestamped exactly 10ms first.
EXPECT_CALL(mock_cb_, FrameReceived(HasTimestampMatcher(10)));
EXPECT_CALL(mock_cb_, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH, _));
EXPECT_CALL(mock_cb_, OnStatisticsUpdate(_)).Times(AnyNumber());
EXPECT_CALL(mock_cb_, OnVideoNaturalSizeChange(_)).Times(1);
EXPECT_CALL(mock_cb_, OnVideoOpacityChange(_)).Times(1);
// 10ms frame spacing implies a 100fps detected frame rate.
EXPECT_CALL(mock_cb_, OnVideoFrameRateChange(absl::optional<int>(100)));
StartPlayingFrom(10);
Destroy();
}

TEST_F(VideoRendererImplTest, InitializeAndEndOfStream) {
Initialize();
StartPlayingFrom(0);
Expand Down
96 changes: 41 additions & 55 deletions third_party/blink/web_tests/media/video-canvas.html
Original file line number Diff line number Diff line change
@@ -1,62 +1,48 @@
<!DOCTYPE html>
<title>Test "video" as a source for "canvas".</title>
<script src="../resources/testharness.js"></script>
<script src="../resources/testharnessreport.js"></script>
<video></video>
<video style="display: none"></video>
<canvas width="160" height="120"></canvas>
<script>
async_test(function(t) {
var ctx;
var width;
var height;
var results = {
current: 0,
values: [
// Different platforms may take different RGB conversion paths or
// have slight rounding differences, so allow multiple values here.
{ time: 0, r: [255], g: [255], b: [0] },
{ time: 2, r: [0], g: [3], b: [240, 242, 243] },
{ time: 4, r: [0], g: [31], b: [213, 215, 216] },
{ time: 6, r: [0], g: [48], b: [182, 183, 184, 185] },
{ time: 8, r: [0], g: [75, 76], b: [153, 154, 155] },
{ time: 10, r: [0], g: [96, 97], b: [126, 127, 128] }
]
};

var video = document.querySelector("video");

video.onloadedmetadata = t.step_func(function() {
width = video.videoWidth / 2;
height = video.videoHeight / 2;

ctx = document.querySelector("canvas").getContext("2d");
ctx.fillStyle = "yellow";
ctx.fillRect(0, 0, width, height);
testFrame();
});

video.onseeked = t.step_func(function() {
ctx.drawImage(video, 0, 0, width, height);
testFrame();
});

function testFrame() {
var expected = results.values[results.current];
var frame = ctx.getImageData(0, 0, width, height);
var r = frame.data[4 * 2 * width + 16 + 0];
var g = frame.data[4 * 2 * width + 16 + 1];
var b = frame.data[4 * 2 * width + 16 + 2];

assert_true(expected.r.includes(r), expected.time + " r " + r);
assert_true(expected.g.includes(g), expected.time + " g " + g);
assert_true(expected.b.includes(b), expected.time + " b " + b);
<canvas width="160" height="120"></canvas>
<canvas width="160" height="120"></canvas><br/>

if (++results.current == results.values.length)
t.done();
else
video.currentTime = results.values[results.current].time;
<canvas width="160" height="120"></canvas>
<canvas width="160" height="120"></canvas>
<canvas width="160" height="120"></canvas><br/>
<script>
if (window.testRunner)
testRunner.waitUntilDone();

window.onload = _ => {
var index = 0;
var canvii = document.querySelectorAll('canvas');
var testTimes = [0, 2, 4, 6, 8, 10];
if (testTimes.length != canvii.length) {
testRunner.fail('Unexpected number of canvas or test times.');
return;
}

var video = document.querySelector("video");
video.requestVideoFrameCallback(_ => {
let width = video.videoWidth / 2;
let height = video.videoHeight / 2;

function drawNext() {
let ctx = canvii[index++].getContext("2d");
ctx.fillStyle = "yellow";
ctx.fillRect(0, 0, width, height);
ctx.drawImage(video, 0, 0, width, height);

if (index < testTimes.length) {
video.requestVideoFrameCallback(drawNext);
video.currentTime = testTimes[index];
} else if (window.testRunner) {
testRunner.notifyDone();
}
}

video.src = "content/counting.webm";
});
drawNext();
});

video.src = "content/counting.webm";
};
</script>
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.

0 comments on commit 219f4fa

Please sign in to comment.