YouTube 360° video goes black
https://bugs.webkit.org/show_bug.cgi?id=259298
rdar://112239644

Reviewed by Eric Carlson.

When YouTube pre-rolls an advertisement before a 360° video, it starts with a video
element that is displayed normally, then hides that element with display:none when it
switches to canvas-based rendering for the 360° content. By that point, the
MediaPlayerPrivate's layer has already been created and sized, and nothing forces it
to switch back to a decompression session, which is the only way frames can be
extracted during playback.
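
For context, a rough JavaScript sketch of the page behavior described above; the
function name and the way the page obtains its video and canvas elements are
illustrative assumptions, not YouTube's actual player code:

    async function switchToCanvasRendering(video, canvas) {
        // The pre-roll advertisement plays in a normally-displayed <video> element.
        await video.play();

        // For the 360° content, the page hides the element and paints its frames
        // into a canvas instead of letting the element render directly.
        video.style.display = 'none';
        const context = canvas.getContext('2d');
        const paint = () => {
            // Without the fix below, the canvas stops updating here: the
            // MediaPlayerPrivate keeps its (now hidden) layer and never falls back
            // to a decompression session, so no frames can be extracted.
            context.drawImage(video, 0, 0, canvas.width, canvas.height);
            video.requestVideoFrameCallback(paint);
        };
        video.requestVideoFrameCallback(paint);
    }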

When a video element's renderer is detached, queue a task to notify the media
player that the accelerated rendering state has changed. Do the same when the
fullscreen mode changes. In MediaPlayerPrivateMediaSourceAVFObjC, use this
signal to tear down the AVSampleBufferDisplayLayer when it is no longer needed.

* LayoutTests/media/media-source/media-source-paint-after-display-none-expected.txt: Added.
* LayoutTests/media/media-source/media-source-paint-after-display-none.html: Added.
* Source/WebCore/html/HTMLMediaElement.cpp:
(WebCore::HTMLMediaElement::didDetachRenderers):
(WebCore::HTMLMediaElement::setFullscreenMode):
* Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm:
(WebCore::MediaPlayerPrivateMediaSourceAVFObjC::shouldEnsureLayer const):

Canonical link: https://commits.webkit.org/266162@main
jernoble committed Jul 19, 2023
1 parent 2c35d8a commit b2a7036
Showing 6 changed files with 137 additions and 1 deletion.
11 changes: 11 additions & 0 deletions LayoutTests/media/media-source/media-source-paint-after-display-none-expected.txt
@@ -0,0 +1,11 @@
EVENT(sourceopen)
EVENT(canplay)
EXPECTED (canvas.getContext("2d").getImageData(1, 1, 1, 1).data[0] > '128') OK
EXPECTED (canvas.getContext("2d").getImageData(1, 1, 1, 1).data[1] < '128') OK
RUN(video.style.display = "none")
EVENT(canplay)
EVENT(playing)
EXPECTED (canvas.getContext("2d").getImageData(1, 1, 1, 1).data[0] < '128') OK
EXPECTED (canvas.getContext("2d").getImageData(1, 1, 1, 1).data[1] > '128') OK
END OF TEST

99 changes: 99 additions & 0 deletions LayoutTests/media/media-source/media-source-paint-after-display-none.html
@@ -0,0 +1,99 @@
<!DOCTYPE html>
<html>
<head>
<title>media-source-paint-after-display-none</title>
<script src="media-source-loader.js"></script>
<script src="../video-test.js"></script>
<script>

var canvas;
var loader;
var source;
var sourceBuffer;

function loaderPromise(loader) {
    return new Promise((resolve, reject) => {
        loader.onload = resolve;
        loader.onerror = reject;
    });
}

function requestVideoFramePromise(video) {
    return new Promise(resolve => {
        video.requestVideoFrameCallback(info => {
            resolve(info);
        });
    });
}

async function runTest() {
    findMediaElement();

    var response = await fetch('content/test-red-3s-480x360.mp4');
    let redMedia = await response.arrayBuffer();

    response = await fetch('content/test-green-6s-320x240.mp4');
    let greenMedia = await response.arrayBuffer();

    source = new MediaSource();
    video.src = URL.createObjectURL(source);
    await waitFor(source, 'sourceopen');

    sourceBuffer = source.addSourceBuffer('video/mp4');
    sourceBuffer.appendBuffer(redMedia);
    await Promise.all([
        waitFor(sourceBuffer, 'update', true),
        waitFor(video, 'canplay')
    ]);

    let createCanvas = () => {
        canvas = document.createElement('canvas');
        canvas.width = video.videoWidth / 2;
        canvas.height = video.videoHeight / 2;
        canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height);
        return canvas;
    };

    canvas = createCanvas();
    await runUntil(
        () => canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height),
        () => canvas.getContext("2d").getImageData(1, 1, 1, 1).data[0] != 0,
        5000);

    testExpected('canvas.getContext("2d").getImageData(1, 1, 1, 1).data[0]', '128', '>');
    testExpected('canvas.getContext("2d").getImageData(1, 1, 1, 1).data[1]', '128', '<');

    run('video.style.display = "none"');
    sourceBuffer.remove(0, video.duration);
    await waitFor(sourceBuffer, 'update', true);

    sourceBuffer.appendBuffer(greenMedia);
    await Promise.all([
        waitFor(sourceBuffer, 'update', true),
        waitFor(video, 'canplay')
    ]);

    video.play();
    await waitFor(video, 'playing');

    canvas = createCanvas();
    await runUntil(
        () => canvas.getContext('2d').drawImage(video, 0, 0, canvas.width, canvas.height),
        () => canvas.getContext("2d").getImageData(1, 1, 1, 1).data[1] != 0,
        5000);

    testExpected('canvas.getContext("2d").getImageData(1, 1, 1, 1).data[0]', '128', '<');
    testExpected('canvas.getContext("2d").getImageData(1, 1, 1, 1).data[1]', '128', '>');
}

window.addEventListener('load', event => {
    runTest().then(endTest).catch(failTest);
});
</script>
</head>
<body>
<video muted playsinline></video>
<div id="canvases"></canvas>
</body>
</html>
10 changes: 10 additions & 0 deletions LayoutTests/media/video-test.js
@@ -141,6 +141,16 @@ function testExpectedEventually(testFuncString, expected, comparison, timeout)
    });
}

async function runUntil(run, until, timeout) {
    while (timeout === undefined || timeout--) {
        run();
        if (until())
            return;
        await sleepFor(1);
    }
    failTest("Did not end fast enough.");
}

function testArraysEqual(testFuncString, expected)
{
    var observed;
3 changes: 3 additions & 0 deletions LayoutTests/platform/mac/TestExpectations
@@ -2764,3 +2764,6 @@ webkit.org/b/258181 [ Debug ] inspector/debugger/async-stack-trace-truncate.html
webkit.org/b/236128 imported/w3c/web-platform-tests/html/user-activation/activation-trigger-mouse-right.html [ Skip ]

webkit.org/b/252322 [ X86_64 ] media/media-source/media-source-video-renders.html [ ImageOnlyFailure ]

# Requires Ventura or later
[ BigSur Monterey ] media/media-source/media-source-paint-after-display-none.html [ Skip ]
11 changes: 11 additions & 0 deletions Source/WebCore/html/HTMLMediaElement.cpp
@@ -978,6 +978,14 @@ void HTMLMediaElement::willDetachRenderers()
void HTMLMediaElement::didDetachRenderers()
{
    scheduleUpdateShouldAutoplay();

    queueTaskKeepingObjectAlive(*this, TaskSource::MediaElement, [this] {
        // If we detach a media element from a renderer, we may no longer need the MediaPlayerPrivate
        // to vend a PlatformLayer. However, the renderer may be torn down and re-attached during a
        // single run-loop as a result of layout or due to the element being re-parented.
        if (!renderer() && m_player)
            m_player->acceleratedRenderingStateChanged();
    });
}

void HTMLMediaElement::didRecalcStyle(Style::Change)
@@ -8923,6 +8931,9 @@ void HTMLMediaElement::setFullscreenMode(VideoFullscreenMode mode)
    m_videoFullscreenMode = mode;
    visibilityStateChanged();
    schedulePlaybackControlsManagerUpdate();

    if (m_player)
        m_player->acceleratedRenderingStateChanged();
}

#if !RELEASE_LOG_DISABLED
4 changes: 3 additions & 1 deletion Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm
@@ -801,9 +801,11 @@ void getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types) const f
         return sourceBuffer->needsVideoLayer();
     }))
         return true;
+    auto player = m_player.get();
+    if (player && !player->renderingCanBeAccelerated())
+        return false;
     if (m_sampleBufferDisplayLayer)
         return !CGRectIsEmpty([m_sampleBufferDisplayLayer bounds]);
-    auto player = m_player.get();
     if (player && !player->videoInlineSize().isEmpty())
         return true;
     if (player && !player->playerContentBoxRect().isEmpty())
