CanvasCapture: Make Pixel_WebGPUCanvasOneCopyCapture consistent.
Pixel_WebGPUCanvasOneCopyCapture was flaky due to WebRTC's inconsistent
compression quality.
This CL changes Pixel_WebGPUCanvasOneCopyCapture to use
canvas.captureStream() directly as the source of the video element.
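
In essence (a minimal sketch, assuming the element ids used by the test
page):

    const canvas = document.getElementById('canvas_gpu');
    const video = document.getElementById('video_player');
    // Feed captured canvas frames straight into the <video>; no peer
    // connection is involved.
    video.srcObject = canvas.captureStream();
    video.play();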

This also requires a change in html_canvas_element to activate the
OneCopyCanvasCapture feature path when the canvas rendering context's
color is opaque.
Previously this feature was only used when the video sink could
discard alpha (such as WebRTC).
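
For reference, a sketch of the opaque context configuration that now
qualifies for the one-copy path (getPreferredCanvasFormat() is assumed
here; the test helper passes an explicit format instead):

    const context = canvas.getContext('webgpu');
    context.configure({
      device: device,  // a GPUDevice from requestAdapter()/requestDevice()
      format: navigator.gpu.getPreferredCanvasFormat(),
      usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_SRC,
      alphaMode: 'opaque',  // opaque color info; no alpha for capture to discard
    });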

Bug: 1349480
Bug: 1349676
Change-Id: I414389a4dec4fc1b7d8e90053c7fd1c08817f0a4
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/3805198
Reviewed-by: Kenneth Russell <kbr@chromium.org>
Reviewed-by: Sunny Sachanandani <sunnyps@chromium.org>
Commit-Queue: Quyen Le <lehoangquyen@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1031446}
kakashidinho authored and Chromium LUCI CQ committed Aug 4, 2022
1 parent 3614277 commit c0bb919
Showing 5 changed files with 39 additions and 88 deletions.
105 changes: 19 additions & 86 deletions content/test/data/gpu/pixel_webgpu_canvas_capture_to_video.html
@@ -11,18 +11,20 @@
 <script type="text/javascript" src="pixel_webgpu_util.js"></script>
 <script type="text/javascript">
 var g_swapsBeforeAck = 15;
+var g_videoFrameReceived = false;
 
 async function main() {
-  loadWebrtc();
-
   const gpuCanvas = document.getElementById('canvas_gpu');
-  const [device, swapChain] = await webGpuUtils.init(gpuCanvas);
+  const [device, swapChain] = await webGpuUtils.init(gpuCanvas, /*has_alpha=*/false);
   if (!device || !swapChain) {
     console.error("Failed to initialize WebGPU - skipping test");
     domAutomationController.send("FAILURE");
     return;
   }
 
+  const video = document.getElementById('video_player');
+  video.srcObject = gpuCanvas.captureStream();
+
   function render() {
     const gpuContext = gpuCanvas.getContext('webgpu');
 
@@ -32,100 +34,31 @@
     waitForFinish();
   }
 
+  function videoFrameReceived() {
+    g_videoFrameReceived = true;
+  }
+
   function waitForFinish() {
     if (g_swapsBeforeAck == 0) {
       domAutomationController.send("SUCCESS");
     } else {
-      g_swapsBeforeAck--;
+      // only start count down when a video frame has been received
+      if (g_videoFrameReceived)
+        g_swapsBeforeAck--;
       window.requestAnimationFrame(render);
     }
   }
 
   // Start rendering loop
   window.requestAnimationFrame(render);
+
+  // register a callback to be invoked when a video frame has been received.
+  if (video.requestVideoFrameCallback) {
+    video.requestVideoFrameCallback(videoFrameReceived);
+  }
+  else {
+    // requestVideoFrameCallback not available
+    g_videoFrameReceived = true;
+  }
 }
-
-function loadWebrtc() {
-  const gpuCanvas = document.getElementById('canvas_gpu');
-  const video = document.getElementById('video_player');
-
-  const stream = gpuCanvas.captureStream();
-
-  const servers = null;
-  var pc1 = new RTCPeerConnection(servers);
-  pc1.onicecandidate = e => onIceCandidate(pc1, e);
-  var pc2 = new RTCPeerConnection(servers);
-  pc2.onicecandidate = e => onIceCandidate(pc2, e);
-
-  pc1.oniceconnectionstatechange = e => onIceStateChange(pc1, e);
-  pc2.oniceconnectionstatechange = e => onIceStateChange(pc2, e);
-  pc2.ontrack = gotRemoteStream;
-
-  stream.getTracks().forEach(
-    track => {
-      pc1.addTrack(
-        track,
-        stream
-      );
-    }
-  );
-
-  pc1.createOffer(onCreateOfferSuccess, onCreateSessionDescriptionError);
-
-  function onCreateSessionDescriptionError(error) {
-    console.error(`Failed to create session description: ${error.toString()}`);
-  }
-
-  function onCreateOfferSuccess(desc) {
-    pc1.setLocalDescription(desc, () => onSetLocalSuccess(pc1), onSetSessionDescriptionError);
-    pc2.setRemoteDescription(desc, () => onSetRemoteSuccess(pc2), onSetSessionDescriptionError);
-    pc2.createAnswer(onCreateAnswerSuccess, onCreateSessionDescriptionError);
-  }
-
-  function onSetLocalSuccess(pc) {
-  }
-
-  function onSetRemoteSuccess(pc) {
-  }
-
-  function onSetSessionDescriptionError(error) {
-    console.error(`Failed to set session description: ${error.toString()}`);
-  }
-
-  function gotRemoteStream(e) {
-    if (video.srcObject !== e.streams[0]) {
-      video.srcObject = e.streams[0];
-    }
-  }
-
-  function onCreateAnswerSuccess(desc) {
-    pc2.setLocalDescription(desc, () => onSetLocalSuccess(pc2), onSetSessionDescriptionError);
-    pc1.setRemoteDescription(desc, () => onSetRemoteSuccess(pc1), onSetSessionDescriptionError);
-  }
-
-  function onIceCandidate(pc, event) {
-    getOtherPc(pc).addIceCandidate(event.candidate)
-      .then(
-        () => onAddIceCandidateSuccess(pc),
-        err => onAddIceCandidateError(pc, err)
-      );
-  }
-
-  function onAddIceCandidateSuccess(pc) {
-  }
-
-  function onAddIceCandidateError(pc, error) {
-    console.error(`${getName(pc)} failed to add ICE Candidate: ${error.toString()}`);
-  }
-
-  function onIceStateChange(pc, event) {
-  }
-
-  function getName(pc) {
-    return (pc === pc1) ? 'pc1' : 'pc2';
-  }
-
-  function getOtherPc(pc) {
-    return (pc === pc1) ? pc2 : pc1;
-  }
-}
 </script>
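
Note on the callback used in the new test: requestVideoFrameCallback fires
once per presented video frame and hands presentation metadata to the
callback (per the WICG video-rvfc spec); a minimal sketch:

    video.requestVideoFrameCallback((now, metadata) => {
      // e.g. metadata.presentationTime, metadata.presentedFrames
      console.log('first frame presented at', metadata.presentationTime);
    });

The test registers it once: a single received frame is enough to start the
swap countdown.
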
3 changes: 2 additions & 1 deletion content/test/data/gpu/pixel_webgpu_util.js
@@ -62,7 +62,7 @@ fn main(@location(0) fragUV : vec2<f32>) -> @location(0) vec4<f32> {
   };
 
   return {
-    init: async function(gpuCanvas) {
+    init: async function(gpuCanvas, has_alpha = true) {
       const adapter = navigator.gpu && await navigator.gpu.requestAdapter();
       if (!adapter) {
         console.error('navigator.gpu && navigator.gpu.requestAdapter failed');
@@ -85,6 +85,7 @@ fn main(@location(0) fragUV : vec2<f32>) -> @location(0) vec4<f32> {
         device: device,
         format: outputFormat,
         usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.COPY_SRC,
+        alphaMode: has_alpha ? "premultiplied" : "opaque",
       });
 
       return [device, context];
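
A hypothetical caller's view of the extended helper, opting out of alpha so
captured frames are opaque (this is how the test page above calls it):

    // has_alpha=false makes the helper configure the swap chain with
    // alphaMode: "opaque".
    const [device, context] = await webGpuUtils.init(gpuCanvas, /*has_alpha=*/false);
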
7 changes: 6 additions & 1 deletion third_party/blink/renderer/core/html/canvas/html_canvas_element.cc
@@ -721,6 +721,11 @@ void HTMLCanvasElement::NotifyListenersCanvasChanged() {
           WebGraphicsContext3DVideoFramePool::
               IsGpuMemoryBufferReadbackFromTextureEnabled());
   }
+
+  const bool context_color_is_opaque =
+      context_ ? context_->CanvasRenderingContextSkColorInfo().isOpaque()
+               : false;
+
   for (CanvasDrawListener* listener : listeners_) {
     if (!listener->NeedsNewFrame())
       continue;
@@ -734,7 +739,7 @@ void HTMLCanvasElement::NotifyListenersCanvasChanged() {
     // First attempt to copy directly from the rendering context to a video
     // frame. Not all rendering contexts need to support this (for contexts
    // where GetSourceImageForCanvasInternal is zero-copy, this is superfluous).
-    if (context_ && can_discard_alpha &&
+    if (context_ && (context_color_is_opaque || can_discard_alpha) &&
        base::FeatureList::IsEnabled(kOneCopyCanvasCapture)) {
      if (context_->CopyRenderingResultsToVideoFrame(
              copier_->GetAcceleratedVideoFramePool(
11 changes: 11 additions & 0 deletions third_party/blink/renderer/modules/webgpu/gpu_canvas_context.cc
@@ -181,6 +181,17 @@ V8OffscreenRenderingContext* GPUCanvasContext::AsV8OffscreenRenderingContext() {
   return MakeGarbageCollected<V8OffscreenRenderingContext>(this);
 }
 
+SkColorInfo GPUCanvasContext::CanvasRenderingContextSkColorInfo() const {
+  if (!swap_buffers_)
+    return CanvasRenderingContext::CanvasRenderingContextSkColorInfo();
+  return SkColorInfo(viz::ResourceFormatToClosestSkColorType(
+                         /*gpu_compositing=*/true, swap_buffers_->Format()),
+                     alpha_mode_ == V8GPUCanvasAlphaMode::Enum::kOpaque
+                         ? kOpaque_SkAlphaType
+                         : kPremul_SkAlphaType,
+                     SkColorSpace::MakeSRGB());
+}
+
 void GPUCanvasContext::Stop() {
   UnconfigureInternal();
   stopped_ = true;
1 change: 1 addition & 0 deletions third_party/blink/renderer/modules/webgpu/gpu_canvas_context.h
@@ -58,6 +58,7 @@ class GPUCanvasContext : public CanvasRenderingContext,
   // CanvasRenderingContext implementation
   V8RenderingContext* AsV8RenderingContext() final;
   V8OffscreenRenderingContext* AsV8OffscreenRenderingContext() final;
+  SkColorInfo CanvasRenderingContextSkColorInfo() const override;
   // Produces a snapshot of the current contents of the swap chain if possible.
   // If that texture has already been sent to the compositor, will produce a
   // snapshot of the just released texture associated to this gpu context.
