Skip to content

Commit

Permalink
LocalSampleBufferDisplayLayer does not need to recreate its AVSampleBufferDisplayLayer
Browse files Browse the repository at this point in the history

https://bugs.webkit.org/show_bug.cgi?id=264251
rdar://117996669

Reviewed by Eric Carlson.

We did a workaround to speed up the rendering of MediaStreamTrack in case of size change.
The cost was a small flash as we were replacing one AVSampleBufferDisplayLayer by another using -[CALayer replaceSublayer:with:].
We are removing this workaround which removes the small flash.
We still have issues with the temporary resizing that happens when the size changes.
This should be fixed as a follow-up.

* Source/WebCore/platform/graphics/avfoundation/objc/LocalSampleBufferDisplayLayer.h:
* Source/WebCore/platform/graphics/avfoundation/objc/LocalSampleBufferDisplayLayer.mm:
(WebCore::LocalSampleBufferDisplayLayer::initialize):
(WebCore::LocalSampleBufferDisplayLayer::updateSampleLayerBoundsAndPosition):
(WebCore::LocalSampleBufferDisplayLayer::flush):
(WebCore::LocalSampleBufferDisplayLayer::flushAndRemoveImage):
(WebCore::LocalSampleBufferDisplayLayer::enqueueVideoFrame):
(WebCore::LocalSampleBufferDisplayLayer::enqueueBufferInternal):
(WebCore::LocalSampleBufferDisplayLayer::clearVideoFrames):
(WebCore::LocalSampleBufferDisplayLayer::requestNotificationWhenReadyForVideoData):

Canonical link: https://commits.webkit.org/270426@main
  • Loading branch information
youennf committed Nov 9, 2023
1 parent a884b8e commit f514c31
Show file tree
Hide file tree
Showing 2 changed files with 13 additions and 73 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -100,10 +100,6 @@ class WEBCORE_EXPORT LocalSampleBufferDisplayLayer final : public SampleBufferDi
private:
RetainPtr<WebAVSampleBufferStatusChangeListener> m_statusChangeListener;
RetainPtr<AVSampleBufferDisplayLayer> m_sampleBufferDisplayLayer;
RetainPtr<AVSampleBufferDisplayLayer> m_sampleBufferDisplayLayerForQueue WTF_GUARDED_BY_CAPABILITY(workQueue());
bool m_isReconfiguring WTF_GUARDED_BY_CAPABILITY(workQueue()) { false };
RetainPtr<CVPixelBufferRef> m_lastPixelBuffer WTF_GUARDED_BY_CAPABILITY(workQueue());
MediaTime m_lastPresentationTime WTF_GUARDED_BY_CAPABILITY(workQueue());
RetainPtr<PlatformLayer> m_rootLayer;
RenderPolicy m_renderPolicy { RenderPolicy::TimingInfo };

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -169,15 +169,6 @@ static void runWithoutAnimations(const WTF::Function<void()>& function)
m_sampleBufferDisplayLayer.get().anchorPoint = { .5, .5 };
m_sampleBufferDisplayLayer.get().videoGravity = AVLayerVideoGravityResizeAspectFill;

m_processingQueue->dispatch([this, weakThis = ThreadSafeWeakPtr { *this }, layer = RetainPtr { m_sampleBufferDisplayLayer }]() mutable {
auto protectedThis = weakThis.get();
if (!protectedThis)
return;

assertIsCurrent(workQueue());
m_sampleBufferDisplayLayerForQueue = WTFMove(layer);
});

m_rootLayer = adoptNS([[CALayer alloc] init]);
m_rootLayer.get().hidden = hideRootLayer;

Expand Down Expand Up @@ -281,49 +272,19 @@ static void runWithoutAnimations(const WTF::Function<void()>& function)
if (!protectedThis)
return;

[m_statusChangeListener stop];

[m_sampleBufferDisplayLayer stopRequestingMediaData];
[m_sampleBufferDisplayLayer flush];
auto oldLayer = std::exchange(m_sampleBufferDisplayLayer, { });

m_sampleBufferDisplayLayer = adoptNS([PAL::allocAVSampleBufferDisplayLayerInstance() init]);
m_sampleBufferDisplayLayer.get().anchorPoint = { .5, .5 };
m_sampleBufferDisplayLayer.get().videoGravity = AVLayerVideoGravityResizeAspectFill;
[m_sampleBufferDisplayLayer setName:@"LocalSampleBufferDisplayLayer AVSampleBufferDisplayLayer"];

auto layerBounds = bounds.value_or(m_rootLayer.get().bounds);
CGPoint layerPosition { layerBounds.size.width / 2, layerBounds.size.height / 2 };
if (rotation == VideoFrame::Rotation::Right || rotation == VideoFrame::Rotation::Left)
std::swap(layerBounds.size.width, layerBounds.size.height);
runWithoutAnimations([&] {
if (bounds) {
m_rootLayer.get().position = { bounds->size.width / 2, bounds->size.height / 2 };
m_rootLayer.get().bounds = *bounds;
}
m_sampleBufferDisplayLayer.get().affineTransform = affineTransform;
m_sampleBufferDisplayLayer.get().position = layerPosition;
m_sampleBufferDisplayLayer.get().bounds = layerBounds;
m_sampleBufferDisplayLayer.get().affineTransform = affineTransform;
});

m_processingQueue->dispatch([this, weakThis = WTFMove(weakThis), newLayer = m_sampleBufferDisplayLayer, oldLayer = WTFMove(oldLayer), bounds]() mutable {
auto protectedThis = weakThis.get();
if (!protectedThis)
return;

assertIsCurrent(workQueue());
m_sampleBufferDisplayLayerForQueue = newLayer;
m_isReconfiguring = false;
if (m_lastPixelBuffer)
enqueueBufferInternal(m_lastPixelBuffer.get(), m_lastPresentationTime);

callOnMainThread([rootLayer = m_rootLayer, newLayer = WTFMove(newLayer), oldLayer = WTFMove(oldLayer), bounds] {
if (bounds) {
runWithoutAnimations([&] {
rootLayer.get().position = { bounds->size.width / 2, bounds->size.height / 2 };
rootLayer.get().bounds = *bounds;
});
}
[rootLayer replaceSublayer:oldLayer.get() with:newLayer.get()];
});
});
[m_statusChangeListener begin:m_sampleBufferDisplayLayer.get()];
});
}

Expand All @@ -334,8 +295,7 @@ static void runWithoutAnimations(const WTF::Function<void()>& function)
if (!protectedThis)
return;

assertIsCurrent(workQueue());
[m_sampleBufferDisplayLayerForQueue flush];
[m_sampleBufferDisplayLayer flush];
});
}

Expand All @@ -346,9 +306,8 @@ static void runWithoutAnimations(const WTF::Function<void()>& function)
if (!protectedThis)
return;

assertIsCurrent(workQueue());
@try {
[m_sampleBufferDisplayLayerForQueue flushAndRemoveImage];
[m_sampleBufferDisplayLayer flushAndRemoveImage];
} @catch(id exception) {
RELEASE_LOG_ERROR(WebRTC, "LocalSampleBufferDisplayLayer::flushAndRemoveImage failed");
layerErrorDidChange();
Expand Down Expand Up @@ -389,28 +348,21 @@ static inline CGAffineTransform transformationMatrixForVideoFrame(VideoFrame& vi

void LocalSampleBufferDisplayLayer::enqueueVideoFrame(VideoFrame& videoFrame)
{
bool isReconfiguring = false;
auto affineTransform = transformationMatrixForVideoFrame(videoFrame);
if (!CGAffineTransformEqualToTransform(affineTransform, m_affineTransform)) {
m_affineTransform = affineTransform;
m_videoFrameRotation = videoFrame.rotation();
isReconfiguring = true;
updateSampleLayerBoundsAndPosition({ });
}

m_processingQueue->dispatch([this, weakThis = ThreadSafeWeakPtr { *this }, pixelBuffer = RetainPtr { videoFrame.pixelBuffer() }, presentationTime = videoFrame.presentationTime(), isReconfiguring]() mutable {
m_processingQueue->dispatch([this, weakThis = ThreadSafeWeakPtr { *this }, pixelBuffer = RetainPtr { videoFrame.pixelBuffer() }, presentationTime = videoFrame.presentationTime()]() mutable {
auto protectedThis = weakThis.get();
if (!protectedThis)
return;

assertIsCurrent(workQueue());
if (isReconfiguring)
m_isReconfiguring = true;

enqueueBufferInternal(pixelBuffer.get(), presentationTime);
});

if (isReconfiguring)
updateSampleLayerBoundsAndPosition({ });
}

static const double rendererLatency = 0.02;
Expand All @@ -419,18 +371,12 @@ static inline CGAffineTransform transformationMatrixForVideoFrame(VideoFrame& vi
{
assertIsCurrent(workQueue());

m_lastPixelBuffer = pixelBuffer;
m_lastPresentationTime = presentationTime;

if (m_isReconfiguring)
return;

auto sampleBuffer = createVideoSampleBuffer(pixelBuffer, PAL::toCMTime(presentationTime));
auto now = MediaTime::createWithDouble(MonotonicTime::now().secondsSinceEpoch().value() + rendererLatency);
if (m_renderPolicy == RenderPolicy::Immediately || now >= presentationTime)
setSampleBufferAsDisplayImmediately(sampleBuffer.get());

[m_sampleBufferDisplayLayerForQueue enqueueSampleBuffer:sampleBuffer.get()];
[m_sampleBufferDisplayLayer enqueueSampleBuffer:sampleBuffer.get()];

#if !RELEASE_LOG_DISABLED
constexpr size_t frameCountPerLog = 1800; // log every minute at 30 fps
Expand Down Expand Up @@ -496,7 +442,6 @@ static inline CGAffineTransform transformationMatrixForVideoFrame(VideoFrame& vi
assertIsCurrent(workQueue());

m_pendingVideoFrameQueue.clear();
m_lastPixelBuffer = nullptr;
});
}

Expand All @@ -505,7 +450,7 @@ static inline CGAffineTransform transformationMatrixForVideoFrame(VideoFrame& vi
assertIsCurrent(workQueue());

ThreadSafeWeakPtr weakThis { *this };
[m_sampleBufferDisplayLayerForQueue requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
[m_sampleBufferDisplayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
auto protectedThis = weakThis.get();
if (!protectedThis)
return;
Expand All @@ -515,11 +460,10 @@ static inline CGAffineTransform transformationMatrixForVideoFrame(VideoFrame& vi
if (!protectedThis)
return;

assertIsCurrent(workQueue());
[m_sampleBufferDisplayLayerForQueue stopRequestingMediaData];
[m_sampleBufferDisplayLayer stopRequestingMediaData];

while (!m_pendingVideoFrameQueue.isEmpty()) {
if (![m_sampleBufferDisplayLayerForQueue isReadyForMoreMediaData]) {
if (![m_sampleBufferDisplayLayer isReadyForMoreMediaData]) {
requestNotificationWhenReadyForVideoData();
return;
}
Expand Down

0 comments on commit f514c31

Please sign in to comment.