Skip to content

Commit

Permalink
[GStreamer] GstVideoFrame leaks
Browse files Browse the repository at this point in the history
https://bugs.webkit.org/show_bug.cgi?id=274257

Reviewed by Xabier Rodriguez-Calvar.

The ImageGStreamer no longer holds a BitmapImage, but a PlatformImagePtr. A BitmapImage is now
created by VideoFrameGStreamer when painting is required. The ImageGStreamerSkia implementation no
longer holds the mapped GstVideoFrame because that keeps unnecessary references and file
descriptors open, the needed plane data is copied instead. The static ImageDecoderGStreamer vector
now stores WeakPtrs instead of RefPtrs, which is semantically more correct but in practice makes no difference.

* Source/WebCore/platform/graphics/gstreamer/ImageDecoderGStreamer.cpp:
(WebCore::ensureDebugCategoryIsInitialized):
(WebCore::teardownGStreamerImageDecoders):
(WebCore::ImageDecoderGStreamer::create):
(WebCore::ImageDecoderGStreamer::ImageDecoderGStreamer):
* Source/WebCore/platform/graphics/gstreamer/ImageDecoderGStreamer.h:
* Source/WebCore/platform/graphics/gstreamer/ImageGStreamer.h:
(WebCore::ImageGStreamer::create):
(WebCore::ImageGStreamer::image const):
(WebCore::ImageGStreamer::rect):
(WebCore::ImageGStreamer::createImage): Deleted.
(WebCore::ImageGStreamer::image): Deleted.
* Source/WebCore/platform/graphics/gstreamer/ImageGStreamerCairo.cpp:
(WebCore::ImageGStreamer::ImageGStreamer):
* Source/WebCore/platform/graphics/gstreamer/ImageGStreamerSkia.cpp:
(WebCore::ImageGStreamer::ImageGStreamer):
* Source/WebCore/platform/graphics/gstreamer/MediaSampleGStreamer.h:
(WebCore::MediaSampleGStreamer::sample const):
* Source/WebCore/platform/graphics/gstreamer/VideoFrameGStreamer.cpp:
(WebCore::convertSampleToImage):
(WebCore::VideoFrame::paintInContext):

Canonical link: https://commits.webkit.org/279052@main
  • Loading branch information
philn committed May 21, 2024
1 parent 9245865 commit f5bc5e5
Show file tree
Hide file tree
Showing 7 changed files with 68 additions and 47 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@
#include "GStreamerRegistryScanner.h"
#include "ImageGStreamer.h"
#include "MediaSampleGStreamer.h"
#include "NotImplemented.h"
#include "RuntimeApplicationChecks.h"
#include "VideoFrameGStreamer.h"
#include <gst/base/gsttypefindhelper.h>
Expand All @@ -39,13 +38,25 @@ GST_DEBUG_CATEGORY(webkit_image_decoder_debug);
#define GST_CAT_DEFAULT webkit_image_decoder_debug

static Lock s_decoderLock;
static Vector<RefPtr<ImageDecoderGStreamer>> s_imageDecoders;
static Vector<WeakPtr<ImageDecoderGStreamer>> s_imageDecoders;

static void ensureDebugCategoryIsInitialized()
{
// Lazily register the "webkitimagedecoder" GStreamer debug category.
// std::call_once guarantees the registration runs exactly once, even if
// this is reached concurrently from several threads (e.g. decoder
// creation racing with teardown).
static std::once_flag onceFlag;
std::call_once(onceFlag, [] {
GST_DEBUG_CATEGORY_INIT(webkit_image_decoder_debug, "webkitimagedecoder", 0, "WebKit image decoder");
});
}

void teardownGStreamerImageDecoders()
{
ensureDebugCategoryIsInitialized();
Locker lock { s_decoderLock };
for (auto& decoder : s_imageDecoders)
decoder->tearDown();
GST_DEBUG("Tearing down %zu image decoders", s_imageDecoders.size());
for (auto& weakDecoder : s_imageDecoders) {
if (RefPtr decoder = weakDecoder.get())
decoder->tearDown();
}
s_imageDecoders.clear();
}

Expand All @@ -60,12 +71,11 @@ class ImageDecoderGStreamerSample final : public MediaSampleGStreamer {
{
if (!m_image)
return nullptr;
return m_image->image().nativeImage()->platformImage();
return m_image->image();
}
void dropImage()
{
m_image = nullptr;
m_frame = nullptr;
}

SampleFlags flags() const override
Expand All @@ -76,8 +86,8 @@ class ImageDecoderGStreamerSample final : public MediaSampleGStreamer {
private:
ImageDecoderGStreamerSample(GRefPtr<GstSample>&& sample, const FloatSize& presentationSize)
: MediaSampleGStreamer(WTFMove(sample), presentationSize, { })
, m_frame(VideoFrameGStreamer::createWrappedSample(this->sample()))
{
m_frame = VideoFrameGStreamer::create(GRefPtr(platformSample().sample.gstSample), presentationSize);
m_image = m_frame->convertToImage();
}

Expand All @@ -101,19 +111,15 @@ RefPtr<ImageDecoderGStreamer> ImageDecoderGStreamer::create(FragmentedSharedBuff
RefPtr decoder = adoptRef(*new ImageDecoderGStreamer(data, mimeType, alphaOption, gammaAndColorProfileOption));
{
Locker lock { s_decoderLock };
s_imageDecoders.append(decoder);
s_imageDecoders.append(WeakPtr { *decoder });
}
return decoder;
}

ImageDecoderGStreamer::ImageDecoderGStreamer(FragmentedSharedBuffer& data, const String& mimeType, AlphaOption, GammaAndColorProfileOption)
: m_mimeType(mimeType)
{
static std::once_flag onceFlag;
std::call_once(onceFlag, [] {
GST_DEBUG_CATEGORY_INIT(webkit_image_decoder_debug, "webkitimagedecoder", 0, "WebKit image decoder");
});

ensureDebugCategoryIsInitialized();
static Atomic<uint32_t> decoderId;
GRefPtr<GstElement> parsebin = gst_element_factory_make("parsebin", makeString("image-decoder-parser-"_s, decoderId.exchangeAdd(1)).utf8().data());
m_parserHarness = GStreamerElementHarness::create(WTFMove(parsebin), [](auto&, auto&&) { }, [this](auto& pad) -> RefPtr<GStreamerElementHarness> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
#include "SharedBuffer.h"
#include <wtf/Forward.h>
#include <wtf/Lock.h>
#include <wtf/WeakPtr.h>

namespace WebCore {

Expand All @@ -37,7 +38,7 @@ class ImageDecoderGStreamerSample;

void teardownGStreamerImageDecoders();

class ImageDecoderGStreamer final : public ImageDecoder {
class ImageDecoderGStreamer final : public ImageDecoder, public CanMakeWeakPtr<ImageDecoderGStreamer> {
WTF_MAKE_FAST_ALLOCATED;
WTF_MAKE_NONCOPYABLE(ImageDecoderGStreamer);
public:
Expand Down
20 changes: 7 additions & 13 deletions Source/WebCore/platform/graphics/gstreamer/ImageGStreamer.h
Original file line number Diff line number Diff line change
Expand Up @@ -21,56 +21,50 @@

#if ENABLE(VIDEO) && USE(GSTREAMER)

#include "BitmapImage.h"
#include "FloatRect.h"
#include "GStreamerCommon.h"
#include "PlatformImage.h"

#include <gst/gst.h>
#include <gst/video/video-frame.h>

#include <wtf/Ref.h>
#include <wtf/RefCounted.h>
#include <wtf/RefPtr.h>
#include <wtf/Forward.h>

namespace WebCore {
class IntSize;

class ImageGStreamer : public RefCounted<ImageGStreamer> {
public:
static Ref<ImageGStreamer> createImage(GRefPtr<GstSample>&& sample)
static Ref<ImageGStreamer> create(GRefPtr<GstSample>&& sample)
{
return adoptRef(*new ImageGStreamer(WTFMove(sample)));
}
~ImageGStreamer();

operator bool() const { return !!m_image; }

BitmapImage& image()
{
ASSERT(m_image);
return *m_image.get();
}
PlatformImagePtr image() const { return m_image; }

void setCropRect(FloatRect rect) { m_cropRect = rect; }
FloatRect rect()
{
ASSERT(m_image);
if (!m_cropRect.isEmpty())
return FloatRect(m_cropRect);
return FloatRect(0, 0, m_image->size().width(), m_image->size().height());
return FloatRect(0, 0, m_size.width(), m_size.height());
}

bool hasAlpha() const { return m_hasAlpha; }

private:
ImageGStreamer(GRefPtr<GstSample>&&);
GRefPtr<GstSample> m_sample;
RefPtr<BitmapImage> m_image;
PlatformImagePtr m_image;
FloatRect m_cropRect;
#if USE(CAIRO)
GstVideoFrame m_videoFrame;
bool m_frameMapped { false };
#endif
FloatSize m_size;
bool m_hasAlpha { false };
};

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,8 @@ ImageGStreamer::ImageGStreamer(GRefPtr<GstSample>&& sample)
int height = GST_VIDEO_FRAME_HEIGHT(&m_videoFrame);
RefPtr<cairo_surface_t> surface;

m_size = { static_cast<float>(width), static_cast<float>(height) };

if (m_hasAlpha || componentSwapRequired) {
uint8_t* surfaceData = static_cast<uint8_t*>(fastMalloc(height * stride));
uint8_t* surfacePixel = surfaceData;
Expand Down Expand Up @@ -156,7 +158,7 @@ ImageGStreamer::ImageGStreamer(GRefPtr<GstSample>&& sample)
surface = adoptRef(cairo_image_surface_create_for_data(bufferData, CAIRO_FORMAT_ARGB32, width, height, stride));

ASSERT(cairo_surface_status(surface.get()) == CAIRO_STATUS_SUCCESS);
m_image = BitmapImage::create(WTFMove(surface));
m_image = WTFMove(surface);

if (GstVideoCropMeta* cropMeta = gst_buffer_get_video_crop_meta(buffer))
setCropRect(FloatRect(cropMeta->x, cropMeta->y, cropMeta->width, cropMeta->height));
Expand Down
43 changes: 27 additions & 16 deletions Source/WebCore/platform/graphics/gstreamer/ImageGStreamerSkia.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -28,35 +28,39 @@

#if ENABLE(VIDEO) && USE(GSTREAMER) && USE(SKIA)

#include "GStreamerCommon.h"
#include "NotImplemented.h"
#include <gst/gst.h>
#include <gst/video/gstvideometa.h>
#include <skia/ColorSpaceSkia.h>
#include <skia/core/SkImage.h>

IGNORE_CLANG_WARNINGS_BEGIN("cast-align")
#include <skia/core/SkPixmap.h>
IGNORE_CLANG_WARNINGS_END

#include <wtf/StdLibExtras.h>
#include <wtf/glib/WTFGType.h>

namespace WebCore {

struct BufferData {
std::span<const uint8_t> planeData;
};
WEBKIT_DEFINE_ASYNC_DATA_STRUCT(BufferData);

ImageGStreamer::ImageGStreamer(GRefPtr<GstSample>&& sample)
: m_sample(WTFMove(sample))
{
GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
if (UNLIKELY(!GST_IS_BUFFER(buffer)))
return;

GstVideoInfo videoInfo;
if (!gst_video_info_from_caps(&videoInfo, gst_sample_get_caps(m_sample.get())))
return;

auto videoFrame = makeUnique<GstMappedFrame>(buffer, &videoInfo, GST_MAP_READ);
auto videoFrame = makeUnique<GstMappedFrame>(m_sample, GST_MAP_READ);
if (!*videoFrame)
return;

auto videoInfo = videoFrame->info();

// The frame has to be RGB so we can paint it.
ASSERT(GST_VIDEO_INFO_IS_RGB(&videoInfo));
ASSERT(GST_VIDEO_INFO_IS_RGB(videoInfo));

// The video buffer may have these formats in these cases:
// { BGRx, BGRA } on little endian
Expand Down Expand Up @@ -91,17 +95,24 @@ ImageGStreamer::ImageGStreamer(GRefPtr<GstSample>&& sample)
break;
}

m_size = { static_cast<float>(videoFrame->width()), static_cast<float>(videoFrame->height()) };

auto toSkiaColorSpace = [](const PlatformVideoColorSpace&) {
notImplemented();
return SkColorSpace::MakeSRGB();
};
auto imageInfo = SkImageInfo::Make(videoFrame->width(), videoFrame->height(), colorType, alphaType, toSkiaColorSpace(videoColorSpaceFromInfo(videoInfo)));
SkPixmap pixmap(imageInfo, videoFrame->planeData(0), videoFrame->planeStride(0));
auto image = SkImages::RasterFromPixmap(pixmap, [](const void*, void* context) {
std::unique_ptr<GstMappedFrame> videoFrame(static_cast<GstMappedFrame*>(context));
}, videoFrame.release());

m_image = BitmapImage::create(WTFMove(image));
auto imageInfo = SkImageInfo::Make(videoFrame->width(), videoFrame->height(), colorType, alphaType, toSkiaColorSpace(videoColorSpaceFromInfo(*videoInfo)));

// Copy the buffer data. Keeping the whole mapped GstVideoFrame alive would increase memory
// pressure and the file descriptor(s) associated with the buffer pool open. We only need the
// data here.
auto bufferData = createBufferData();
bufferData->planeData = { reinterpret_cast<const uint8_t*>(videoFrame->planeData(0)), static_cast<unsigned>(videoFrame->planeStride(0)) };

SkPixmap pixmap(imageInfo, bufferData->planeData.data(), bufferData->planeData.size_bytes());
m_image = SkImages::RasterFromPixmap(pixmap, [](const void*, void* context) {
destroyBufferData(reinterpret_cast<BufferData*>(context));
}, bufferData);

if (auto* cropMeta = gst_buffer_get_video_crop_meta(buffer))
m_cropRect = FloatRect(cropMeta->x, cropMeta->y, cropMeta->width, cropMeta->height);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,8 @@ class MediaSampleGStreamer : public MediaSample {
PlatformSample::Type platformSampleType() const override { return PlatformSample::GStreamerSampleType; }
void dump(PrintStream&) const override;

const GRefPtr<GstSample>& sample() const { return m_sample; }

protected:
MediaSampleGStreamer(GRefPtr<GstSample>&&, const FloatSize& presentationSize, TrackID);
virtual ~MediaSampleGStreamer() = default;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@

#if ENABLE(VIDEO) && USE(GSTREAMER)

#include "BitmapImage.h"
#include "GLContext.h"
#include "GStreamerCommon.h"
#include "GraphicsContext.h"
Expand Down Expand Up @@ -82,7 +83,7 @@ static RefPtr<ImageGStreamer> convertSampleToImage(const GRefPtr<GstSample>& sam
if (!convertedSample)
return nullptr;

return ImageGStreamer::createImage(WTFMove(convertedSample));
return ImageGStreamer::create(WTFMove(convertedSample));
}

RefPtr<VideoFrame> VideoFrame::fromNativeImage(NativeImage& image)
Expand Down Expand Up @@ -540,7 +541,7 @@ void VideoFrame::copyTo(std::span<uint8_t> destination, VideoPixelFormat pixelFo
callback({ });
}

void VideoFrame::paintInContext(GraphicsContext & context, const FloatRect& destination, const ImageOrientation& destinationImageOrientation, bool shouldDiscardAlpha)
void VideoFrame::paintInContext(GraphicsContext& context, const FloatRect& destination, const ImageOrientation& destinationImageOrientation, bool shouldDiscardAlpha)
{
auto image = convertSampleToImage(downcast<VideoFrameGStreamer>(*this).sample());
if (!image)
Expand All @@ -549,7 +550,11 @@ void VideoFrame::paintInContext(GraphicsContext & context, const FloatRect& dest
auto imageRect = image->rect();
auto source = destinationImageOrientation.usesWidthAsHeight() ? FloatRect(imageRect.location(), imageRect.size().transposedSize()) : imageRect;
auto compositeOperator = !shouldDiscardAlpha && image->hasAlpha() ? CompositeOperator::SourceOver : CompositeOperator::Copy;
context.drawImage(image->image(), destination, source, { compositeOperator, destinationImageOrientation });
auto platformImage = image->image();
auto bitmapImage = BitmapImage::create(WTFMove(platformImage));
if (!bitmapImage)
return;
context.drawImage(*bitmapImage.get(), destination, source, { compositeOperator, destinationImageOrientation });
}

uint32_t VideoFrameGStreamer::pixelFormat() const
Expand Down

0 comments on commit f5bc5e5

Please sign in to comment.