Skip to content

Commit

Permalink
[GStreamer] Refactoring of VideoFrameGStreamer::createWrappedSample()
Browse files Browse the repository at this point in the history
https://bugs.webkit.org/show_bug.cgi?id=268216

Reviewed by Xabier Rodriguez-Calvar.

The presentationTime argument is now optional; if it is not provided, it is inferred from the PTS of the sample's buffer.

* Source/WebCore/platform/graphics/gstreamer/ImageDecoderGStreamer.cpp:
* Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp:
(WebCore::MediaPlayerPrivateGStreamer::paint):
(WebCore::MediaPlayerPrivateGStreamer::videoFrameForCurrentTime):
* Source/WebCore/platform/graphics/gstreamer/VideoFrameGStreamer.cpp:
(WebCore::VideoFrameGStreamer::createWrappedSample):
(WebCore::VideoFrameGStreamer::clone):
* Source/WebCore/platform/graphics/gstreamer/VideoFrameGStreamer.h:
* Source/WebCore/platform/mediastream/gstreamer/GStreamerVideoCapturer.cpp:
(WebCore::GStreamerVideoCapturer::setSinkVideoFrameCallback):

Canonical link: https://commits.webkit.org/273744@main
  • Loading branch information
philn committed Jan 30, 2024
1 parent acc1e85 commit b4461f4
Show file tree
Hide file tree
Showing 5 changed files with 11 additions and 11 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ class ImageDecoderGStreamerSample final : public MediaSampleGStreamer {
ImageDecoderGStreamerSample(GRefPtr<GstSample>&& sample, const FloatSize& presentationSize)
: MediaSampleGStreamer(WTFMove(sample), presentationSize, { })
{
m_frame = VideoFrameGStreamer::createWrappedSample(platformSample().sample.gstSample, MediaTime::invalidTime());
m_frame = VideoFrameGStreamer::createWrappedSample(platformSample().sample.gstSample);
m_image = m_frame->convertToImage();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3971,8 +3971,7 @@ void MediaPlayerPrivateGStreamer::paint(GraphicsContext& context, const FloatRec
if (!GST_IS_SAMPLE(m_sample.get()))
return;

auto* buffer = gst_sample_get_buffer(m_sample.get());
auto frame = VideoFrameGStreamer::createWrappedSample(m_sample, fromGstClockTime(GST_BUFFER_PTS(buffer)));
auto frame = VideoFrameGStreamer::createWrappedSample(m_sample);
frame->paintInContext(context, rect, m_videoSourceOrientation, false);
}

Expand All @@ -3988,8 +3987,7 @@ RefPtr<VideoFrame> MediaPlayerPrivateGStreamer::videoFrameForCurrentTime()
if (!GST_IS_SAMPLE(m_sample.get()))
return nullptr;

auto* buffer = gst_sample_get_buffer(m_sample.get());
auto frame = VideoFrameGStreamer::createWrappedSample(m_sample.get(), fromGstClockTime(GST_BUFFER_PTS(buffer)));
auto frame = VideoFrameGStreamer::createWrappedSample(m_sample);
auto convertedSample = frame->downloadSample(GST_VIDEO_FORMAT_BGRA);
if (!convertedSample)
return nullptr;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -233,7 +233,10 @@ Ref<VideoFrameGStreamer> VideoFrameGStreamer::createWrappedSample(const GRefPtr<
auto presentationSize = getVideoResolutionFromCaps(caps);
RELEASE_ASSERT(presentationSize);
auto colorSpace = videoColorSpaceFromCaps(caps);
return adoptRef(*new VideoFrameGStreamer(sample, *presentationSize, presentationTime, videoRotation, WTFMove(colorSpace)));
MediaTime timeStamp = presentationTime;
if (presentationTime.isInvalid())
timeStamp = fromGstClockTime(GST_BUFFER_PTS(gst_sample_get_buffer(sample.get())));
return adoptRef(*new VideoFrameGStreamer(sample, *presentationSize, timeStamp, videoRotation, WTFMove(colorSpace)));
}

RefPtr<VideoFrameGStreamer> VideoFrameGStreamer::createFromPixelBuffer(Ref<PixelBuffer>&& pixelBuffer, CanvasContentType canvasContentType, Rotation videoRotation, const MediaTime& presentationTime, const IntSize& destinationSize, double frameRate, bool videoMirrored, std::optional<VideoFrameTimeMetadata>&& metadata, PlatformVideoColorSpace&& colorSpace)
Expand Down Expand Up @@ -516,7 +519,7 @@ RefPtr<ImageGStreamer> VideoFrameGStreamer::convertToImage()

Ref<VideoFrame> VideoFrameGStreamer::clone()
{
return createWrappedSample(m_sample, presentationTime());
return createWrappedSample(m_sample, presentationTime(), rotation());
}

#undef GST_CAT_DEFAULT
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ class VideoFrameGStreamer final : public VideoFrame {

static Ref<VideoFrameGStreamer> create(GRefPtr<GstSample>&&, const FloatSize& presentationSize, const MediaTime& presentationTime = MediaTime::invalidTime(), Rotation videoRotation = Rotation::None, bool videoMirrored = false, std::optional<VideoFrameTimeMetadata>&& = std::nullopt, std::optional<PlatformVideoColorSpace>&& = std::nullopt);

static Ref<VideoFrameGStreamer> createWrappedSample(const GRefPtr<GstSample>&, const MediaTime& presentationTime, Rotation videoRotation = Rotation::None);
static Ref<VideoFrameGStreamer> createWrappedSample(const GRefPtr<GstSample>&, const MediaTime& presentationTime = MediaTime::invalidTime(), Rotation videoRotation = Rotation::None);

static RefPtr<VideoFrameGStreamer> createFromPixelBuffer(Ref<PixelBuffer>&&, CanvasContentType canvasContentType, Rotation videoRotation = VideoFrame::Rotation::None, const MediaTime& presentationTime = MediaTime::invalidTime(), const IntSize& destinationSize = { }, double frameRate = 1, bool videoMirrored = false, std::optional<VideoFrameTimeMetadata>&& metadata = std::nullopt, PlatformVideoColorSpace&& = { });

Expand All @@ -63,7 +63,7 @@ class VideoFrameGStreamer final : public VideoFrame {

private:
VideoFrameGStreamer(GRefPtr<GstSample>&&, const FloatSize& presentationSize, const MediaTime& presentationTime = MediaTime::invalidTime(), Rotation = Rotation::None, bool videoMirrored = false, std::optional<VideoFrameTimeMetadata>&& = std::nullopt, PlatformVideoColorSpace&& = { });
VideoFrameGStreamer(const GRefPtr<GstSample>&, const FloatSize& presentationSize, const MediaTime& presentationTime, Rotation = Rotation::None, PlatformVideoColorSpace&& = { });
VideoFrameGStreamer(const GRefPtr<GstSample>&, const FloatSize& presentationSize, const MediaTime& presentationTime = MediaTime::invalidTime(), Rotation = Rotation::None, PlatformVideoColorSpace&& = { });

bool isGStreamer() const final { return true; }
Ref<VideoFrame> clone() final;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,8 +63,7 @@ void GStreamerVideoCapturer::setSinkVideoFrameCallback(SinkVideoFrameCallback&&
m_sinkVideoFrameCallback.second = WTFMove(callback);
m_sinkVideoFrameCallback.first = g_signal_connect_swapped(sink(), "new-sample", G_CALLBACK(+[](GStreamerVideoCapturer* capturer, GstElement* sink) -> GstFlowReturn {
auto gstSample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
auto presentationTime = fromGstClockTime(GST_BUFFER_PTS(gst_sample_get_buffer(gstSample.get())));
capturer->m_sinkVideoFrameCallback.second(VideoFrameGStreamer::createWrappedSample(gstSample, presentationTime));
capturer->m_sinkVideoFrameCallback.second(VideoFrameGStreamer::createWrappedSample(gstSample));
return GST_FLOW_OK;
}), this);
}
Expand Down

0 comments on commit b4461f4

Please sign in to comment.