2 changes: 0 additions & 2 deletions LayoutTests/platform/glib/TestExpectations
@@ -870,8 +870,6 @@ webkit.org/b/230028 media/media-source/media-source-seek-twice.html [ Pass Timeout ]

# rvfc related failures
imported/w3c/web-platform-tests/video-rvfc [ Pass ]
fast/mediastream/getUserMedia-rvfc.html [ Pass Failure ]
webrtc/peerConnection-rvfc.html [ Failure ]
imported/w3c/web-platform-tests/video-rvfc/request-video-frame-callback-webrtc.https.html [ Skip ]
imported/w3c/web-platform-tests/video-rvfc/request-video-frame-callback-before-xr-session.https.html [ Skip ]
imported/w3c/web-platform-tests/video-rvfc/request-video-frame-callback-during-xr-session.https.html [ Skip ]
Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h
@@ -87,7 +87,7 @@ uint64_t toGstUnsigned64Time(const MediaTime&);
bool isThunderRanked();
#endif

inline GstClockTime toGstClockTime(const MediaTime &mediaTime)
inline GstClockTime toGstClockTime(const MediaTime& mediaTime)
{
if (mediaTime.isInvalid())
return GST_CLOCK_TIME_NONE;
@@ -96,6 +96,11 @@ inline GstClockTime toGstClockTime(const MediaTime &mediaTime)
return static_cast<GstClockTime>(toGstUnsigned64Time(mediaTime));
}

inline GstClockTime toGstClockTime(const Seconds& seconds)
{
return toGstClockTime(MediaTime::createWithDouble(seconds.seconds()));
}

inline MediaTime fromGstClockTime(GstClockTime time)
{
if (!GST_CLOCK_TIME_IS_VALID(time))
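The new overload routes WTF's Seconds through MediaTime before the nanosecond conversion. Below is a minimal standalone sketch of the equivalent conversion, assuming only the GStreamer headers; the MediaTime plumbing is replaced by a plain double here, and the non-finite handling mirrors MediaTime::createWithDouble() producing an invalid time.

#include <gst/gst.h>
#include <cmath>

// Stand-in for toGstClockTime(const Seconds&): seconds -> nanoseconds. Non-finite
// input maps to GST_CLOCK_TIME_NONE (matching the invalid-MediaTime path above);
// negative input is treated the same way here for simplicity.
static GstClockTime secondsToGstClockTime(double seconds)
{
    if (!std::isfinite(seconds) || seconds < 0)
        return GST_CLOCK_TIME_NONE;
    return static_cast<GstClockTime>(seconds * GST_SECOND); // GST_SECOND == 10^9 ns
}

// Example: the per-frame duration used by setBufferFields() further down,
// secondsToGstClockTime(1.0 / 30.0), is 33333333 ns for a 30 fps stream.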
Source/WebCore/platform/graphics/gstreamer/VideoFrameGStreamer.cpp
@@ -30,6 +30,13 @@

namespace WebCore {

static inline void setBufferFields(GstBuffer* buffer, const MediaTime& presentationTime, double frameRate)
{
GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_LIVE);
GST_BUFFER_DTS(buffer) = GST_BUFFER_PTS(buffer) = toGstClockTime(presentationTime);
GST_BUFFER_DURATION(buffer) = toGstClockTime(1_s / frameRate);
}

Ref<VideoFrameGStreamer> VideoFrameGStreamer::createFromPixelBuffer(Ref<PixelBuffer>&& pixelBuffer, CanvasContentType canvasContentType, Rotation videoRotation, const MediaTime& presentationTime, const IntSize& destinationSize, double frameRate, bool videoMirrored, std::optional<VideoFrameTimeMetadata>&& metadata)
{
ensureGStreamerInitialized();
@@ -57,8 +64,6 @@ Ref<VideoFrameGStreamer> VideoFrameGStreamer::createFromPixelBuffer(Ref<PixelBuf
break;
}
const char* formatName = gst_video_format_to_string(format);
gst_buffer_add_video_meta(buffer.get(), GST_VIDEO_FRAME_FLAG_NONE, format, width, height);

int frameRateNumerator, frameRateDenominator;
gst_util_double_to_fraction(frameRate, &frameRateNumerator, &frameRateDenominator);

@@ -67,19 +72,10 @@ Ref<VideoFrameGStreamer> VideoFrameGStreamer::createFromPixelBuffer(Ref<PixelBuf

GRefPtr<GstSample> sample;

auto setBufferFields = [&](GstBuffer* buffer) {
GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_LIVE);

GST_BUFFER_DTS(buffer) = GST_BUFFER_PTS(buffer) = toGstClockTime(presentationTime);

auto duration = 1_s / frameRate;
GST_BUFFER_DURATION(buffer) = toGstClockTime(MediaTime::createWithDouble(duration.seconds()));
};

// Optionally resize the video frame to fit destinationSize. This code path is used mostly by
// the mock realtime video source when the gUM constraints specifically required exact width
// and/or height values.
if (!destinationSize.isZero()) {
if (!destinationSize.isZero() && size != destinationSize) {
GstVideoInfo inputInfo;
gst_video_info_from_caps(&inputInfo, caps.get());

@@ -97,18 +93,17 @@ Ref<VideoFrameGStreamer> VideoFrameGStreamer::createFromPixelBuffer(Ref<PixelBuf
GstMappedFrame outputFrame(outputBuffer.get(), outputInfo, GST_MAP_WRITE);
gst_video_converter_frame(converter.get(), inputFrame.get(), outputFrame.get());
}
gst_buffer_add_video_meta(outputBuffer.get(), GST_VIDEO_FRAME_FLAG_NONE, format, width, height);

if (metadata)
webkitGstBufferSetVideoFrameTimeMetadata(outputBuffer.get(), *metadata);

setBufferFields(outputBuffer.get());
setBufferFields(outputBuffer.get(), presentationTime, frameRate);
sample = adoptGRef(gst_sample_new(outputBuffer.get(), outputCaps.get(), nullptr, nullptr));
} else {
if (metadata)
buffer = webkitGstBufferSetVideoFrameTimeMetadata(buffer.get(), *metadata);

setBufferFields(buffer.get());
setBufferFields(buffer.get(), presentationTime, frameRate);
sample = adoptGRef(gst_sample_new(buffer.get(), caps.get(), nullptr, nullptr));
}

@@ -134,6 +129,45 @@ VideoFrameGStreamer::VideoFrameGStreamer(const GRefPtr<GstSample>& sample, const
{
}

Ref<VideoFrameGStreamer> VideoFrameGStreamer::resizeTo(const IntSize& destinationSize)
{
auto* caps = gst_sample_get_caps(m_sample.get());

const auto* structure = gst_caps_get_structure(caps, 0);
int frameRateNumerator, frameRateDenominator;
if (!gst_structure_get_fraction(structure, "framerate", &frameRateNumerator, &frameRateDenominator)) {
frameRateNumerator = 1;
frameRateDenominator = 1;
}

GstVideoInfo inputInfo;
gst_video_info_from_caps(&inputInfo, caps);
auto format = GST_VIDEO_INFO_FORMAT(&inputInfo);
auto width = destinationSize.width();
auto height = destinationSize.height();
auto outputCaps = adoptGRef(gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, gst_video_format_to_string(format), "width", G_TYPE_INT, width,
"height", G_TYPE_INT, height, "framerate", GST_TYPE_FRACTION, frameRateNumerator, frameRateDenominator, nullptr));
GstVideoInfo outputInfo;
gst_video_info_from_caps(&outputInfo, outputCaps.get());

auto* buffer = gst_sample_get_buffer(m_sample.get());
auto outputBuffer = adoptGRef(gst_buffer_new_allocate(nullptr, GST_VIDEO_INFO_SIZE(&outputInfo), nullptr));
{
GUniquePtr<GstVideoConverter> converter(gst_video_converter_new(&inputInfo, &outputInfo, nullptr));
GstMappedFrame inputFrame(buffer, inputInfo, GST_MAP_READ);
GstMappedFrame outputFrame(outputBuffer.get(), outputInfo, GST_MAP_WRITE);
gst_video_converter_frame(converter.get(), inputFrame.get(), outputFrame.get());
}

double frameRate;
gst_util_fraction_to_double(frameRateNumerator, frameRateDenominator, &frameRate);

auto presentationTime = this->presentationTime();
setBufferFields(outputBuffer.get(), presentationTime, frameRate);
auto sample = adoptGRef(gst_sample_new(outputBuffer.get(), outputCaps.get(), nullptr, nullptr));
return adoptRef(*new VideoFrameGStreamer(WTFMove(sample), presentationTime, rotation()));
}

RefPtr<JSC::Uint8ClampedArray> VideoFrameGStreamer::computeRGBAImageData() const
{
auto* caps = gst_sample_get_caps(m_sample.get());
@@ -159,7 +193,6 @@ RefPtr<JSC::Uint8ClampedArray> VideoFrameGStreamer::computeRGBAImageData() const
unsigned byteLength = GST_VIDEO_INFO_SIZE(&inputInfo);
auto bufferStorage = JSC::ArrayBuffer::create(width * height, 4);
auto outputBuffer = adoptGRef(gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_NO_SHARE, bufferStorage->data(), byteLength, 0, byteLength, nullptr, [](gpointer) { }));
gst_buffer_add_video_meta(outputBuffer.get(), GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_FORMAT_RGBA, width, height);
GstMappedFrame outputFrame(outputBuffer.get(), outputInfo, GST_MAP_WRITE);

GUniquePtr<GstVideoConverter> converter(gst_video_converter_new(&inputInfo, &outputInfo, nullptr));
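For reference, here is a standalone sketch of the GstVideoConverter scaling step that resizeTo() wraps, using only public GStreamer API; WebKit's GRefPtr and GstMappedFrame helpers are replaced with raw calls, and error handling is omitted.

#include <gst/video/video.h>

// Scale a raw video buffer to outWidth x outHeight, keeping the input format.
static GstBuffer* scaleRawBuffer(GstBuffer* input, GstVideoInfo* inInfo, int outWidth, int outHeight)
{
    GstVideoInfo outInfo;
    gst_video_info_set_format(&outInfo, GST_VIDEO_INFO_FORMAT(inInfo), outWidth, outHeight);

    GstBuffer* output = gst_buffer_new_allocate(nullptr, GST_VIDEO_INFO_SIZE(&outInfo), nullptr);

    GstVideoFrame inFrame, outFrame;
    gst_video_frame_map(&inFrame, inInfo, input, GST_MAP_READ);
    gst_video_frame_map(&outFrame, &outInfo, output, GST_MAP_WRITE);

    GstVideoConverter* converter = gst_video_converter_new(inInfo, &outInfo, nullptr);
    gst_video_converter_frame(converter, &inFrame, &outFrame);
    gst_video_converter_free(converter);

    gst_video_frame_unmap(&outFrame);
    gst_video_frame_unmap(&inFrame);
    return output;
}

Passing nullptr as the converter config accepts the defaults, as the patch does; a GstStructure with options such as GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD could be supplied instead to trade speed for scaling quality.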
Source/WebCore/platform/graphics/gstreamer/VideoFrameGStreamer.h
@@ -30,6 +30,7 @@ typedef struct _GstSample GstSample;
namespace WebCore {

class PixelBuffer;
class IntSize;

class VideoFrameGStreamer final : public VideoFrame {
public:
@@ -51,6 +52,9 @@ class VideoFrameGStreamer final : public VideoFrame {

static Ref<VideoFrameGStreamer> createFromPixelBuffer(Ref<PixelBuffer>&&, CanvasContentType canvasContentType, Rotation videoRotation, const MediaTime& presentationTime = MediaTime::invalidTime(), const IntSize& destinationSize = { }, double frameRate = 1, bool videoMirrored = false, std::optional<VideoFrameTimeMetadata>&& metadata = std::nullopt);
Ref<VideoFrameGStreamer> resizeTo(const IntSize&);

GstSample* sample() const { return m_sample.get(); }
RefPtr<JSC::Uint8ClampedArray> computeRGBAImageData() const;

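A hedged usage sketch for the new declaration; frame is assumed to be an existing Ref<VideoFrameGStreamer>, e.g. obtained from createFromPixelBuffer().

// resizeTo() returns a new frame backed by a freshly converted sample; the
// implementation above preserves format, framerate, presentation time and rotation.
Ref<VideoFrameGStreamer> resized = frame->resizeTo(IntSize { 320, 240 });
GstSample* scaled = resized->sample(); // raw 320x240 video, same GstVideoFormat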
15 changes: 12 additions & 3 deletions Source/WebCore/platform/mediastream/RealtimeVideoSource.cpp
@@ -33,6 +33,10 @@
#include "VideoFrameCV.h"
#endif

#if USE(GSTREAMER)
#include "VideoFrameGStreamer.h"
#endif

namespace WebCore {

RealtimeVideoSource::RealtimeVideoSource(Ref<RealtimeVideoCaptureSource>&& source, bool shouldUseIOSurface)
@@ -171,9 +175,9 @@ void RealtimeVideoSource::sourceStopped()
});
}

#if PLATFORM(COCOA)
RefPtr<VideoFrame> RealtimeVideoSource::adaptVideoFrame(VideoFrame& videoFrame)
{
#if PLATFORM(COCOA)
if (!m_imageTransferSession || m_imageTransferSession->pixelFormat() != videoFrame.pixelFormat())
m_imageTransferSession = ImageTransferSessionVT::create(videoFrame.pixelFormat(), m_shouldUseIOSurface);

Expand All @@ -182,11 +186,16 @@ RefPtr<VideoFrame> RealtimeVideoSource::adaptVideoFrame(VideoFrame& videoFrame)
return nullptr;

auto newVideoFrame = m_imageTransferSession->convertVideoFrame(videoFrame, size());
#elif USE(GSTREAMER)
auto newVideoFrame = reinterpret_cast<VideoFrameGStreamer&>(videoFrame).resizeTo(size());
#else
notImplemented();
return nullptr;
#endif
ASSERT(newVideoFrame);

return newVideoFrame;
}
#endif

void RealtimeVideoSource::videoFrameAvailable(VideoFrame& videoFrame, VideoFrameTimeMetadata metadata)
{
@@ -198,7 +207,7 @@ void RealtimeVideoSource::videoFrameAvailable(VideoFrame& videoFrame, VideoFrame
if (!m_frameDecimation)
m_frameDecimation = 1;

#if PLATFORM(COCOA)
#if PLATFORM(COCOA) || USE(GSTREAMER)
auto size = this->size();
if (!size.isEmpty() && size != expandedIntSize(videoFrame.presentationSize())) {
if (auto newVideoFrame = adaptVideoFrame(videoFrame)) {
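Assembled from the hunks above, the post-patch dispatch in adaptVideoFrame() reads as follows; this is a readability sketch with the Cocoa-specific setup abbreviated, not additional code in the patch.

RefPtr<VideoFrame> RealtimeVideoSource::adaptVideoFrame(VideoFrame& videoFrame)
{
#if PLATFORM(COCOA)
    // ... ImageTransferSessionVT setup elided ...
    auto newVideoFrame = m_imageTransferSession->convertVideoFrame(videoFrame, size());
#elif USE(GSTREAMER)
    // The cast is safe on the GStreamer port, where every VideoFrame is a VideoFrameGStreamer.
    auto newVideoFrame = reinterpret_cast<VideoFrameGStreamer&>(videoFrame).resizeTo(size());
#else
    notImplemented();
    return nullptr;
#endif
    ASSERT(newVideoFrame);
    return newVideoFrame;
}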
2 changes: 0 additions & 2 deletions Source/WebCore/platform/mediastream/RealtimeVideoSource.h
@@ -74,9 +74,7 @@ class RealtimeVideoSource final
// RealtimeMediaSource::VideoFrameObserver
void videoFrameAvailable(VideoFrame&, VideoFrameTimeMetadata) final;

#if PLATFORM(COCOA)
RefPtr<VideoFrame> adaptVideoFrame(VideoFrame&);
#endif

#if !RELEASE_LOG_DISABLED
void setLogger(const Logger&, const void*) final;