[GStreamer] Update minimum required version to 1.18.4
https://bugs.webkit.org/show_bug.cgi?id=255334

Reviewed by Xabier Rodriguez-Calvar.

This is the version shipping in Debian 11 (bullseye). Ubuntu 20.04 goes out of support on April
21st, so we can bump the required version.

* Source/WebCore/platform/audio/gstreamer/AudioSourceProviderGStreamer.cpp:
(WebCore::AudioSourceProviderGStreamer::AudioSourceProviderGStreamer):
* Source/WebCore/platform/graphics/gstreamer/GLVideoSinkGStreamer.cpp:
(webKitGLVideoSinkGetProperty):
* Source/WebCore/platform/graphics/gstreamer/GStreamerAudioMixer.cpp:
(WebCore::GStreamerAudioMixer::isAvailable):
* Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp:
(WebCore::videoColorSpaceFromInfo):
(WebCore::fillVideoInfoColorimetryFromColorSpace):
(WebCore::webkitGstVideoFormatInfoComponent): Deleted.
(WebCore::webkitGstElementGetCurrentRunningTime): Deleted.
* Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h:
* Source/WebCore/platform/graphics/gstreamer/GStreamerRegistryScanner.cpp:
(WebCore::GStreamerRegistryScanner::isAVC1CodecSupported const):
* Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp:
(WebCore::MediaPlayerPrivateGStreamer::createVideoSink):
(WebCore::MediaPlayerPrivateGStreamer::updateVideoSinkStatistics):
* Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h:
* Source/cmake/GStreamerChecks.cmake:

Canonical link: https://commits.webkit.org/263218@main
philn committed Apr 21, 2023
1 parent b29168a commit bca3dd7
Showing 9 changed files with 17 additions and 127 deletions.
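In short, the patch drops the runtime webkitGstCheckVersion(1, 18, 0) branches, the compile-time #if !GST_CHECK_VERSION(1, 18, 0) fallbacks (including the vendored copies of gst_video_format_info_component() and gst_element_get_current_running_time()), and the fpsdisplaysink-based statistics path, since 1.18 behaviour is now guaranteed at both build time and run time. As a rough, self-contained sketch of the two guard styles being retired (a hypothetical example, not code from the patch):

// Hypothetical sketch only: how a compile-time and a runtime GStreamer version
// guard are typically written. Both become tautologies once the build system
// enforces GStreamer >= 1.18.4.
#include <gst/gst.h>
#include <cstdio>

int main(int argc, char** argv)
{
    gst_init(&argc, &argv);

#if GST_CHECK_VERSION(1, 18, 0)
    // Compile-time check against the headers the program is built with.
    std::printf("Built against GStreamer >= 1.18\n");
#endif

    // Runtime check against the library actually loaded, analogous to
    // WebKit's webkitGstCheckVersion(1, 18, 0).
    guint major, minor, micro, nano;
    gst_version(&major, &minor, &micro, &nano);
    if (major > 1 || (major == 1 && minor >= 18))
        std::printf("Running against GStreamer %u.%u.%u\n", major, minor, micro);

    return 0;
}

With find_package(GStreamer 1.18.4 REQUIRED ...) in GStreamerChecks.cmake, both conditions always hold, which is why the guarded branches in the hunks below collapse to their 1.18 bodies.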
@@ -122,10 +122,7 @@ AudioSourceProviderGStreamer::AudioSourceProviderGStreamer(MediaStreamTrackPriva
     g_signal_connect_swapped(decodebin, "pad-added", G_CALLBACK(+[](AudioSourceProviderGStreamer* provider, GstPad* pad) {
         auto padCaps = adoptGRef(gst_pad_query_caps(pad, nullptr));
         bool isAudio = doCapsHaveType(padCaps.get(), "audio");
-        if (webkitGstCheckVersion(1, 18, 0))
-            RELEASE_ASSERT(isAudio);
-        else if (!isAudio)
-            return;
+        RELEASE_ASSERT(isAudio);
 
         auto sinkPad = adoptGRef(gst_element_get_static_pad(provider->m_audioSinkBin.get(), "sink"));
         gst_pad_link(pad, sinkPad.get());
@@ -186,13 +186,12 @@ static void webKitGLVideoSinkGetProperty(GObject* object, guint propertyId, GVal
     WebKitGLVideoSink* sink = WEBKIT_GL_VIDEO_SINK(object);
 
     switch (propertyId) {
-    case PROP_STATS:
-        if (webkitGstCheckVersion(1, 18, 0)) {
-            GUniqueOutPtr<GstStructure> stats;
-            g_object_get(sink->priv->appSink.get(), "stats", &stats.outPtr(), nullptr);
-            gst_value_set_structure(value, stats.get());
-        }
+    case PROP_STATS: {
+        GUniqueOutPtr<GstStructure> stats;
+        g_object_get(sink->priv->appSink.get(), "stats", &stats.outPtr(), nullptr);
+        gst_value_set_structure(value, stats.get());
         break;
+    }
     default:
         G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, paramSpec);
         RELEASE_ASSERT_NOT_REACHED();
@@ -32,7 +32,7 @@ GST_DEBUG_CATEGORY_STATIC(webkit_media_gst_audio_mixer_debug);
 
 bool GStreamerAudioMixer::isAvailable()
 {
-    return webkitGstCheckVersion(1, 18, 0) && isGStreamerPluginAvailable("inter") && isGStreamerPluginAvailable("audiomixer");
+    return isGStreamerPluginAvailable("inter") && isGStreamerPluginAvailable("audiomixer");
 }
 
 GStreamerAudioMixer& GStreamerAudioMixer::singleton()
Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.cpp (52 changes: 0 additions, 52 deletions)
@@ -98,24 +98,6 @@ GstPad* webkitGstGhostPadFromStaticTemplate(GstStaticPadTemplate* staticPadTempl
     return pad;
 }
 
-#if !GST_CHECK_VERSION(1, 18, 0)
-void webkitGstVideoFormatInfoComponent(const GstVideoFormatInfo* info, guint plane, gint components[GST_VIDEO_MAX_COMPONENTS])
-{
-    guint c, i = 0;
-
-    /* Reverse mapping of info->plane. */
-    for (c = 0; c < GST_VIDEO_FORMAT_INFO_N_COMPONENTS(info); c++) {
-        if (GST_VIDEO_FORMAT_INFO_PLANE(info, c) == plane) {
-            components[i] = c;
-            i++;
-        }
-    }
-
-    for (c = i; c < GST_VIDEO_MAX_COMPONENTS; c++)
-        components[c] = -1;
-}
-#endif
-
 #if ENABLE(VIDEO)
 bool getVideoSizeAndFormatFromCaps(const GstCaps* caps, WebCore::IntSize& size, GstVideoFormat& format, int& pixelAspectRatioNumerator, int& pixelAspectRatioDenominator, int& stride)
 {
@@ -727,36 +709,6 @@ String gstStructureToJSONString(const GstStructure* structure)
     return value->toJSONString();
 }
 
-#if !GST_CHECK_VERSION(1, 18, 0)
-GstClockTime webkitGstElementGetCurrentRunningTime(GstElement* element)
-{
-    g_return_val_if_fail(GST_IS_ELEMENT(element), GST_CLOCK_TIME_NONE);
-
-    auto baseTime = gst_element_get_base_time(element);
-    if (!GST_CLOCK_TIME_IS_VALID(baseTime)) {
-        GST_DEBUG_OBJECT(element, "Could not determine base time");
-        return GST_CLOCK_TIME_NONE;
-    }
-
-    auto clock = adoptGRef(gst_element_get_clock(element));
-    if (!clock) {
-        GST_DEBUG_OBJECT(element, "Element has no clock");
-        return GST_CLOCK_TIME_NONE;
-    }
-
-    auto clockTime = gst_clock_get_time(clock.get());
-    if (!GST_CLOCK_TIME_IS_VALID(clockTime))
-        return GST_CLOCK_TIME_NONE;
-
-    if (clockTime < baseTime) {
-        GST_DEBUG_OBJECT(element, "Got negative current running time");
-        return GST_CLOCK_TIME_NONE;
-    }
-
-    return clockTime - baseTime;
-}
-#endif
-
 GstClockTime webkitGstInitTime()
 {
     return s_webkitGstInitTime;
@@ -814,7 +766,6 @@ PlatformVideoColorSpace videoColorSpaceFromInfo(const GstVideoInfo& info)
     case GST_VIDEO_TRANSFER_BT709:
         colorSpace.transfer = PlatformVideoTransferCharacteristics::Bt709;
         break;
-#if GST_CHECK_VERSION(1, 18, 0)
     case GST_VIDEO_TRANSFER_BT601:
         colorSpace.transfer = PlatformVideoTransferCharacteristics::Smpte170m;
         break;
@@ -827,7 +778,6 @@ PlatformVideoColorSpace videoColorSpaceFromInfo(const GstVideoInfo& info)
     case GST_VIDEO_TRANSFER_BT2020_10:
         colorSpace.transfer = PlatformVideoTransferCharacteristics::Bt2020_10bit;
         break;
-#endif
     case GST_VIDEO_TRANSFER_BT2020_12:
         colorSpace.transfer = PlatformVideoTransferCharacteristics::Bt2020_12bit;
         break;
@@ -946,7 +896,6 @@ void fillVideoInfoColorimetryFromColorSpace(GstVideoInfo* info, const PlatformVi
     case PlatformVideoTransferCharacteristics::Bt709:
         GST_VIDEO_INFO_COLORIMETRY(info).transfer = GST_VIDEO_TRANSFER_BT709;
         break;
-#if GST_CHECK_VERSION(1, 18, 0)
     case PlatformVideoTransferCharacteristics::Smpte170m:
         GST_VIDEO_INFO_COLORIMETRY(info).transfer = GST_VIDEO_TRANSFER_BT601;
         break;
@@ -959,7 +908,6 @@ void fillVideoInfoColorimetryFromColorSpace(GstVideoInfo* info, const PlatformVi
     case PlatformVideoTransferCharacteristics::Bt2020_10bit:
         GST_VIDEO_INFO_COLORIMETRY(info).transfer = GST_VIDEO_TRANSFER_BT2020_10;
         break;
-#endif
     case PlatformVideoTransferCharacteristics::Bt2020_12bit:
         GST_VIDEO_INFO_COLORIMETRY(info).transfer = GST_VIDEO_TRANSFER_BT2020_12;
         break;
Source/WebCore/platform/graphics/gstreamer/GStreamerCommon.h (16 changes: 0 additions, 16 deletions)
@@ -56,15 +56,6 @@ inline bool webkitGstCheckVersion(guint major, guint minor, guint micro)
     return true;
 }
 
-// gst_video_format_info_component() is GStreamer 1.18 API, so for older versions we use a local
-// vendored copy of the function.
-#if !GST_CHECK_VERSION(1, 18, 0)
-#define GST_VIDEO_MAX_COMPONENTS 4
-void webkitGstVideoFormatInfoComponent(const GstVideoFormatInfo*, guint, gint components[GST_VIDEO_MAX_COMPONENTS]);
-
-#define gst_video_format_info_component webkitGstVideoFormatInfoComponent
-#endif
-
 #define GST_VIDEO_CAPS_TYPE_PREFIX "video/"
 #define GST_AUDIO_CAPS_TYPE_PREFIX "audio/"
 #define GST_TEXT_CAPS_TYPE_PREFIX "text/"
@@ -332,13 +323,6 @@ GstElement* makeGStreamerBin(const char* description, bool ghostUnlinkedPads);
 
 String gstStructureToJSONString(const GstStructure*);
 
-// gst_element_get_current_running_time() is GStreamer 1.18 API, so for older versions we use a local
-// vendored copy of the function.
-#if !GST_CHECK_VERSION(1, 18, 0)
-GstClockTime webkitGstElementGetCurrentRunningTime(GstElement*);
-#define gst_element_get_current_running_time webkitGstElementGetCurrentRunningTime
-#endif
-
 GstClockTime webkitGstInitTime();
 
 PlatformVideoColorSpace videoColorSpaceFromCaps(const GstCaps*);
@@ -807,13 +807,8 @@ GStreamerRegistryScanner::CodecLookupResult GStreamerRegistryScanner::isAVC1Code
         return checkH264Caps(makeString("video/x-h264, level=(string)", maxLevelString).utf8().data());
     }
 
-    if (webkitGstCheckVersion(1, 18, 0)) {
-        GST_DEBUG("Checking video decoders for constrained caps");
-        return checkH264Caps(makeString("video/x-h264, level=(string)", level, ", profile=(string)", profile).utf8().data());
-    }
-
-    GST_DEBUG("Falling back to unconstrained caps");
-    return checkH264Caps("video/x-h264");
+    GST_DEBUG("Checking video decoders for constrained caps");
+    return checkH264Caps(makeString("video/x-h264, level=(string)", level, ", profile=(string)", profile).utf8().data());
 }
 
 const char* GStreamerRegistryScanner::configurationNameForLogging(Configuration configuration) const
@@ -3990,29 +3990,7 @@ GstElement* MediaPlayerPrivateGStreamer::createVideoSink()
         g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
     }
 
-    GstElement* videoSink = nullptr;
-    if (!webkitGstCheckVersion(1, 18, 0)) {
-        m_fpsSink = makeGStreamerElement("fpsdisplaysink", "sink");
-        if (m_fpsSink) {
-            g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);
-
-            // Turn off text overlay unless tracing is enabled.
-            if (gst_debug_category_get_threshold(webkit_media_player_debug) < GST_LEVEL_TRACE)
-                g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
-
-            if (gstObjectHasProperty(m_fpsSink.get(), "video-sink")) {
-                g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
-                videoSink = m_fpsSink.get();
-            } else
-                m_fpsSink = nullptr;
-        }
-    }
-
-    if (!m_fpsSink)
-        videoSink = m_videoSink.get();
-
-    ASSERT(videoSink);
-    return videoSink;
+    return m_videoSink.get();
 }
 
 void MediaPlayerPrivateGStreamer::setStreamVolumeElement(GstStreamVolume* volume)
@@ -4038,26 +4016,16 @@ void MediaPlayerPrivateGStreamer::setStreamVolumeElement(GstStreamVolume* volume
 
 bool MediaPlayerPrivateGStreamer::updateVideoSinkStatistics()
 {
-    if (!webkitGstCheckVersion(1, 18, 0) && !m_fpsSink)
-        return false;
-
     uint64_t totalVideoFrames = 0;
     uint64_t droppedVideoFrames = 0;
-    if (webkitGstCheckVersion(1, 18, 0)) {
-        GUniqueOutPtr<GstStructure> stats;
-        g_object_get(m_videoSink.get(), "stats", &stats.outPtr(), nullptr);
+    GUniqueOutPtr<GstStructure> stats;
+    g_object_get(m_videoSink.get(), "stats", &stats.outPtr(), nullptr);
 
-        if (!gst_structure_get_uint64(stats.get(), "rendered", &totalVideoFrames))
-            return false;
+    if (!gst_structure_get_uint64(stats.get(), "rendered", &totalVideoFrames))
+        return false;
 
-        if (!gst_structure_get_uint64(stats.get(), "dropped", &droppedVideoFrames))
-            return false;
-    } else if (m_fpsSink) {
-        unsigned renderedFrames, droppedFrames;
-        g_object_get(m_fpsSink.get(), "frames-rendered", &renderedFrames, "frames-dropped", &droppedFrames, nullptr);
-        totalVideoFrames = renderedFrames;
-        droppedVideoFrames = droppedFrames;
-    }
+    if (!gst_structure_get_uint64(stats.get(), "dropped", &droppedVideoFrames))
+        return false;
 
     // Caching is required so that metrics queries performed after EOS still return valid values.
     if (totalVideoFrames)
@@ -580,7 +580,6 @@ class MediaPlayerPrivateGStreamer : public MediaPlayerPrivateInterface
     uint64_t m_networkReadPosition { 0 };
     mutable uint64_t m_readPositionAtLastDidLoadingProgress { 0 };
 
-    GRefPtr<GstElement> m_fpsSink { nullptr };
    uint64_t m_totalVideoFrames { 0 };
    uint64_t m_droppedVideoFrames { 0 };
    uint64_t m_decodedVideoFrames { 0 };
Source/cmake/GStreamerChecks.cmake (2 changes: 1 addition, 1 deletion)
@@ -36,7 +36,7 @@ if (ENABLE_VIDEO OR ENABLE_WEB_AUDIO)
         list(APPEND GSTREAMER_COMPONENTS webrtc)
     endif ()
 
-    find_package(GStreamer 1.16.2 REQUIRED COMPONENTS ${GSTREAMER_COMPONENTS})
+    find_package(GStreamer 1.18.4 REQUIRED COMPONENTS ${GSTREAMER_COMPONENTS})
 
     if (ENABLE_WEB_AUDIO)
         if (NOT PC_GSTREAMER_AUDIO_FOUND OR NOT PC_GSTREAMER_FFT_FOUND)
