Skip to content
Permalink
Browse files
[GStreamer] Fill in client-name property on audio sinks
https://bugs.webkit.org/show_bug.cgi?id=233826

Patch by Philippe Normand <pnormand@igalia.com> on 2021-12-08
Reviewed by Xabier Rodriguez-Calvar.

The audio sink client-name property is now set on sinks that support this property, such as
pulsesink and pipewiresink. This is picked up by GNOME settings and the corresponding audio
streams now appear as owned by the WebKit application (MiniBrowser, ephy, etc) instead of
"WebKitWebProcess".

When the audio mixer is enabled, no media.role property is set, because the mixer can accept
music and/or video roles simultaneously, so it doesn't make sense in this context.

* platform/audio/gstreamer/AudioDestinationGStreamer.cpp:
(WebCore::AudioDestinationGStreamer::AudioDestinationGStreamer): Drive-by refactoring, using
a lambda instead of a static callback function.
(WebCore::autoAudioSinkChildAddedCallback): Deleted.
* platform/graphics/gstreamer/GStreamerAudioMixer.cpp:
(WebCore::GStreamerAudioMixer::GStreamerAudioMixer):
* platform/graphics/gstreamer/GStreamerCommon.cpp:
(WebCore::createAutoAudioSink):
(WebCore::createPlatformAudioSink):
* platform/graphics/gstreamer/GStreamerCommon.h:
* platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp:
(WebCore::MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer):
(WebCore::MediaPlayerPrivateGStreamer::createAudioSink):
(WebCore::MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback): Deleted.
(WebCore::MediaPlayerPrivateGStreamer::setAudioStreamProperties): Deleted.
* platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h:


Canonical link: https://commits.webkit.org/244982@main
git-svn-id: https://svn.webkit.org/repository/webkit/trunk@286676 268f45cc-cd09-0410-ab3c-d52691b4dbfc
  • Loading branch information
philn authored and webkit-commit-queue committed Dec 8, 2021
1 parent 0d86c17 commit abdbc9bb85c56aaa914a59d6244ff1721b3e25ab
Showing 7 changed files with 65 additions and 40 deletions.
@@ -1,3 +1,35 @@
2021-12-08 Philippe Normand <pnormand@igalia.com>

[GStreamer] Fill in client-name property on audio sinks
https://bugs.webkit.org/show_bug.cgi?id=233826

Reviewed by Xabier Rodriguez-Calvar.

The audio sink client-name property is now set on sinks that support this property, such as
pulsesink and pipewiresink. This is picked up by GNOME settings and the corresponding audio
streams now appear as owned by the WebKit application (MiniBrowser, ephy, etc) instead of
"WebKitWebProcess".

When the audio mixer is enabled, no media.role property is set, because the mixer can accept
music and/or video roles simultaneously, so it doesn't make sense in this context.

* platform/audio/gstreamer/AudioDestinationGStreamer.cpp:
(WebCore::AudioDestinationGStreamer::AudioDestinationGStreamer): Drive-by refactoring, using
a lambda instead of a static callback function.
(WebCore::autoAudioSinkChildAddedCallback): Deleted.
* platform/graphics/gstreamer/GStreamerAudioMixer.cpp:
(WebCore::GStreamerAudioMixer::GStreamerAudioMixer):
* platform/graphics/gstreamer/GStreamerCommon.cpp:
(WebCore::createAutoAudioSink):
(WebCore::createPlatformAudioSink):
* platform/graphics/gstreamer/GStreamerCommon.h:
* platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.cpp:
(WebCore::MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer):
(WebCore::MediaPlayerPrivateGStreamer::createAudioSink):
(WebCore::MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback): Deleted.
(WebCore::MediaPlayerPrivateGStreamer::setAudioStreamProperties): Deleted.
* platform/graphics/gstreamer/MediaPlayerPrivateGStreamer.h:

2021-12-08 Rob Buis <rbuis@igalia.com>

[css-contain] Prevent various kinds of propagation to RenderView
@@ -92,12 +92,6 @@ gboolean messageCallback(GstBus*, GstMessage* message, AudioDestinationGStreamer
return destination->handleMessage(message);
}

// "child-added" handler for autoaudiosink: once the real platform sink has
// been instantiated, shrink its buffer to 100ms to reduce playback latency.
// Non-audio children (and non-GstAudioBaseSink children) are left untouched.
static void autoAudioSinkChildAddedCallback(GstChildProxy*, GObject* object, gchar*, gpointer)
{
    if (!GST_IS_AUDIO_BASE_SINK(object))
        return;

    g_object_set(GST_AUDIO_BASE_SINK(object), "buffer-time", static_cast<gint64>(100000), nullptr);
}

Ref<AudioDestination> AudioDestination::create(AudioIOCallback& callback, const String&, unsigned numberOfInputChannels, unsigned numberOfOutputChannels, float sampleRate)
{
initializeDebugCategory();
@@ -135,7 +129,7 @@ AudioDestinationGStreamer::AudioDestinationGStreamer(AudioIOCallback& callback,
m_src = GST_ELEMENT_CAST(g_object_new(WEBKIT_TYPE_WEB_AUDIO_SRC, "rate", sampleRate,
"bus", m_renderBus.get(), "destination", this, "frames", AudioUtilities::renderQuantumSize, nullptr));

GRefPtr<GstElement> audioSink = createPlatformAudioSink();
GRefPtr<GstElement> audioSink = createPlatformAudioSink("music");
m_audioSinkAvailable = audioSink;
if (!audioSink) {
GST_ERROR("Failed to create GStreamer audio sink element");
@@ -145,7 +139,10 @@ AudioDestinationGStreamer::AudioDestinationGStreamer(AudioIOCallback& callback,
// Probe platform early on for a working audio output device. This is not needed for the WebKit
// custom audio sink because it doesn't rely on autoaudiosink.
if (!WEBKIT_IS_AUDIO_SINK(audioSink.get())) {
g_signal_connect(audioSink.get(), "child-added", G_CALLBACK(autoAudioSinkChildAddedCallback), nullptr);
g_signal_connect(audioSink.get(), "child-added", G_CALLBACK(+[](GstChildProxy*, GObject* object, gchar*, gpointer) {
if (GST_IS_AUDIO_BASE_SINK(object))
g_object_set(GST_AUDIO_BASE_SINK(object), "buffer-time", static_cast<gint64>(100000), nullptr);
}), nullptr);

// Autoaudiosink does the real sink detection in the GST_STATE_NULL->READY transition
// so it's best to roll it to READY as soon as possible to ensure the underlying platform
@@ -48,7 +48,7 @@ GStreamerAudioMixer::GStreamerAudioMixer()
connectSimpleBusMessageCallback(m_pipeline.get());

m_mixer = makeGStreamerElement("audiomixer", nullptr);
GstElement* audioSink = makeGStreamerElement("autoaudiosink", nullptr);
auto* audioSink = createAutoAudioSink({ });

gst_bin_add_many(GST_BIN_CAST(m_pipeline.get()), m_mixer.get(), audioSink, nullptr);
gst_element_link(m_mixer.get(), audioSink);
@@ -23,8 +23,10 @@

#if USE(GSTREAMER)

#include "ApplicationGLib.h"
#include "GLVideoSinkGStreamer.h"
#include "GStreamerAudioMixer.h"
#include "GUniquePtrGStreamer.h"
#include "GstAllocatorFastMalloc.h"
#include "IntSize.h"
#include "RuntimeApplicationChecks.h"
@@ -440,7 +442,26 @@ bool gstElementFactoryEquals(GstElement* element, const char* name)
return equal(GST_OBJECT_NAME(gst_element_get_factory(element)), name);
}

GstElement* createPlatformAudioSink()
// Creates an autoaudiosink whose eventual platform sink (pulsesink,
// pipewiresink, ...) is configured lazily through the "child-added" signal:
// - media.role is set in stream-properties when a non-null role was given;
// - client-name is filled in with the application name so audio streams are
//   attributed to the embedding app (MiniBrowser, ephy, ...) instead of
//   "WebKitWebProcess".
// The role String is copied (isolatedCopy) because the child sink can be
// created from a different thread; ownership of the leaked StringImpl is
// handed to the signal closure and released by the GClosureNotify.
GstElement* createAutoAudioSink(const String& role)
{
    auto* audioSink = makeGStreamerElement("autoaudiosink", nullptr);
    // The lambdas must stay capture-less so they convert to plain C function
    // pointers suitable for g_signal_connect_data().
    g_signal_connect_data(audioSink, "child-added", G_CALLBACK(+[](GstChildProxy*, GObject* object, gchar*, gpointer userData) {
        auto* role = reinterpret_cast<StringImpl*>(userData);
        auto* objectClass = G_OBJECT_GET_CLASS(object);
        if (role && g_object_class_find_property(objectClass, "stream-properties")) {
            // Convert once: utf8() allocates, and the result is used twice.
            auto roleString = role->utf8();
            GUniquePtr<GstStructure> properties(gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, roleString.data(), nullptr));
            g_object_set(object, "stream-properties", properties.get(), nullptr);
            GST_DEBUG("Set media.role as %s on %" GST_PTR_FORMAT, roleString.data(), GST_ELEMENT_CAST(object));
        }
        if (g_object_class_find_property(objectClass, "client-name"))
            g_object_set(object, "client-name", getApplicationName(), nullptr);
    }), role.isolatedCopy().releaseImpl().leakRef(), static_cast<GClosureNotify>([](gpointer userData, GClosure*) {
        // A null role (e.g. createAutoAudioSink({ }) from the audio mixer)
        // yields a null StringImpl here, so guard before deref'ing to avoid a
        // null-pointer dereference when the closure is destroyed.
        if (auto* impl = reinterpret_cast<StringImpl*>(userData))
            impl->deref();
    }), static_cast<GConnectFlags>(0));
    return audioSink;
}

GstElement* createPlatformAudioSink(const String& role)
{
GstElement* audioSink = webkitAudioSinkNew();
if (!audioSink) {
@@ -450,7 +471,7 @@ GstElement* createPlatformAudioSink()
// runtime requirements are not fullfilled.
// - the sink was created for the WPE port, audio mixing was not requested and no
// WPEBackend-FDO audio receiver has been registered at runtime.
audioSink = makeGStreamerElement("autoaudiosink", nullptr);
audioSink = createAutoAudioSink(role);
}

return audioSink;
@@ -306,7 +306,8 @@ enum class GstVideoDecoderPlatform { ImxVPU, Video4Linux, OpenMAX };
bool isGStreamerPluginAvailable(const char* name);
bool gstElementFactoryEquals(GstElement*, const char* name);

GstElement* createPlatformAudioSink();
GstElement* createAutoAudioSink(const String& role);
GstElement* createPlatformAudioSink(const String& role);

bool webkitGstSetElementStateSynchronously(GstElement*, GstState, Function<bool(GstMessage*)>&& = [](GstMessage*) -> bool {
return true;
@@ -200,10 +200,6 @@ MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
if (m_fillTimer.isActive())
m_fillTimer.stop();

auto* sink = audioSink();
if (sink && !WEBKIT_IS_AUDIO_SINK(sink))
g_signal_handlers_disconnect_by_func(G_OBJECT(sink), reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);

m_readyTimerHandler.stop();
for (auto& missingPluginCallback : m_missingPluginCallbacks) {
if (missingPluginCallback)
@@ -884,23 +880,6 @@ void MediaPlayerPrivateGStreamer::sourceSetup(GstElement* sourceElement)
}
}

// Static thunk used with g_signal_connect_swapped() for the audio sink's
// "child-added" signal: forwards the newly-added child GObject to the
// player instance, which decides whether to tag it with stream properties.
void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
{
player->setAudioStreamProperties(object);
}

// Tags the sink's PulseAudio stream with a media.role so the desktop audio
// service can classify it. Only acts on elements whose runtime type name is
// exactly "GstPulseSink": g_strcmp0() returns non-zero (→ early return) for
// any other child added to the sink.
void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
{
if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
return;

// Role is derived from the element kind: "video" for <video>-style players,
// "music" for audio-only playback.
const char* role = m_player->isVideoPlayer() ? "video" : "music";
GUniquePtr<GstStructure> properties(gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr));
g_object_set(object, "stream-properties", properties.get(), nullptr);
// gst_element_get_name() transfers ownership; GUniquePtr frees it.
GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
GST_DEBUG_OBJECT(pipeline(), "Set media.role as %s at %s", role, elementName.get());
}

void MediaPlayerPrivateGStreamer::sourceSetupCallback(MediaPlayerPrivateGStreamer* player, GstElement* sourceElement)
{
player->sourceSetup(sourceElement);
@@ -1249,14 +1228,12 @@ void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState networ

GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
{
GstElement* audioSink = createPlatformAudioSink();
const char* role = m_player->isVideoPlayer() ? "video" : "music";
GstElement* audioSink = createPlatformAudioSink(role);
RELEASE_ASSERT(audioSink);
if (!audioSink)
return nullptr;

if (!WEBKIT_IS_AUDIO_SINK(audioSink))
g_signal_connect_swapped(audioSink, "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);

#if ENABLE(WEB_AUDIO)
GstElement* audioSinkBin = gst_bin_new("audio-sink");
ensureAudioSourceProvider();
@@ -304,13 +304,10 @@ class MediaPlayerPrivateGStreamer : public MediaPlayerPrivateInterface
template <typename TrackPrivateType> void notifyPlayerOfTrack();

void ensureAudioSourceProvider();
void setAudioStreamProperties(GObject*);

virtual bool doSeek(const MediaTime& position, float rate, GstSeekFlags);
void invalidateCachedPosition() const;

static void setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer*, GObject*);

static void sourceSetupCallback(MediaPlayerPrivateGStreamer*, GstElement*);
static void videoChangedCallback(MediaPlayerPrivateGStreamer*);
static void audioChangedCallback(MediaPlayerPrivateGStreamer*);

0 comments on commit abdbc9b

Please sign in to comment.