/*
 * Copyright (C) 2011, 2012 Igalia S.L
 * Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#include "WebKitWebAudioSourceGStreamer.h"

#if ENABLE(WEB_AUDIO) && USE(GSTREAMER)

#include "AudioBus.h"
#include "AudioIOCallback.h"
#include "GRefPtrGStreamer.h"
#include "GStreamerUtilities.h"
#include <gst/app/gstappsrc.h>
#include <gst/audio/audio-info.h>
#include <gst/pbutils/missing-plugins.h>
#include <wtf/glib/GUniquePtr.h>

using namespace WebCore;

typedef struct _WebKitWebAudioSrcClass WebKitWebAudioSrcClass;
typedef struct _WebKitWebAudioSourcePrivate WebKitWebAudioSourcePrivate;

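// webkitwebaudiosrc is a GstBin exposing a single, always-present src pad. Internally it
// holds one appsrc per planar audio channel, all feeding an interleave element whose src
// pad is ghosted to the outside:
//
//   appsrc (channel 0) --+
//   appsrc (channel 1) --+-- interleave -- (ghost) src
//   appsrc (channel N) --+
//
// A GstTask repeatedly asks the AudioIOCallback provider to render audio into the
// per-channel buffers and pushes the result into the corresponding appsrc.
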
struct _WebKitWebAudioSrc {
    GstBin parent;

    WebKitWebAudioSourcePrivate* priv;
};

struct _WebKitWebAudioSrcClass {
    GstBinClass parentClass;
};

#define WEBKIT_WEB_AUDIO_SRC_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj), WEBKIT_TYPE_WEB_AUDIO_SRC, WebKitWebAudioSourcePrivate))
struct _WebKitWebAudioSourcePrivate {
    gfloat sampleRate;
    AudioBus* bus;
    AudioIOCallback* provider;
    guint framesToPull;
    guint bufferSize;

    GRefPtr<GstElement> interleave;

    GRefPtr<GstTask> task;
    GRecMutex mutex;

    // List of appsrc. One appsrc for each planar audio channel.
    Vector<GRefPtr<GstElement>> sources;

    // src pad of the element, interleaved wav data is pushed to it.
    GstPad* sourcePad;

    guint64 numberOfSamples;
    GRefPtr<GstBufferPool> pool;
};

// Identifiers for the properties installed in class_init below.
enum {
    PROP_RATE = 1,
    PROP_BUS,
    PROP_PROVIDER,
    PROP_FRAMES
};

static GstStaticPadTemplate srcTemplate = GST_STATIC_PAD_TEMPLATE("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS(GST_AUDIO_CAPS_MAKE(GST_AUDIO_NE(F32))));

GST_DEBUG_CATEGORY_STATIC(webkit_web_audio_src_debug);
#define GST_CAT_DEFAULT webkit_web_audio_src_debug

static void webKitWebAudioSrcConstructed(GObject*);
static void webKitWebAudioSrcFinalize(GObject*);
static void webKitWebAudioSrcSetProperty(GObject*, guint propertyId, const GValue*, GParamSpec*);
static void webKitWebAudioSrcGetProperty(GObject*, guint propertyId, GValue*, GParamSpec*);
static GstStateChangeReturn webKitWebAudioSrcChangeState(GstElement*, GstStateChange);
static void webKitWebAudioSrcLoop(WebKitWebAudioSrc*);

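// Caps for one mono channel of native-endian float32 samples at the given sample rate.
// Each AudioBus channel gets its own appsrc advertising these caps.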
static GstCaps* getGStreamerMonoAudioCaps(float sampleRate)
{
    return gst_caps_new_simple("audio/x-raw", "rate", G_TYPE_INT, static_cast<int>(sampleRate),
        "channels", G_TYPE_INT, 1,
        "format", G_TYPE_STRING, GST_AUDIO_NE(F32),
        "layout", G_TYPE_STRING, "interleaved", nullptr);
}

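// Maps a WebCore AudioBus channel index to the GStreamer channel position advertised in
// the caps of the matching appsrc branch.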
static GstAudioChannelPosition webKitWebAudioGStreamerChannelPosition(int channelIndex)
{
    GstAudioChannelPosition position = GST_AUDIO_CHANNEL_POSITION_NONE;

    switch (channelIndex) {
    case AudioBus::ChannelLeft:
        position = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
        break;
    case AudioBus::ChannelRight:
        position = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
        break;
    case AudioBus::ChannelCenter:
        position = GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER;
        break;
    case AudioBus::ChannelLFE:
        position = GST_AUDIO_CHANNEL_POSITION_LFE1;
        break;
    case AudioBus::ChannelSurroundLeft:
        position = GST_AUDIO_CHANNEL_POSITION_REAR_LEFT;
        break;
    case AudioBus::ChannelSurroundRight:
        position = GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT;
        break;
    default:
        break;
    }

    return position;
}

#define webkit_web_audio_src_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE(WebKitWebAudioSrc, webkit_web_audio_src, GST_TYPE_BIN, GST_DEBUG_CATEGORY_INIT(webkit_web_audio_src_debug, \
        "webkitwebaudiosrc", \
        0, \
        "webaudiosrc element"));

static void webkit_web_audio_src_class_init(WebKitWebAudioSrcClass* webKitWebAudioSrcClass)
{
    GObjectClass* objectClass = G_OBJECT_CLASS(webKitWebAudioSrcClass);
    GstElementClass* elementClass = GST_ELEMENT_CLASS(webKitWebAudioSrcClass);

    gst_element_class_add_pad_template(elementClass, gst_static_pad_template_get(&srcTemplate));
    gst_element_class_set_metadata(elementClass, "WebKit WebAudio source element", "Source", "Handles WebAudio data from WebCore", "Philippe Normand <pnormand@igalia.com>");

    objectClass->constructed = webKitWebAudioSrcConstructed;
    objectClass->finalize = webKitWebAudioSrcFinalize;
    elementClass->change_state = webKitWebAudioSrcChangeState;

    objectClass->set_property = webKitWebAudioSrcSetProperty;
    objectClass->get_property = webKitWebAudioSrcGetProperty;

    GParamFlags flags = static_cast<GParamFlags>(G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE);
    g_object_class_install_property(objectClass,
        PROP_RATE,
        g_param_spec_float("rate", "rate",
            "Sample rate", G_MINDOUBLE, G_MAXDOUBLE,
            44100.0, flags));

    g_object_class_install_property(objectClass,
        PROP_BUS,
        g_param_spec_pointer("bus", "bus",
            "Bus", flags));

    g_object_class_install_property(objectClass,
        PROP_PROVIDER,
        g_param_spec_pointer("provider", "provider",
            "Provider", flags));

    g_object_class_install_property(objectClass,
        PROP_FRAMES,
        g_param_spec_uint("frames", "frames",
            "Number of audio frames to pull at each iteration",
            0, G_MAXUINT8, 128, flags));

    g_type_class_add_private(webKitWebAudioSrcClass, sizeof(WebKitWebAudioSourcePrivate));
}

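// Instance init: placement-construct the C++ private struct, expose the ghost src pad
// declared in srcTemplate, and create the task that runs webKitWebAudioSrcLoop. The task
// is not started here; webKitWebAudioSrcChangeState() starts it on READY->PAUSED.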
static void webkit_web_audio_src_init(WebKitWebAudioSrc* src)
{
    WebKitWebAudioSourcePrivate* priv = G_TYPE_INSTANCE_GET_PRIVATE(src, WEBKIT_TYPE_WEB_AUDIO_SRC, WebKitWebAudioSourcePrivate);
    src->priv = priv;
    new (priv) WebKitWebAudioSourcePrivate();

    priv->sourcePad = webkitGstGhostPadFromStaticTemplate(&srcTemplate, "src", 0);
    gst_element_add_pad(GST_ELEMENT(src), priv->sourcePad);

    g_rec_mutex_init(&priv->mutex);
    priv->task = adoptGRef(gst_task_new(reinterpret_cast<GstTaskFunction>(webKitWebAudioSrcLoop), src, 0));
    gst_task_set_lock(priv->task.get(), &priv->mutex);
}

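// constructed() runs once the construct-only properties ("rate", "bus", "provider",
// "frames") have been set, so the channel count is known and the appsrc ! interleave
// branches can be assembled here rather than in init().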
static void webKitWebAudioSrcConstructed(GObject* object)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    ASSERT(priv->bus);
    ASSERT(priv->provider);
    ASSERT(priv->sampleRate);

    priv->interleave = gst_element_factory_make("interleave", nullptr);

    if (!priv->interleave) {
        GST_ERROR_OBJECT(src, "Failed to create interleave");
        return;
    }

    gst_bin_add(GST_BIN(src), priv->interleave.get());

    // For each channel of the bus create a new upstream branch for interleave, like:
    // appsrc ! . which is plugged to a new interleave request sinkpad.
    for (unsigned channelIndex = 0; channelIndex < priv->bus->numberOfChannels(); channelIndex++) {
        GUniquePtr<gchar> appsrcName(g_strdup_printf("webaudioSrc%u", channelIndex));
        GRefPtr<GstElement> appsrc = gst_element_factory_make("appsrc", appsrcName.get());
        GRefPtr<GstCaps> monoCaps = adoptGRef(getGStreamerMonoAudioCaps(priv->sampleRate));

        GstAudioInfo info;
        gst_audio_info_from_caps(&info, monoCaps.get());
        GST_AUDIO_INFO_POSITION(&info, 0) = webKitWebAudioGStreamerChannelPosition(channelIndex);
        GRefPtr<GstCaps> caps = adoptGRef(gst_audio_info_to_caps(&info));

        // Configure the appsrc for minimal latency.
        g_object_set(appsrc.get(), "max-bytes", static_cast<guint64>(2 * priv->bufferSize), "block", TRUE,
            "blocksize", priv->bufferSize,
            "format", GST_FORMAT_TIME, "caps", caps.get(), nullptr);

        priv->sources.append(appsrc);

        gst_bin_add(GST_BIN(src), appsrc.get());
        gst_element_link_pads_full(appsrc.get(), "src", priv->interleave.get(), "sink_%u", GST_PAD_LINK_CHECK_NOTHING);
    }

    // interleave's src pad is the only visible pad of our element.
    GRefPtr<GstPad> targetPad = adoptGRef(gst_element_get_static_pad(priv->interleave.get(), "src"));
    gst_ghost_pad_set_target(GST_GHOST_PAD(priv->sourcePad), targetPad.get());
}

static void webKitWebAudioSrcFinalize(GObject* object)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    g_rec_mutex_clear(&priv->mutex);

    priv->~WebKitWebAudioSourcePrivate();
    GST_CALL_PARENT(G_OBJECT_CLASS, finalize, ((GObject*)(src)));
}

static void webKitWebAudioSrcSetProperty(GObject* object, guint propertyId, const GValue* value, GParamSpec* pspec)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    switch (propertyId) {
    case PROP_RATE:
        priv->sampleRate = g_value_get_float(value);
        break;
    case PROP_BUS:
        priv->bus = static_cast<AudioBus*>(g_value_get_pointer(value));
        break;
    case PROP_PROVIDER:
        priv->provider = static_cast<AudioIOCallback*>(g_value_get_pointer(value));
        break;
    case PROP_FRAMES:
        priv->framesToPull = g_value_get_uint(value);
        priv->bufferSize = sizeof(float) * priv->framesToPull;
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, pspec);
        break;
    }
}

static void webKitWebAudioSrcGetProperty(GObject* object, guint propertyId, GValue* value, GParamSpec* pspec)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    switch (propertyId) {
    case PROP_RATE:
        g_value_set_float(value, priv->sampleRate);
        break;
    case PROP_BUS:
        g_value_set_pointer(value, priv->bus);
        break;
    case PROP_PROVIDER:
        g_value_set_pointer(value, priv->provider);
        break;
    case PROP_FRAMES:
        g_value_set_uint(value, priv->framesToPull);
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, pspec);
        break;
    }
}

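// Task function, called repeatedly while the task runs. Each iteration acquires one pool
// buffer per channel, maps it and points the corresponding AudioBus channel at the mapped
// memory, asks the provider to render framesToPull frames, then unmaps and pushes each
// channel buffer to its appsrc. The blocking appsrcs ("block" = TRUE) pace the loop.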
static void webKitWebAudioSrcLoop(WebKitWebAudioSrc* src)
{
    WebKitWebAudioSourcePrivate* priv = src->priv;

    ASSERT(priv->bus);
    ASSERT(priv->provider);
    if (!priv->provider || !priv->bus) {
        GST_ELEMENT_ERROR(src, CORE, FAILED, ("Internal WebAudioSrc error"), ("Can't start without provider or bus"));
        gst_task_stop(src->priv->task.get());
        return;
    }

    GstClockTime timestamp = gst_util_uint64_scale(priv->numberOfSamples, GST_SECOND, priv->sampleRate);
    priv->numberOfSamples += priv->framesToPull;
    GstClockTime duration = gst_util_uint64_scale(priv->numberOfSamples, GST_SECOND, priv->sampleRate) - timestamp;

    Vector<GRefPtr<GstBuffer>> channelBufferList;
    channelBufferList.reserveInitialCapacity(priv->sources.size());
    for (unsigned i = 0; i < priv->sources.size(); ++i) {
        GRefPtr<GstBuffer> buffer;
        GstFlowReturn ret = gst_buffer_pool_acquire_buffer(priv->pool.get(), &buffer.outPtr(), nullptr);
        if (ret != GST_FLOW_OK) {
            for (auto& buffer : channelBufferList)
                unmapGstBuffer(buffer.get());

            // FLUSHING and EOS are not errors.
            if (ret < GST_FLOW_EOS || ret == GST_FLOW_NOT_LINKED)
                GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to allocate buffer for flow: %s", gst_flow_get_name(ret)));
            gst_task_stop(src->priv->task.get());
            return;
        }

        GST_BUFFER_TIMESTAMP(buffer.get()) = timestamp;
        GST_BUFFER_DURATION(buffer.get()) = duration;
        mapGstBuffer(buffer.get(), GST_MAP_READWRITE);
        priv->bus->setChannelMemory(i, reinterpret_cast<float*>(getGstBufferDataPointer(buffer.get())), priv->framesToPull);
        channelBufferList.uncheckedAppend(WTFMove(buffer));
    }

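    // At this point each AudioBus channel aliases the mapped memory of its pool buffer, so
    // the provider renders directly into the buffers that will be pushed downstream.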
    // FIXME: Add support for local/live audio input.
    priv->provider->render(0, priv->bus, priv->framesToPull);

    ASSERT(channelBufferList.size() == priv->sources.size());

    for (unsigned i = 0; i < priv->sources.size(); ++i) {
        // Unmap before passing on the buffer.
        auto& buffer = channelBufferList[i];
        unmapGstBuffer(buffer.get());

        auto& appsrc = priv->sources[i];
        // Leak the buffer ref, because gst_app_src_push_buffer steals it.
        GstFlowReturn ret = gst_app_src_push_buffer(GST_APP_SRC(appsrc.get()), buffer.leakRef());
        if (ret != GST_FLOW_OK) {
            // FLUSHING and EOS are not errors.
            if (ret < GST_FLOW_EOS || ret == GST_FLOW_NOT_LINKED)
                GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to push buffer on %s flow: %s", GST_OBJECT_NAME(appsrc.get()), gst_flow_get_name(ret)));
            gst_task_stop(src->priv->task.get());
            return;
        }
    }
}

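// READY->PAUSED activates the buffer pool and starts the pull task; PAUSED->READY flushes
// the pool (on GStreamer >= 1.4, so a blocked acquire returns), joins the task, deactivates
// the pool and releases it.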
static GstStateChangeReturn webKitWebAudioSrcChangeState(GstElement* element, GstStateChange transition)
{
    GstStateChangeReturn returnValue = GST_STATE_CHANGE_SUCCESS;
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(element);

    switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
        if (!src->priv->interleave) {
            gst_element_post_message(element, gst_missing_element_message_new(element, "interleave"));
            GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (0), ("no interleave"));
            return GST_STATE_CHANGE_FAILURE;
        }
        src->priv->numberOfSamples = 0;
        break;
    default:
        break;
    }

    returnValue = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
    if (UNLIKELY(returnValue == GST_STATE_CHANGE_FAILURE)) {
        GST_DEBUG_OBJECT(src, "State change failed");
        return returnValue;
    }

    switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED: {
        GST_DEBUG_OBJECT(src, "READY->PAUSED");

        src->priv->pool = gst_buffer_pool_new();
        GstStructure* config = gst_buffer_pool_get_config(src->priv->pool.get());
        gst_buffer_pool_config_set_params(config, nullptr, src->priv->bufferSize, 0, 0);
        gst_buffer_pool_set_config(src->priv->pool.get(), config);
        if (!gst_buffer_pool_set_active(src->priv->pool.get(), TRUE))
            returnValue = GST_STATE_CHANGE_FAILURE;
        else if (!gst_task_start(src->priv->task.get()))
            returnValue = GST_STATE_CHANGE_FAILURE;
        break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_READY:
        GST_DEBUG_OBJECT(src, "PAUSED->READY");

#if GST_CHECK_VERSION(1, 4, 0)
        gst_buffer_pool_set_flushing(src->priv->pool.get(), TRUE);
#endif
        if (!gst_task_join(src->priv->task.get()))
            returnValue = GST_STATE_CHANGE_FAILURE;
        gst_buffer_pool_set_active(src->priv->pool.get(), FALSE);
        src->priv->pool = nullptr;
        break;
    default:
        break;
    }

    return returnValue;
}

#endif // ENABLE(WEB_AUDIO) && USE(GSTREAMER)