[GStreamer] Adopt nullptr
/*
 *  Copyright (C) 2011, 2012 Igalia S.L
 *  Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
 */

#include "config.h"

#include "WebKitWebAudioSourceGStreamer.h"

#if ENABLE(WEB_AUDIO) && USE(GSTREAMER)

#include "AudioBus.h"
#include "AudioIOCallback.h"
#include "GRefPtrGStreamer.h"
#include "GStreamerUtilities.h"
#include <gst/app/gstappsrc.h>
#include <gst/audio/audio-info.h>
#include <gst/pbutils/missing-plugins.h>
#include <wtf/glib/GUniquePtr.h>

using namespace WebCore;

typedef struct _WebKitWebAudioSrcClass   WebKitWebAudioSrcClass;
typedef struct _WebKitWebAudioSourcePrivate WebKitWebAudioSourcePrivate;

struct _WebKitWebAudioSrc {
    GstBin parent;

    WebKitWebAudioSourcePrivate* priv;
};

struct _WebKitWebAudioSrcClass {
    GstBinClass parentClass;
};

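// Per-instance private state. Although the memory is allocated by GObject, the
// struct holds C++ members (GRefPtr, Vector), so it is constructed with placement
// new in webkit_web_audio_src_init() and destroyed explicitly in
// webKitWebAudioSrcFinalize().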
#define WEBKIT_WEB_AUDIO_SRC_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj), WEBKIT_TYPE_WEB_AUDIO_SRC, WebKitWebAudioSourcePrivate))
struct _WebKitWebAudioSourcePrivate {
    gfloat sampleRate;
    AudioBus* bus;
    AudioIOCallback* provider;
    guint framesToPull;
    guint bufferSize;

    GRefPtr<GstElement> interleave;

    GRefPtr<GstTask> task;
    GRecMutex mutex;

    // List of appsrc elements, one per planar audio channel.
    Vector<GRefPtr<GstElement>> sources;

    // src pad of the element; interleaved audio data is pushed through it.
    GstPad* sourcePad;

    guint64 numberOfSamples;

    GRefPtr<GstBufferPool> pool;
};

enum {
    PROP_RATE = 1,
    PROP_BUS,
    PROP_PROVIDER,
    PROP_FRAMES
};

static GstStaticPadTemplate srcTemplate = GST_STATIC_PAD_TEMPLATE("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS(GST_AUDIO_CAPS_MAKE(GST_AUDIO_NE(F32))));

GST_DEBUG_CATEGORY_STATIC(webkit_web_audio_src_debug);
#define GST_CAT_DEFAULT webkit_web_audio_src_debug

static void webKitWebAudioSrcConstructed(GObject*);
static void webKitWebAudioSrcFinalize(GObject*);
static void webKitWebAudioSrcSetProperty(GObject*, guint propertyId, const GValue*, GParamSpec*);
static void webKitWebAudioSrcGetProperty(GObject*, guint propertyId, GValue*, GParamSpec*);
static GstStateChangeReturn webKitWebAudioSrcChangeState(GstElement*, GstStateChange);
static void webKitWebAudioSrcLoop(WebKitWebAudioSrc*);

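// Caps describing a single planar channel: native-endian 32-bit float samples
// (GST_AUDIO_NE(F32)) at the requested sample rate.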
static GstCaps* getGStreamerMonoAudioCaps(float sampleRate)
{
    return gst_caps_new_simple("audio/x-raw", "rate", G_TYPE_INT, static_cast<int>(sampleRate),
        "channels", G_TYPE_INT, 1,
        "format", G_TYPE_STRING, GST_AUDIO_NE(F32),
        "layout", G_TYPE_STRING, "interleaved", nullptr);
}

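// Maps a WebCore AudioBus channel index to the corresponding GStreamer channel
// position; webKitWebAudioSrcConstructed() stamps this position onto each mono
// appsrc's caps.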
static GstAudioChannelPosition webKitWebAudioGStreamerChannelPosition(int channelIndex)
{
    GstAudioChannelPosition position = GST_AUDIO_CHANNEL_POSITION_NONE;

    switch (channelIndex) {
    case AudioBus::ChannelLeft:
        position = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
        break;
    case AudioBus::ChannelRight:
        position = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
        break;
    case AudioBus::ChannelCenter:
        position = GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER;
        break;
    case AudioBus::ChannelLFE:
        position = GST_AUDIO_CHANNEL_POSITION_LFE1;
        break;
    case AudioBus::ChannelSurroundLeft:
        position = GST_AUDIO_CHANNEL_POSITION_REAR_LEFT;
        break;
    case AudioBus::ChannelSurroundRight:
        position = GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT;
        break;
    default:
        break;
    }

    return position;
}

#define webkit_web_audio_src_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE(WebKitWebAudioSrc, webkit_web_audio_src, GST_TYPE_BIN, GST_DEBUG_CATEGORY_INIT(webkit_web_audio_src_debug, \
                            "webkitwebaudiosrc", \
                            0, \
                            "webaudiosrc element"));

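// Class initialization: registers the static src pad template and element
// metadata, hooks up the GObject/GstElement vfuncs, and installs the
// construct-only properties ("rate", "bus", "provider", "frames") through which
// the creator hands over the AudioBus, the render callback and the pull size.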
static void webkit_web_audio_src_class_init(WebKitWebAudioSrcClass* webKitWebAudioSrcClass)
{
    GObjectClass* objectClass = G_OBJECT_CLASS(webKitWebAudioSrcClass);
    GstElementClass* elementClass = GST_ELEMENT_CLASS(webKitWebAudioSrcClass);

    gst_element_class_add_pad_template(elementClass, gst_static_pad_template_get(&srcTemplate));
    gst_element_class_set_metadata(elementClass, "WebKit WebAudio source element", "Source", "Handles WebAudio data from WebCore", "Philippe Normand <pnormand@igalia.com>");

    objectClass->constructed = webKitWebAudioSrcConstructed;
    objectClass->finalize = webKitWebAudioSrcFinalize;
    elementClass->change_state = webKitWebAudioSrcChangeState;

    objectClass->set_property = webKitWebAudioSrcSetProperty;
    objectClass->get_property = webKitWebAudioSrcGetProperty;

    GParamFlags flags = static_cast<GParamFlags>(G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE);
    g_object_class_install_property(objectClass,
                                    PROP_RATE,
                                    g_param_spec_float("rate", "rate",
                                                       "Sample rate", G_MINDOUBLE, G_MAXDOUBLE,
                                                       44100.0, flags));

    g_object_class_install_property(objectClass,
                                    PROP_BUS,
                                    g_param_spec_pointer("bus", "bus",
                                                         "Bus", flags));

    g_object_class_install_property(objectClass,
                                    PROP_PROVIDER,
                                    g_param_spec_pointer("provider", "provider",
                                                         "Provider", flags));

    g_object_class_install_property(objectClass,
                                    PROP_FRAMES,
                                    g_param_spec_uint("frames", "frames",
                                                      "Number of audio frames to pull at each iteration",
                                                      0, G_MAXUINT8, 128, flags));

    g_type_class_add_private(webKitWebAudioSrcClass, sizeof(WebKitWebAudioSourcePrivate));
}

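// Instance initialization: placement-new the C++ private struct, add the ghost
// "src" pad declared by srcTemplate (retargeted to interleave's src pad once
// construction finishes), and create the GstTask whose dedicated thread runs
// webKitWebAudioSrcLoop().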
static void webkit_web_audio_src_init(WebKitWebAudioSrc* src)
{
    WebKitWebAudioSourcePrivate* priv = G_TYPE_INSTANCE_GET_PRIVATE(src, WEBKIT_TYPE_WEB_AUDIO_SRC, WebKitWebAudioSourcePrivate);
    src->priv = priv;
    new (priv) WebKitWebAudioSourcePrivate();

    priv->sourcePad = webkitGstGhostPadFromStaticTemplate(&srcTemplate, "src", nullptr);
    gst_element_add_pad(GST_ELEMENT(src), priv->sourcePad);

    priv->provider = nullptr;
    priv->bus = nullptr;

    g_rec_mutex_init(&priv->mutex);
    priv->task = adoptGRef(gst_task_new(reinterpret_cast<GstTaskFunction>(webKitWebAudioSrcLoop), src, nullptr));

    gst_task_set_lock(priv->task.get(), &priv->mutex);
}

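// Called by GObject once all construct-only properties have been set. Builds the
// internal graph: one appsrc per AudioBus channel, each linked to a request sink
// pad of an interleave element whose src pad becomes the ghost pad's target.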
static void webKitWebAudioSrcConstructed(GObject* object)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    ASSERT(priv->bus);
    ASSERT(priv->provider);
    ASSERT(priv->sampleRate);

    priv->interleave = gst_element_factory_make("interleave", nullptr);

    if (!priv->interleave) {
        GST_ERROR_OBJECT(src, "Failed to create interleave");
        return;
    }

    gst_bin_add(GST_BIN(src), priv->interleave.get());

    // For each channel of the bus, create a new upstream branch for interleave:
    // an appsrc linked to a freshly requested interleave sink pad.
    for (unsigned channelIndex = 0; channelIndex < priv->bus->numberOfChannels(); channelIndex++) {
        GUniquePtr<gchar> appsrcName(g_strdup_printf("webaudioSrc%u", channelIndex));
        GRefPtr<GstElement> appsrc = gst_element_factory_make("appsrc", appsrcName.get());
        GRefPtr<GstCaps> monoCaps = adoptGRef(getGStreamerMonoAudioCaps(priv->sampleRate));

        GstAudioInfo info;
        gst_audio_info_from_caps(&info, monoCaps.get());
        GST_AUDIO_INFO_POSITION(&info, 0) = webKitWebAudioGStreamerChannelPosition(channelIndex);
        GRefPtr<GstCaps> caps = adoptGRef(gst_audio_info_to_caps(&info));

        // Configure the appsrc for minimal latency.
        g_object_set(appsrc.get(), "max-bytes", static_cast<guint64>(2 * priv->bufferSize), "block", TRUE,
            "blocksize", priv->bufferSize,
            "format", GST_FORMAT_TIME, "caps", caps.get(), nullptr);

        priv->sources.append(appsrc);

        gst_bin_add(GST_BIN(src), appsrc.get());
        gst_element_link_pads_full(appsrc.get(), "src", priv->interleave.get(), "sink_%u", GST_PAD_LINK_CHECK_NOTHING);
    }

    // interleave's src pad is the only visible pad of our element.
    GRefPtr<GstPad> targetPad = adoptGRef(gst_element_get_static_pad(priv->interleave.get(), "src"));
    gst_ghost_pad_set_target(GST_GHOST_PAD(priv->sourcePad), targetPad.get());
}

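// Finalize clears the task mutex and runs the private struct's destructor
// explicitly, mirroring the placement new performed in webkit_web_audio_src_init().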
static void webKitWebAudioSrcFinalize(GObject* object)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    g_rec_mutex_clear(&priv->mutex);

    priv->~WebKitWebAudioSourcePrivate();
    GST_CALL_PARENT(G_OBJECT_CLASS, finalize, ((GObject*)(src)));
}

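// Property setter for the construct-only configuration. "frames" also derives the
// per-channel buffer size in bytes (one 32-bit float sample per frame).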
static void webKitWebAudioSrcSetProperty(GObject* object, guint propertyId, const GValue* value, GParamSpec* pspec)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    switch (propertyId) {
    case PROP_RATE:
        priv->sampleRate = g_value_get_float(value);
        break;
    case PROP_BUS:
        priv->bus = static_cast<AudioBus*>(g_value_get_pointer(value));
        break;
    case PROP_PROVIDER:
        priv->provider = static_cast<AudioIOCallback*>(g_value_get_pointer(value));
        break;
    case PROP_FRAMES:
        priv->framesToPull = g_value_get_uint(value);
        priv->bufferSize = sizeof(float) * priv->framesToPull;
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, pspec);
        break;
    }
}

static void webKitWebAudioSrcGetProperty(GObject* object, guint propertyId, GValue* value, GParamSpec* pspec)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    switch (propertyId) {
    case PROP_RATE:
        g_value_set_float(value, priv->sampleRate);
        break;
    case PROP_BUS:
        g_value_set_pointer(value, priv->bus);
        break;
    case PROP_PROVIDER:
        g_value_set_pointer(value, priv->provider);
        break;
    case PROP_FRAMES:
        g_value_set_uint(value, priv->framesToPull);
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, pspec);
        break;
    }
}

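// Body of the streaming task. Each iteration acquires one pooled buffer per
// channel, points the AudioBus channels at the mapped buffer memory, asks the
// provider to render framesToPull frames, and finally pushes every buffer to its
// matching appsrc with timestamps derived from the running sample count.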
static void webKitWebAudioSrcLoop(WebKitWebAudioSrc* src)
{
    WebKitWebAudioSourcePrivate* priv = src->priv;

    ASSERT(priv->bus);
    ASSERT(priv->provider);
    if (!priv->provider || !priv->bus) {
        GST_ELEMENT_ERROR(src, CORE, FAILED, ("Internal WebAudioSrc error"), ("Can't start without provider or bus"));
        gst_task_stop(src->priv->task.get());
        return;
    }

    ASSERT(priv->pool);
    GstClockTime timestamp = gst_util_uint64_scale(priv->numberOfSamples, GST_SECOND, priv->sampleRate);
    priv->numberOfSamples += priv->framesToPull;
    GstClockTime duration = gst_util_uint64_scale(priv->numberOfSamples, GST_SECOND, priv->sampleRate) - timestamp;

    Vector<GRefPtr<GstBuffer>> channelBufferList;
    channelBufferList.reserveInitialCapacity(priv->sources.size());
    for (unsigned i = 0; i < priv->sources.size(); ++i) {
        GRefPtr<GstBuffer> buffer;
        GstFlowReturn ret = gst_buffer_pool_acquire_buffer(priv->pool.get(), &buffer.outPtr(), nullptr);
        if (ret != GST_FLOW_OK) {
            for (auto& buffer : channelBufferList)
                unmapGstBuffer(buffer.get());

            // FLUSHING and EOS are not errors.
            if (ret < GST_FLOW_EOS || ret == GST_FLOW_NOT_LINKED)
                GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to allocate buffer for flow: %s", gst_flow_get_name(ret)));
            gst_task_stop(src->priv->task.get());
            return;
        }

        ASSERT(buffer);
        GST_BUFFER_TIMESTAMP(buffer.get()) = timestamp;
        GST_BUFFER_DURATION(buffer.get()) = duration;
        mapGstBuffer(buffer.get(), GST_MAP_READWRITE);
        priv->bus->setChannelMemory(i, reinterpret_cast<float*>(getGstBufferDataPointer(buffer.get())), priv->framesToPull);
        channelBufferList.uncheckedAppend(WTFMove(buffer));
    }

    // FIXME: Add support for local/live audio input.
    priv->provider->render(nullptr, priv->bus, priv->framesToPull);

    ASSERT(channelBufferList.size() == priv->sources.size());
    bool failed = false;
    for (unsigned i = 0; i < priv->sources.size(); ++i) {
        // Unmap before passing on the buffer.
        auto& buffer = channelBufferList[i];
        unmapGstBuffer(buffer.get());

        if (failed)
            continue;

        auto& appsrc = priv->sources[i];
        // Leak the buffer ref, because gst_app_src_push_buffer steals it.
        GstFlowReturn ret = gst_app_src_push_buffer(GST_APP_SRC(appsrc.get()), buffer.leakRef());
        if (ret != GST_FLOW_OK) {
            // FLUSHING and EOS are not errors.
            if (ret < GST_FLOW_EOS || ret == GST_FLOW_NOT_LINKED)
                GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to push buffer on %s flow: %s", GST_OBJECT_NAME(appsrc.get()), gst_flow_get_name(ret)));
            gst_task_stop(src->priv->task.get());
            failed = true;
        }
    }
}

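// State handling: NULL->READY checks that interleave was created (posting a
// missing-plugin message otherwise) and resets the sample counter; READY->PAUSED
// configures and activates the buffer pool and starts the task; PAUSED->READY
// flushes the pool, joins the task and releases the pool.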
static GstStateChangeReturn webKitWebAudioSrcChangeState(GstElement* element, GstStateChange transition)
{
    GstStateChangeReturn returnValue = GST_STATE_CHANGE_SUCCESS;
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(element);

    switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
        if (!src->priv->interleave) {
            gst_element_post_message(element, gst_missing_element_message_new(element, "interleave"));
            GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (nullptr), ("no interleave"));
            return GST_STATE_CHANGE_FAILURE;
        }
        src->priv->numberOfSamples = 0;
        break;
    default:
        break;
    }

    returnValue = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
    if (UNLIKELY(returnValue == GST_STATE_CHANGE_FAILURE)) {
        GST_DEBUG_OBJECT(src, "State change failed");
        return returnValue;
    }

    switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED: {
        GST_DEBUG_OBJECT(src, "READY->PAUSED");

        src->priv->pool = gst_buffer_pool_new();
        GstStructure* config = gst_buffer_pool_get_config(src->priv->pool.get());
        gst_buffer_pool_config_set_params(config, nullptr, src->priv->bufferSize, 0, 0);
        gst_buffer_pool_set_config(src->priv->pool.get(), config);
        if (!gst_buffer_pool_set_active(src->priv->pool.get(), TRUE))
            returnValue = GST_STATE_CHANGE_FAILURE;
        else if (!gst_task_start(src->priv->task.get()))
            returnValue = GST_STATE_CHANGE_FAILURE;
        break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_READY:
        GST_DEBUG_OBJECT(src, "PAUSED->READY");

#if GST_CHECK_VERSION(1, 4, 0)
        gst_buffer_pool_set_flushing(src->priv->pool.get(), TRUE);
#endif
        if (!gst_task_join(src->priv->task.get()))
            returnValue = GST_STATE_CHANGE_FAILURE;
        gst_buffer_pool_set_active(src->priv->pool.get(), FALSE);
        src->priv->pool = nullptr;
        break;
    default:
        break;
    }

    return returnValue;
}

#endif // ENABLE(WEB_AUDIO) && USE(GSTREAMER)