/*
 *  Copyright (C) 2011, 2012 Igalia S.L
 *  Copyright (C) 2014 Sebastian Dröge <sebastian@centricular.com>
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
 */

#include "config.h"

#include "WebKitWebAudioSourceGStreamer.h"

#if ENABLE(WEB_AUDIO) && USE(GSTREAMER)

#include "AudioBus.h"
#include "AudioIOCallback.h"
#include "GRefPtrGStreamer.h"
#include "GStreamerUtilities.h"
#include <gst/app/app.h>
#include <gst/audio/audio.h>
#include <gst/pbutils/pbutils.h>
#include <wtf/gobject/GUniquePtr.h>

using namespace WebCore;

typedef struct _WebKitWebAudioSrcClass   WebKitWebAudioSrcClass;
typedef struct _WebKitWebAudioSourcePrivate WebKitWebAudioSourcePrivate;

struct _WebKitWebAudioSrc {
    GstBin parent;

    WebKitWebAudioSourcePrivate* priv;
};

struct _WebKitWebAudioSrcClass {
    GstBinClass parentClass;
};

#define WEBKIT_WEB_AUDIO_SRC_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj), WEBKIT_TYPE_WEB_AUDIO_SRC, WebKitWebAudioSourcePrivate))
struct _WebKitWebAudioSourcePrivate {
    gfloat sampleRate;
    AudioBus* bus;
    AudioIOCallback* provider;
    guint framesToPull;
    guint bufferSize;

    GRefPtr<GstElement> interleave;

    GRefPtr<GstTask> task;
    GRecMutex mutex;

    GSList* sources; // List of appsrc elements, one for each planar audio channel.
    GstPad* sourcePad; // src pad of the element; interleaved audio data is pushed through it.

    guint64 numberOfSamples;

    GstBufferPool* pool;
};

enum {
    PROP_RATE = 1,
    PROP_BUS,
    PROP_PROVIDER,
    PROP_FRAMES
};

typedef struct {
    GstBuffer* buffer;
    GstMapInfo info;
} AudioSrcBuffer;

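// The element exposes a single always source pad producing interleaved, native-endian 32-bit float
// samples; webKitWebAudioSrcConstructed() retargets it at interleave's source pad.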
static GstStaticPadTemplate srcTemplate = GST_STATIC_PAD_TEMPLATE("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS(GST_AUDIO_CAPS_MAKE(GST_AUDIO_NE(F32))));

GST_DEBUG_CATEGORY_STATIC(webkit_web_audio_src_debug);
#define GST_CAT_DEFAULT webkit_web_audio_src_debug

static void webKitWebAudioSrcConstructed(GObject*);
static void webKitWebAudioSrcFinalize(GObject*);
static void webKitWebAudioSrcSetProperty(GObject*, guint propertyId, const GValue*, GParamSpec*);
static void webKitWebAudioSrcGetProperty(GObject*, guint propertyId, GValue*, GParamSpec*);
static GstStateChangeReturn webKitWebAudioSrcChangeState(GstElement*, GstStateChange);
static void webKitWebAudioSrcLoop(WebKitWebAudioSrc*);

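// Each appsrc feeds one planar channel to interleave, so its caps describe mono float audio at the
// AudioContext sample rate; the channel position is filled in per channel in webKitWebAudioSrcConstructed().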
static GstCaps* getGStreamerMonoAudioCaps(float sampleRate)
{
    return gst_caps_new_simple("audio/x-raw", "rate", G_TYPE_INT, static_cast<int>(sampleRate),
        "channels", G_TYPE_INT, 1,
        "format", G_TYPE_STRING, GST_AUDIO_NE(F32),
        "layout", G_TYPE_STRING, "interleaved", nullptr);
}

static GstAudioChannelPosition webKitWebAudioGStreamerChannelPosition(int channelIndex)
{
    GstAudioChannelPosition position = GST_AUDIO_CHANNEL_POSITION_NONE;

    switch (channelIndex) {
    case AudioBus::ChannelLeft:
        position = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
        break;
    case AudioBus::ChannelRight:
        position = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
        break;
    case AudioBus::ChannelCenter:
        position = GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER;
        break;
    case AudioBus::ChannelLFE:
        position = GST_AUDIO_CHANNEL_POSITION_LFE1;
        break;
    case AudioBus::ChannelSurroundLeft:
        position = GST_AUDIO_CHANNEL_POSITION_REAR_LEFT;
        break;
    case AudioBus::ChannelSurroundRight:
        position = GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT;
        break;
    default:
        break;
    }

    return position;
}

#define webkit_web_audio_src_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE(WebKitWebAudioSrc, webkit_web_audio_src, GST_TYPE_BIN, GST_DEBUG_CATEGORY_INIT(webkit_web_audio_src_debug, \
                            "webkitwebaudiosrc", \
                            0, \
                            "webaudiosrc element"));

static void webkit_web_audio_src_class_init(WebKitWebAudioSrcClass* webKitWebAudioSrcClass)
{
    GObjectClass* objectClass = G_OBJECT_CLASS(webKitWebAudioSrcClass);
    GstElementClass* elementClass = GST_ELEMENT_CLASS(webKitWebAudioSrcClass);

    gst_element_class_add_pad_template(elementClass, gst_static_pad_template_get(&srcTemplate));
    gst_element_class_set_metadata(elementClass, "WebKit WebAudio source element", "Source", "Handles WebAudio data from WebCore", "Philippe Normand <pnormand@igalia.com>");

    objectClass->constructed = webKitWebAudioSrcConstructed;
    objectClass->finalize = webKitWebAudioSrcFinalize;
    elementClass->change_state = webKitWebAudioSrcChangeState;

    objectClass->set_property = webKitWebAudioSrcSetProperty;
    objectClass->get_property = webKitWebAudioSrcGetProperty;

    GParamFlags flags = static_cast<GParamFlags>(G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE);
    g_object_class_install_property(objectClass,
                                    PROP_RATE,
                                    g_param_spec_float("rate", "rate",
                                                       "Sample rate", G_MINFLOAT, G_MAXFLOAT,
                                                       44100.0, flags));

    g_object_class_install_property(objectClass,
                                    PROP_BUS,
                                    g_param_spec_pointer("bus", "bus",
                                                         "Bus", flags));

    g_object_class_install_property(objectClass,
                                    PROP_PROVIDER,
                                    g_param_spec_pointer("provider", "provider",
                                                         "Provider", flags));

    g_object_class_install_property(objectClass,
                                    PROP_FRAMES,
                                    g_param_spec_uint("frames", "frames",
                                                      "Number of audio frames to pull at each iteration",
                                                      0, G_MAXUINT8, 128, flags));

    g_type_class_add_private(webKitWebAudioSrcClass, sizeof(WebKitWebAudioSourcePrivate));
}

static void webkit_web_audio_src_init(WebKitWebAudioSrc* src)
{
    WebKitWebAudioSourcePrivate* priv = G_TYPE_INSTANCE_GET_PRIVATE(src, WEBKIT_TYPE_WEB_AUDIO_SRC, WebKitWebAudioSourcePrivate);
    src->priv = priv;
    new (priv) WebKitWebAudioSourcePrivate();

    priv->sourcePad = webkitGstGhostPadFromStaticTemplate(&srcTemplate, "src", 0);
    gst_element_add_pad(GST_ELEMENT(src), priv->sourcePad);

    priv->provider = 0;
    priv->bus = 0;

    g_rec_mutex_init(&priv->mutex);
    priv->task = gst_task_new(reinterpret_cast<GstTaskFunction>(webKitWebAudioSrcLoop), src, 0);

    gst_task_set_lock(priv->task.get(), &priv->mutex);
}

static void webKitWebAudioSrcConstructed(GObject* object)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    ASSERT(priv->bus);
    ASSERT(priv->provider);
    ASSERT(priv->sampleRate);

    priv->interleave = gst_element_factory_make("interleave", nullptr);

    if (!priv->interleave) {
        GST_ERROR_OBJECT(src, "Failed to create interleave");
        return;
    }

    gst_bin_add(GST_BIN(src), priv->interleave.get());

    // For each channel of the bus, create an upstream branch for interleave: an appsrc whose src pad
    // is linked to a newly requested interleave sink pad (appsrc ! interleave.sink_%u).
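    // Resulting topology of the bin:
    //
    //   appsrc (channel 0) --> sink_0 -.
    //   appsrc (channel 1) --> sink_1 --- interleave --> ghost "src" pad
    //   appsrc (channel N) --> sink_N -'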
    for (unsigned channelIndex = 0; channelIndex < priv->bus->numberOfChannels(); channelIndex++) {
        GUniquePtr<gchar> appsrcName(g_strdup_printf("webaudioSrc%u", channelIndex));
        GstElement* appsrc = gst_element_factory_make("appsrc", appsrcName.get());
        GRefPtr<GstCaps> monoCaps = adoptGRef(getGStreamerMonoAudioCaps(priv->sampleRate));

        GstAudioInfo info;
        gst_audio_info_from_caps(&info, monoCaps.get());
        GST_AUDIO_INFO_POSITION(&info, 0) = webKitWebAudioGStreamerChannelPosition(channelIndex);
        GRefPtr<GstCaps> caps = adoptGRef(gst_audio_info_to_caps(&info));

        // Configure the appsrc for minimal latency.
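        // "block" makes pushes from the render loop wait once "max-bytes" (two render quanta per
        // channel) are queued, and GST_FORMAT_TIME means the stream is timed by the buffer
        // timestamps set in webKitWebAudioSrcLoop().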
        g_object_set(appsrc, "max-bytes", static_cast<guint64>(2 * priv->bufferSize), "block", TRUE,
            "format", GST_FORMAT_TIME, "caps", caps.get(), nullptr);

        priv->sources = g_slist_prepend(priv->sources, gst_object_ref(appsrc));

        gst_bin_add(GST_BIN(src), appsrc);
        gst_element_link_pads_full(appsrc, "src", priv->interleave.get(), "sink_%u", GST_PAD_LINK_CHECK_NOTHING);
    }
    priv->sources = g_slist_reverse(priv->sources);

    // interleave's src pad is the only visible pad of our element.
    GRefPtr<GstPad> targetPad = adoptGRef(gst_element_get_static_pad(priv->interleave.get(), "src"));
    gst_ghost_pad_set_target(GST_GHOST_PAD(priv->sourcePad), targetPad.get());
}

static void webKitWebAudioSrcFinalize(GObject* object)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    g_rec_mutex_clear(&priv->mutex);

    g_slist_free_full(priv->sources, reinterpret_cast<GDestroyNotify>(gst_object_unref));

    priv->~WebKitWebAudioSourcePrivate();
    GST_CALL_PARENT(G_OBJECT_CLASS, finalize, ((GObject*)(src)));
}

static void webKitWebAudioSrcSetProperty(GObject* object, guint propertyId, const GValue* value, GParamSpec* pspec)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    switch (propertyId) {
    case PROP_RATE:
        priv->sampleRate = g_value_get_float(value);
        break;
    case PROP_BUS:
        priv->bus = static_cast<AudioBus*>(g_value_get_pointer(value));
        break;
    case PROP_PROVIDER:
        priv->provider = static_cast<AudioIOCallback*>(g_value_get_pointer(value));
        break;
    case PROP_FRAMES:
        priv->framesToPull = g_value_get_uint(value);
        priv->bufferSize = sizeof(float) * priv->framesToPull;
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, pspec);
        break;
    }
}

static void webKitWebAudioSrcGetProperty(GObject* object, guint propertyId, GValue* value, GParamSpec* pspec)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    switch (propertyId) {
    case PROP_RATE:
        g_value_set_float(value, priv->sampleRate);
        break;
    case PROP_BUS:
        g_value_set_pointer(value, priv->bus);
        break;
    case PROP_PROVIDER:
        g_value_set_pointer(value, priv->provider);
        break;
    case PROP_FRAMES:
        g_value_set_uint(value, priv->framesToPull);
        break;
    default:
        G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, pspec);
        break;
    }
}

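// Runs repeatedly on the GstTask while the element is PAUSED or PLAYING. Each iteration produces one
// render quantum: a buffer per channel is acquired from the pool, the AudioBus channels are pointed at
// the mapped buffer memory, the provider renders into them, and each buffer is pushed to its appsrc.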
static void webKitWebAudioSrcLoop(WebKitWebAudioSrc* src)
{
    WebKitWebAudioSourcePrivate* priv = src->priv;

    ASSERT(priv->bus);
    ASSERT(priv->provider);
    if (!priv->provider || !priv->bus) {
        GST_ELEMENT_ERROR(src, CORE, FAILED, ("Internal WebAudioSrc error"), ("Can't start without provider or bus"));
        gst_task_stop(src->priv->task.get());
        return;
    }

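    // Derive timestamp and duration from the running sample count so the buffers carry a gapless,
    // sample-accurate clock that does not depend on when this task iteration actually runs.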
    GstClockTime timestamp = gst_util_uint64_scale(priv->numberOfSamples, GST_SECOND, priv->sampleRate);
    priv->numberOfSamples += priv->framesToPull;
    GstClockTime duration = gst_util_uint64_scale(priv->numberOfSamples, GST_SECOND, priv->sampleRate) - timestamp;

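    // Acquire and map one pool buffer per channel and hand the mapped memory to the AudioBus, so the
    // provider renders directly into the GStreamer buffers. Iterating backwards while prepending
    // leaves channelBufferList in channel order, matching priv->sources.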
    GSList* channelBufferList = 0;
    for (int i = g_slist_length(priv->sources) - 1; i >= 0; i--) {
        AudioSrcBuffer* buffer = g_new(AudioSrcBuffer, 1);
        GstBuffer* channelBuffer;

        GstFlowReturn ret = gst_buffer_pool_acquire_buffer(priv->pool, &channelBuffer, nullptr);

        if (ret != GST_FLOW_OK) {
            g_free(buffer);
            while (channelBufferList) {
                buffer = static_cast<AudioSrcBuffer*>(channelBufferList->data);
                gst_buffer_unmap(buffer->buffer, &buffer->info);
                gst_buffer_unref(buffer->buffer);
                g_free(buffer);
                channelBufferList = g_slist_delete_link(channelBufferList, channelBufferList);
            }

            // FLUSHING and EOS are not errors.
            if (ret < GST_FLOW_EOS || ret == GST_FLOW_NOT_LINKED)
                GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to allocate buffer for flow: %s", gst_flow_get_name(ret)));
            gst_task_stop(src->priv->task.get());
            return;
        }

        ASSERT(channelBuffer);
        buffer->buffer = channelBuffer;
        GST_BUFFER_TIMESTAMP(channelBuffer) = timestamp;
        GST_BUFFER_DURATION(channelBuffer) = duration;
        gst_buffer_map(channelBuffer, &buffer->info, (GstMapFlags) GST_MAP_READWRITE);
        priv->bus->setChannelMemory(i, reinterpret_cast<float*>(buffer->info.data), priv->framesToPull);
        channelBufferList = g_slist_prepend(channelBufferList, buffer);
    }

    // FIXME: Add support for local/live audio input.
    priv->provider->render(0, priv->bus, priv->framesToPull);

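    // Unmap each rendered channel buffer and push it to the matching appsrc. Once a push fails, the
    // remaining buffers are released without being pushed.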
    GSList* sourcesIt = priv->sources;
    GSList* buffersIt = channelBufferList;

    GstFlowReturn ret = GST_FLOW_OK;
    for (int i = 0; sourcesIt && buffersIt; sourcesIt = g_slist_next(sourcesIt), buffersIt = g_slist_next(buffersIt), ++i) {
        GstElement* appsrc = static_cast<GstElement*>(sourcesIt->data);
        AudioSrcBuffer* buffer = static_cast<AudioSrcBuffer*>(buffersIt->data);
        GstBuffer* channelBuffer = buffer->buffer;

        // Unmap before passing on the buffer.
        gst_buffer_unmap(channelBuffer, &buffer->info);
        g_free(buffer);

        if (ret == GST_FLOW_OK) {
            ret = gst_app_src_push_buffer(GST_APP_SRC(appsrc), channelBuffer);
            if (ret != GST_FLOW_OK) {
                // FLUSHING and EOS are not errors.
                if (ret < GST_FLOW_EOS || ret == GST_FLOW_NOT_LINKED)
                    GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to push buffer on %s flow: %s", GST_OBJECT_NAME(appsrc), gst_flow_get_name(ret)));
                gst_task_stop(src->priv->task.get());
            }
        } else
            gst_buffer_unref(channelBuffer);
    }

    g_slist_free(channelBufferList);
}

static GstStateChangeReturn webKitWebAudioSrcChangeState(GstElement* element, GstStateChange transition)
{
    GstStateChangeReturn returnValue = GST_STATE_CHANGE_SUCCESS;
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(element);

    switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
        if (!src->priv->interleave) {
            gst_element_post_message(element, gst_missing_element_message_new(element, "interleave"));
            GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (0), ("no interleave"));
            return GST_STATE_CHANGE_FAILURE;
        }
        src->priv->numberOfSamples = 0;
        break;
    default:
        break;
    }

    returnValue = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
    if (UNLIKELY(returnValue == GST_STATE_CHANGE_FAILURE)) {
        GST_DEBUG_OBJECT(src, "State change failed");
        return returnValue;
    }

    switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED: {
        GST_DEBUG_OBJECT(src, "READY->PAUSED");
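        // Create a pool of fixed-size buffers, each holding one channel's render quantum, and start
        // the task that drives webKitWebAudioSrcLoop().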
        src->priv->pool = gst_buffer_pool_new();
        GstStructure* config = gst_buffer_pool_get_config(src->priv->pool);
        gst_buffer_pool_config_set_params(config, nullptr, src->priv->bufferSize, 0, 0);
        gst_buffer_pool_set_config(src->priv->pool, config);
        if (!gst_buffer_pool_set_active(src->priv->pool, TRUE))
            returnValue = GST_STATE_CHANGE_FAILURE;
        else if (!gst_task_start(src->priv->task.get()))
            returnValue = GST_STATE_CHANGE_FAILURE;
        break;
    }
    case GST_STATE_CHANGE_PAUSED_TO_READY:
        GST_DEBUG_OBJECT(src, "PAUSED->READY");
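        // Flushing the pool (GStreamer >= 1.4) makes any gst_buffer_pool_acquire_buffer() pending in
        // the render loop bail out, so the task can be joined without deadlocking.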
#if GST_CHECK_VERSION(1, 4, 0)
        gst_buffer_pool_set_flushing(src->priv->pool, TRUE);
#endif
        if (!gst_task_join(src->priv->task.get()))
            returnValue = GST_STATE_CHANGE_FAILURE;
        gst_buffer_pool_set_active(src->priv->pool, FALSE);
        gst_object_unref(src->priv->pool);
        src->priv->pool = nullptr;
        break;
    default:
        break;
    }

    return returnValue;
}

#endif // ENABLE(WEB_AUDIO) && USE(GSTREAMER)