[GLIB] Use GUniquePtr instead of GOwnPtr
[WebKit-https.git] / Source / WebCore / platform / audio / gstreamer / WebKitWebAudioSourceGStreamer.cpp
1 /*
2  *  Copyright (C) 2011, 2012 Igalia S.L
3  *
4  *  This library is free software; you can redistribute it and/or
5  *  modify it under the terms of the GNU Lesser General Public
6  *  License as published by the Free Software Foundation; either
7  *  version 2 of the License, or (at your option) any later version.
8  *
9  *  This library is distributed in the hope that it will be useful,
10  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
11  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
12  *  Lesser General Public License for more details.
13  *
14  *  You should have received a copy of the GNU Lesser General Public
15  *  License along with this library; if not, write to the Free Software
16  *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
17  */
18
19 #include "config.h"
20
21 #include "WebKitWebAudioSourceGStreamer.h"
22
23 #if ENABLE(WEB_AUDIO) && USE(GSTREAMER)
24
25 #include "AudioBus.h"
26 #include "AudioIOCallback.h"
27 #include "GRefPtrGStreamer.h"
28 #include "GStreamerUtilities.h"
29 #include <gst/audio/audio.h>
30 #include <gst/pbutils/pbutils.h>
31 #include <wtf/gobject/GUniquePtr.h>
32
33 using namespace WebCore;
34
typedef struct _WebKitWebAudioSrcClass   WebKitWebAudioSrcClass;
typedef struct _WebKitWebAudioSourcePrivate WebKitWebAudioSourcePrivate;

// Instance structure: a GstBin wrapping the per-channel queue branches plus
// the interleave and wavenc elements built in webKitWebAudioSrcConstructed().
struct _WebKitWebAudioSrc {
    GstBin parent;

    WebKitWebAudioSourcePrivate* priv;
};

struct _WebKitWebAudioSrcClass {
    GstBinClass parentClass;
};

// NOTE(review): this macro appears unused (webkit_web_audio_src_init calls
// G_TYPE_INSTANCE_GET_PRIVATE directly) and spells the type macro
// WEBKIT_TYPE_WEBAUDIO_SRC while init uses WEBKIT_TYPE_WEB_AUDIO_SRC —
// confirm which spelling the header declares.
#define WEBKIT_WEB_AUDIO_SRC_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj), WEBKIT_TYPE_WEBAUDIO_SRC, WebKitWebAudioSourcePrivate))
struct _WebKitWebAudioSourcePrivate {
    gfloat sampleRate; // Set via the "rate" construct-only property.
    AudioBus* bus; // Set via the "bus" property; never freed here, so presumably not owned.
    AudioIOCallback* provider; // Set via the "provider" property; presumably not owned.
    guint framesToPull; // Frames rendered per task-loop iteration ("frames" property).

    GRefPtr<GstElement> interleave;
    GRefPtr<GstElement> wavEncoder;

    // Streaming task running webKitWebAudioSrcLoop(); mutex is its task lock.
    GRefPtr<GstTask> task;
    GRecMutex mutex;

    GSList* pads; // List of queue sink pads. One queue for each planar audio channel.
    GstPad* sourcePad; // src pad of the element, interleaved wav data is pushed to it.

    bool newStreamEventPending; // True until stream-start/caps/segment events are pushed.
    GstSegment segment;
};

// GObject property IDs installed in webkit_web_audio_src_class_init().
enum {
    PROP_RATE = 1,
    PROP_BUS,
    PROP_PROVIDER,
    PROP_FRAMES
};
74
// Template for the element's single always-present src pad; it exposes the
// interleaved WAV data produced by the internal wavenc element.
static GstStaticPadTemplate srcTemplate = GST_STATIC_PAD_TEMPLATE("src",
                                                                  GST_PAD_SRC,
                                                                  GST_PAD_ALWAYS,
                                                                  GST_STATIC_CAPS("audio/x-wav"));

GST_DEBUG_CATEGORY_STATIC(webkit_web_audio_src_debug);
#define GST_CAT_DEFAULT webkit_web_audio_src_debug

// Forward declarations for the GObject/GstElement vfuncs and the task loop.
static void webKitWebAudioSrcConstructed(GObject*);
static void webKitWebAudioSrcFinalize(GObject*);
static void webKitWebAudioSrcSetProperty(GObject*, guint propertyId, const GValue*, GParamSpec*);
static void webKitWebAudioSrcGetProperty(GObject*, guint propertyId, GValue*, GParamSpec*);
static GstStateChangeReturn webKitWebAudioSrcChangeState(GstElement*, GstStateChange);
static void webKitWebAudioSrcLoop(WebKitWebAudioSrc*);
89
90 static GstCaps* getGStreamerMonoAudioCaps(float sampleRate)
91 {
92     return gst_caps_new_simple("audio/x-raw", "rate", G_TYPE_INT, static_cast<int>(sampleRate),
93         "channels", G_TYPE_INT, 1,
94         "format", G_TYPE_STRING, gst_audio_format_to_string(GST_AUDIO_FORMAT_F32),
95         "layout", G_TYPE_STRING, "non-interleaved", NULL);
96 }
97
98 static GstAudioChannelPosition webKitWebAudioGStreamerChannelPosition(int channelIndex)
99 {
100     GstAudioChannelPosition position = GST_AUDIO_CHANNEL_POSITION_NONE;
101
102     switch (channelIndex) {
103     case AudioBus::ChannelLeft:
104         position = GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT;
105         break;
106     case AudioBus::ChannelRight:
107         position = GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT;
108         break;
109     case AudioBus::ChannelCenter:
110         position = GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER;
111         break;
112     case AudioBus::ChannelLFE:
113         position = GST_AUDIO_CHANNEL_POSITION_LFE1;
114         break;
115     case AudioBus::ChannelSurroundLeft:
116         position = GST_AUDIO_CHANNEL_POSITION_REAR_LEFT;
117         break;
118     case AudioBus::ChannelSurroundRight:
119         position = GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT;
120         break;
121     default:
122         break;
123     };
124
125     return position;
126 }
127
#define webkit_web_audio_src_parent_class parent_class
// Registers the WebKitWebAudioSrc GType as a GstBin subclass; the debug
// category is initialized as part of type registration.
G_DEFINE_TYPE_WITH_CODE(WebKitWebAudioSrc, webkit_web_audio_src, GST_TYPE_BIN, GST_DEBUG_CATEGORY_INIT(webkit_web_audio_src_debug, \
                            "webkitwebaudiosrc", \
                            0, \
                            "webaudiosrc element"));
133
134 static void webkit_web_audio_src_class_init(WebKitWebAudioSrcClass* webKitWebAudioSrcClass)
135 {
136     GObjectClass* objectClass = G_OBJECT_CLASS(webKitWebAudioSrcClass);
137     GstElementClass* elementClass = GST_ELEMENT_CLASS(webKitWebAudioSrcClass);
138
139     gst_element_class_add_pad_template(elementClass, gst_static_pad_template_get(&srcTemplate));
140     gst_element_class_set_metadata(elementClass, "WebKit WebAudio source element", "Source", "Handles WebAudio data from WebCore", "Philippe Normand <pnormand@igalia.com>");
141
142     objectClass->constructed = webKitWebAudioSrcConstructed;
143     objectClass->finalize = webKitWebAudioSrcFinalize;
144     elementClass->change_state = webKitWebAudioSrcChangeState;
145
146     objectClass->set_property = webKitWebAudioSrcSetProperty;
147     objectClass->get_property = webKitWebAudioSrcGetProperty;
148
149     GParamFlags flags = static_cast<GParamFlags>(G_PARAM_CONSTRUCT_ONLY | G_PARAM_READWRITE);
150     g_object_class_install_property(objectClass,
151                                     PROP_RATE,
152                                     g_param_spec_float("rate", "rate",
153                                                        "Sample rate", G_MINDOUBLE, G_MAXDOUBLE,
154                                                        44100.0, flags));
155
156     g_object_class_install_property(objectClass,
157                                     PROP_BUS,
158                                     g_param_spec_pointer("bus", "bus",
159                                                          "Bus", flags));
160
161     g_object_class_install_property(objectClass,
162                                     PROP_PROVIDER,
163                                     g_param_spec_pointer("provider", "provider",
164                                                          "Provider", flags));
165
166     g_object_class_install_property(objectClass,
167                                     PROP_FRAMES,
168                                     g_param_spec_uint("frames", "frames",
169                                                       "Number of audio frames to pull at each iteration",
170                                                       0, G_MAXUINT8, 128, flags));
171
172     g_type_class_add_private(webKitWebAudioSrcClass, sizeof(WebKitWebAudioSourcePrivate));
173 }
174
// Instance initializer: constructs the private struct in place, creates the
// ghost src pad and the streaming task that drives webKitWebAudioSrcLoop().
static void webkit_web_audio_src_init(WebKitWebAudioSrc* src)
{
    WebKitWebAudioSourcePrivate* priv = G_TYPE_INSTANCE_GET_PRIVATE(src, WEBKIT_TYPE_WEB_AUDIO_SRC, WebKitWebAudioSourcePrivate);
    src->priv = priv;
    // Placement-new runs the C++ constructors of the GRefPtr members inside
    // GObject-allocated memory; matched by the explicit destructor call in
    // webKitWebAudioSrcFinalize().
    new (priv) WebKitWebAudioSourcePrivate();

    // Ghost pad with no target yet; the target is set in constructed() once
    // wavenc exists.
    priv->sourcePad = webkitGstGhostPadFromStaticTemplate(&srcTemplate, "src", 0);
    gst_element_add_pad(GST_ELEMENT(src), priv->sourcePad);

    priv->provider = 0;
    priv->bus = 0;

    priv->newStreamEventPending = true;
    gst_segment_init(&priv->segment, GST_FORMAT_TIME);

    g_rec_mutex_init(&priv->mutex);
    priv->task = gst_task_new(reinterpret_cast<GstTaskFunction>(webKitWebAudioSrcLoop), src, 0);

    gst_task_set_lock(priv->task.get(), &priv->mutex);
}
195
196 static void webKitWebAudioSrcConstructed(GObject* object)
197 {
198     WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
199     WebKitWebAudioSourcePrivate* priv = src->priv;
200
201     ASSERT(priv->bus);
202     ASSERT(priv->provider);
203     ASSERT(priv->sampleRate);
204
205     priv->interleave = gst_element_factory_make("interleave", 0);
206     priv->wavEncoder = gst_element_factory_make("wavenc", 0);
207
208     if (!priv->interleave) {
209         GST_ERROR_OBJECT(src, "Failed to create interleave");
210         return;
211     }
212
213     if (!priv->wavEncoder) {
214         GST_ERROR_OBJECT(src, "Failed to create wavenc");
215         return;
216     }
217
218     gst_bin_add_many(GST_BIN(src), priv->interleave.get(), priv->wavEncoder.get(), NULL);
219     gst_element_link_pads_full(priv->interleave.get(), "src", priv->wavEncoder.get(), "sink", GST_PAD_LINK_CHECK_NOTHING);
220
221     // For each channel of the bus create a new upstream branch for interleave, like:
222     // queue ! capsfilter ! audioconvert. which is plugged to a new interleave request sinkpad.
223     for (unsigned channelIndex = 0; channelIndex < priv->bus->numberOfChannels(); channelIndex++) {
224         GUniquePtr<gchar> queueName(g_strdup_printf("webaudioQueue%u", channelIndex));
225         GstElement* queue = gst_element_factory_make("queue", queueName.get());
226         GstElement* capsfilter = gst_element_factory_make("capsfilter", 0);
227         GstElement* audioconvert = gst_element_factory_make("audioconvert", 0);
228
229         GRefPtr<GstCaps> monoCaps = adoptGRef(getGStreamerMonoAudioCaps(priv->sampleRate));
230
231         GstAudioInfo info;
232         gst_audio_info_from_caps(&info, monoCaps.get());
233         GST_AUDIO_INFO_POSITION(&info, 0) = webKitWebAudioGStreamerChannelPosition(channelIndex);
234         GRefPtr<GstCaps> caps = adoptGRef(gst_audio_info_to_caps(&info));
235         g_object_set(capsfilter, "caps", caps.get(), NULL);
236
237         // Configure the queue for minimal latency.
238         g_object_set(queue, "max-size-buffers", static_cast<guint>(1), NULL);
239
240         GstPad* pad = gst_element_get_static_pad(queue, "sink");
241         priv->pads = g_slist_prepend(priv->pads, pad);
242
243         gst_bin_add_many(GST_BIN(src), queue, capsfilter, audioconvert, NULL);
244         gst_element_link_pads_full(queue, "src", capsfilter, "sink", GST_PAD_LINK_CHECK_NOTHING);
245         gst_element_link_pads_full(capsfilter, "src", audioconvert, "sink", GST_PAD_LINK_CHECK_NOTHING);
246         gst_element_link_pads_full(audioconvert, "src", priv->interleave.get(), 0, GST_PAD_LINK_CHECK_NOTHING);
247
248     }
249     priv->pads = g_slist_reverse(priv->pads);
250
251     // wavenc's src pad is the only visible pad of our element.
252     GRefPtr<GstPad> targetPad = adoptGRef(gst_element_get_static_pad(priv->wavEncoder.get(), "src"));
253     gst_ghost_pad_set_target(GST_GHOST_PAD(priv->sourcePad), targetPad.get());
254 }
255
// Finalizer: releases GLib resources, destroys the C++ members constructed by
// placement-new in webkit_web_audio_src_init(), then chains up.
static void webKitWebAudioSrcFinalize(GObject* object)
{
    WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
    WebKitWebAudioSourcePrivate* priv = src->priv;

    g_rec_mutex_clear(&priv->mutex);

    // Each pad in the list holds a reference taken in constructed().
    g_slist_free_full(priv->pads, reinterpret_cast<GDestroyNotify>(gst_object_unref));

    // Explicit destructor call matches the placement-new in init; must run
    // before the GObject memory is released by the parent finalizer.
    priv->~WebKitWebAudioSourcePrivate();
    GST_CALL_PARENT(G_OBJECT_CLASS, finalize, ((GObject* )(src)));
}
268
269 static void webKitWebAudioSrcSetProperty(GObject* object, guint propertyId, const GValue* value, GParamSpec* pspec)
270 {
271     WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
272     WebKitWebAudioSourcePrivate* priv = src->priv;
273
274     switch (propertyId) {
275     case PROP_RATE:
276         priv->sampleRate = g_value_get_float(value);
277         break;
278     case PROP_BUS:
279         priv->bus = static_cast<AudioBus*>(g_value_get_pointer(value));
280         break;
281     case PROP_PROVIDER:
282         priv->provider = static_cast<AudioIOCallback*>(g_value_get_pointer(value));
283         break;
284     case PROP_FRAMES:
285         priv->framesToPull = g_value_get_uint(value);
286         break;
287     default:
288         G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, pspec);
289         break;
290     }
291 }
292
293 static void webKitWebAudioSrcGetProperty(GObject* object, guint propertyId, GValue* value, GParamSpec* pspec)
294 {
295     WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(object);
296     WebKitWebAudioSourcePrivate* priv = src->priv;
297
298     switch (propertyId) {
299     case PROP_RATE:
300         g_value_set_float(value, priv->sampleRate);
301         break;
302     case PROP_BUS:
303         g_value_set_pointer(value, priv->bus);
304         break;
305     case PROP_PROVIDER:
306         g_value_set_pointer(value, priv->provider);
307         break;
308     case PROP_FRAMES:
309         g_value_set_uint(value, priv->framesToPull);
310         break;
311     default:
312         G_OBJECT_WARN_INVALID_PROPERTY_ID(object, propertyId, pspec);
313         break;
314     }
315 }
316
317 static void webKitWebAudioSrcLoop(WebKitWebAudioSrc* src)
318 {
319     WebKitWebAudioSourcePrivate* priv = src->priv;
320
321     ASSERT(priv->bus);
322     ASSERT(priv->provider);
323     if (!priv->provider || !priv->bus)
324         return;
325
326     GSList* channelBufferList = 0;
327     register int i;
328     unsigned bufferSize = priv->framesToPull * sizeof(float);
329     for (i = g_slist_length(priv->pads) - 1; i >= 0; i--) {
330         GstBuffer* channelBuffer = gst_buffer_new_and_alloc(bufferSize);
331         ASSERT(channelBuffer);
332         channelBufferList = g_slist_prepend(channelBufferList, channelBuffer);
333         GstMapInfo info;
334         gst_buffer_map(channelBuffer, &info, GST_MAP_READ);
335         priv->bus->setChannelMemory(i, reinterpret_cast<float*>(info.data), priv->framesToPull);
336         gst_buffer_unmap(channelBuffer, &info);
337     }
338
339     // FIXME: Add support for local/live audio input.
340     priv->provider->render(0, priv->bus, priv->framesToPull);
341
342     GSList* padsIt = priv->pads;
343     GSList* buffersIt = channelBufferList;
344
345 #if GST_CHECK_VERSION(1, 2, 0)
346     guint groupId = 0;
347     if (priv->newStreamEventPending)
348         groupId = gst_util_group_id_next();
349 #endif
350
351     for (i = 0; padsIt && buffersIt; padsIt = g_slist_next(padsIt), buffersIt = g_slist_next(buffersIt), ++i) {
352         GstPad* pad = static_cast<GstPad*>(padsIt->data);
353         GstBuffer* channelBuffer = static_cast<GstBuffer*>(buffersIt->data);
354
355         // Send stream-start, segment and caps events downstream, along with the first buffer.
356         if (priv->newStreamEventPending) {
357             GRefPtr<GstElement> queue = adoptGRef(gst_pad_get_parent_element(pad));
358             GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(queue.get(), "sink"));
359             GUniquePtr<gchar> queueName(gst_element_get_name(queue.get()));
360             GUniquePtr<gchar> streamId(g_strdup_printf("webaudio/%s", queueName.get()));
361             GstEvent* streamStartEvent = gst_event_new_stream_start(streamId.get());
362 #if GST_CHECK_VERSION(1, 2, 0)
363             gst_event_set_group_id(streamStartEvent, groupId);
364 #endif
365             gst_pad_send_event(sinkPad.get(), streamStartEvent);
366
367             GRefPtr<GstCaps> monoCaps = adoptGRef(getGStreamerMonoAudioCaps(priv->sampleRate));
368             GstAudioInfo info;
369             gst_audio_info_from_caps(&info, monoCaps.get());
370             GST_AUDIO_INFO_POSITION(&info, 0) = webKitWebAudioGStreamerChannelPosition(i);
371             GRefPtr<GstCaps> capsWithChannelPosition = adoptGRef(gst_audio_info_to_caps(&info));
372             gst_pad_send_event(sinkPad.get(), gst_event_new_caps(capsWithChannelPosition.get()));
373
374             gst_pad_send_event(sinkPad.get(), gst_event_new_segment(&priv->segment));
375         }
376
377         GstFlowReturn ret = gst_pad_chain(pad, channelBuffer);
378         if (ret != GST_FLOW_OK)
379             GST_ELEMENT_ERROR(src, CORE, PAD, ("Internal WebAudioSrc error"), ("Failed to push buffer on %s:%s flow: %s", GST_DEBUG_PAD_NAME(pad), gst_flow_get_name(ret)));
380     }
381
382     priv->newStreamEventPending = false;
383
384     g_slist_free(channelBufferList);
385 }
386
387 static GstStateChangeReturn webKitWebAudioSrcChangeState(GstElement* element, GstStateChange transition)
388 {
389     GstStateChangeReturn returnValue = GST_STATE_CHANGE_SUCCESS;
390     WebKitWebAudioSrc* src = WEBKIT_WEB_AUDIO_SRC(element);
391
392     switch (transition) {
393     case GST_STATE_CHANGE_NULL_TO_READY:
394         if (!src->priv->interleave) {
395             gst_element_post_message(element, gst_missing_element_message_new(element, "interleave"));
396             GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (0), ("no interleave"));
397             return GST_STATE_CHANGE_FAILURE;
398         }
399         if (!src->priv->wavEncoder) {
400             gst_element_post_message(element, gst_missing_element_message_new(element, "wavenc"));
401             GST_ELEMENT_ERROR(src, CORE, MISSING_PLUGIN, (0), ("no wavenc"));
402             return GST_STATE_CHANGE_FAILURE;
403         }
404         break;
405     default:
406         break;
407     }
408
409     returnValue = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
410     if (UNLIKELY(returnValue == GST_STATE_CHANGE_FAILURE)) {
411         GST_DEBUG_OBJECT(src, "State change failed");
412         return returnValue;
413     }
414
415     switch (transition) {
416     case GST_STATE_CHANGE_READY_TO_PAUSED:
417         GST_DEBUG_OBJECT(src, "READY->PAUSED");
418         if (!gst_task_start(src->priv->task.get()))
419             returnValue = GST_STATE_CHANGE_FAILURE;
420         break;
421     case GST_STATE_CHANGE_PAUSED_TO_READY:
422         src->priv->newStreamEventPending = true;
423         GST_DEBUG_OBJECT(src, "PAUSED->READY");
424         if (!gst_task_join(src->priv->task.get()))
425             returnValue = GST_STATE_CHANGE_FAILURE;
426         break;
427     default:
428         break;
429     }
430
431     return returnValue;
432 }
433
434 #endif // ENABLE(WEB_AUDIO) && USE(GSTREAMER)