[CoordinatedGraphics] VideoRenderRequestScheduler shouldn't dispatch to main thread...
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / VideoSinkGStreamer.cpp
1 /*
2  *  Copyright (C) 2007 OpenedHand
3  *  Copyright (C) 2007 Alp Toker <alp@atoker.com>
4  *  Copyright (C) 2009, 2010, 2011, 2012 Igalia S.L
5  *
6  *  This library is free software; you can redistribute it and/or
7  *  modify it under the terms of the GNU Lesser General Public
8  *  License as published by the Free Software Foundation; either
9  *  version 2 of the License, or (at your option) any later version.
10  *
11  *  This library is distributed in the hope that it will be useful,
12  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
13  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  *  Lesser General Public License for more details.
15  *
16  *  You should have received a copy of the GNU Lesser General Public
17  *  License along with this library; if not, write to the Free Software
18  *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
19  */
20
21 /*
22  *
23  * WebKitVideoSink is a GStreamer sink element that triggers
24  * repaints in the WebKit GStreamer media player for the
25  * current video buffer.
26  */
27
28 #include "config.h"
29 #include "VideoSinkGStreamer.h"
30
31 #if ENABLE(VIDEO) && USE(GSTREAMER)
32 #include "GRefPtrGStreamer.h"
33 #include "GStreamerUtilities.h"
34 #include "IntSize.h"
35 #include <glib.h>
36 #include <gst/gst.h>
37 #include <gst/video/gstvideometa.h>
38 #include <wtf/Condition.h>
39 #include <wtf/RunLoop.h>
40
41 using namespace WebCore;
42
43 // CAIRO_FORMAT_RGB24 used to render the video buffers is little/big endian dependant.
44 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
45 #define GST_CAPS_FORMAT "{ BGRx, BGRA }"
46 #else
47 #define GST_CAPS_FORMAT "{ xRGB, ARGB }"
48 #endif
49 #if GST_CHECK_VERSION(1, 1, 0)
50 #define GST_FEATURED_CAPS GST_VIDEO_CAPS_MAKE_WITH_FEATURES(GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, GST_CAPS_FORMAT) ";"
51 #else
52 #define GST_FEATURED_CAPS
53 #endif
54
// Full pad caps: optionally the GL-texture-upload-featured caps (GStreamer >= 1.1)
// followed by the plain system-memory caps in the Cairo-compatible formats above.
#define WEBKIT_VIDEO_SINK_PAD_CAPS GST_FEATURED_CAPS GST_VIDEO_CAPS_MAKE(GST_CAPS_FORMAT)

// Single always-present sink pad accepting the formats declared above.
static GstStaticPadTemplate s_sinkTemplate = GST_STATIC_PAD_TEMPLATE("sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(WEBKIT_VIDEO_SINK_PAD_CAPS));


GST_DEBUG_CATEGORY_STATIC(webkitVideoSinkDebug);
#define GST_CAT_DEFAULT webkitVideoSinkDebug

// Signals emitted by the sink; only "repaint-requested" exists today.
enum {
    REPAINT_REQUESTED,
    LAST_SIGNAL
};

static guint webkitVideoSinkSignals[LAST_SIGNAL] = { 0, };

// Forward declarations: signal emission helper and the sample-preparation helper
// (which performs the alpha premultiplication for Cairo when needed).
static void webkitVideoSinkRepaintRequested(WebKitVideoSink*, GstSample*);
static GRefPtr<GstSample> webkitVideoSinkRequestRender(WebKitVideoSink*, GstBuffer*);
72
// Hands video samples from the GStreamer streaming thread to the repaint
// machinery. Two compile-time modes:
//  - COORDINATED_GRAPHICS_THREADED: the "repaint-requested" signal is emitted
//    synchronously on the streaming thread (no main-thread dispatch needed).
//  - otherwise: the sample is published through a main-RunLoop timer and the
//    streaming thread blocks on m_dataCondition until render() has consumed it.
// All state is protected by m_sampleMutex.
class VideoRenderRequestScheduler {
public:
    VideoRenderRequestScheduler()
#if !USE(COORDINATED_GRAPHICS_THREADED)
        : m_timer(RunLoop::main(), this, &VideoRenderRequestScheduler::render)
#endif
    {
#if PLATFORM(GTK) && !USE(COORDINATED_GRAPHICS_THREADED)
        // Use a higher priority than WebCore timers (G_PRIORITY_HIGH_IDLE + 20).
        m_timer.setPriority(G_PRIORITY_HIGH_IDLE + 19);
#endif
    }

    // Re-arms the scheduler after a flush/unlock (see webkitVideoSinkUnlockStop/Start).
    void start()
    {
        LockHolder locker(m_sampleMutex);
        m_unlocked = false;
    }

    // Drops any pending sample and wakes a streaming thread that may be
    // blocked in requestRender(), so unlock()/stop() cannot deadlock.
    void stop()
    {
        LockHolder locker(m_sampleMutex);
        m_sample = nullptr;
        m_unlocked = true;
#if !USE(COORDINATED_GRAPHICS_THREADED)
        m_timer.stop();
        m_dataCondition.notifyOne();
#endif
    }

    // Called on the streaming thread for every buffer. Returns false only when
    // sample preparation failed (maps to GST_FLOW_ERROR in the render vfunc);
    // returning true while unlocked silently discards the buffer.
    bool requestRender(WebKitVideoSink* sink, GstBuffer* buffer)
    {
        LockHolder locker(m_sampleMutex);
        if (m_unlocked)
            return true;

        m_sample = webkitVideoSinkRequestRender(sink, buffer);
        if (!m_sample)
            return false;

#if USE(COORDINATED_GRAPHICS_THREADED)
        if (LIKELY(GST_IS_SAMPLE(m_sample.get())))
            webkitVideoSinkRepaintRequested(sink, m_sample.get());
        m_sample = nullptr;
#else
        m_sink = sink;
        m_timer.startOneShot(0);
        // Block until render() (or stop()) signals; wait() releases the mutex
        // while sleeping, which is what lets the main thread run render().
        m_dataCondition.wait(m_sampleMutex);
#endif
        return true;
    }

private:

#if !USE(COORDINATED_GRAPHICS_THREADED)
    // Timer callback on the main RunLoop: emits the repaint signal, then wakes
    // the streaming thread waiting in requestRender().
    void render()
    {
        LockHolder locker(m_sampleMutex);
        GRefPtr<GstSample> sample = WTF::move(m_sample);
        GRefPtr<WebKitVideoSink> sink = WTF::move(m_sink);
        // m_unlocked can have been set between startOneShot() and this firing;
        // in that case the sample is simply dropped.
        if (sample && !m_unlocked && LIKELY(GST_IS_SAMPLE(sample.get())))
            webkitVideoSinkRepaintRequested(sink.get(), sample.get());
        m_dataCondition.notifyOne();
    }
#endif

    Lock m_sampleMutex;
    GRefPtr<GstSample> m_sample;

#if !USE(COORDINATED_GRAPHICS_THREADED)
    RunLoop::Timer<VideoRenderRequestScheduler> m_timer;
    Condition m_dataCondition;
    GRefPtr<WebKitVideoSink> m_sink;
#endif

    // If this is true all processing should finish ASAP
    // This is necessary because there could be a race between
    // unlock() and render(), where unlock() wins, signals the
    // Condition, then render() tries to render a frame although
    // everything else isn't running anymore. This will lead
    // to deadlocks because render() holds the stream lock.
    //
    // Protected by the sample mutex
    bool m_unlocked { false };
};
158
159 struct _WebKitVideoSinkPrivate {
160     _WebKitVideoSinkPrivate()
161     {
162         gst_video_info_init(&info);
163     }
164
165     ~_WebKitVideoSinkPrivate()
166     {
167         if (currentCaps)
168             gst_caps_unref(currentCaps);
169     }
170
171     VideoRenderRequestScheduler scheduler;
172     GstVideoInfo info;
173     GstCaps* currentCaps;
174 };
175
// GObject boilerplate: defines the WebKitVideoSink type (derived from
// GstVideoSink) and registers the "webkitsink" debug category at type init.
#define webkit_video_sink_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE(WebKitVideoSink, webkit_video_sink, GST_TYPE_VIDEO_SINK, GST_DEBUG_CATEGORY_INIT(webkitVideoSinkDebug, "webkitsink", 0, "webkit video sink"));
178
179
180 static void webkit_video_sink_init(WebKitVideoSink* sink)
181 {
182     sink->priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, WEBKIT_TYPE_VIDEO_SINK, WebKitVideoSinkPrivate);
183     g_object_set(GST_BASE_SINK(sink), "enable-last-sample", FALSE, NULL);
184     new (sink->priv) WebKitVideoSinkPrivate();
185 }
186
187 static void webkitVideoSinkRepaintRequested(WebKitVideoSink* sink, GstSample* sample)
188 {
189     g_signal_emit(sink, webkitVideoSinkSignals[REPAINT_REQUESTED], 0, sample);
190 }
191
192 static GRefPtr<GstSample> webkitVideoSinkRequestRender(WebKitVideoSink* sink, GstBuffer* buffer)
193 {
194     WebKitVideoSinkPrivate* priv = sink->priv;
195     GRefPtr<GstSample> sample = adoptGRef(gst_sample_new(buffer, priv->currentCaps, nullptr, nullptr));
196
197     // The video info structure is valid only if the sink handled an allocation query.
198     GstVideoFormat format = GST_VIDEO_INFO_FORMAT(&priv->info);
199     if (format == GST_VIDEO_FORMAT_UNKNOWN)
200         return nullptr;
201
202 #if !(USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS))
203     // Cairo's ARGB has pre-multiplied alpha while GStreamer's doesn't.
204     // Here we convert to Cairo's ARGB.
205     if (format == GST_VIDEO_FORMAT_ARGB || format == GST_VIDEO_FORMAT_BGRA) {
206         // Because GstBaseSink::render() only owns the buffer reference in the
207         // method scope we can't use gst_buffer_make_writable() here. Also
208         // The buffer content should not be changed here because the same buffer
209         // could be passed multiple times to this method (in theory).
210
211         GstBuffer* newBuffer = WebCore::createGstBuffer(buffer);
212
213         // Check if allocation failed.
214         if (UNLIKELY(!newBuffer)) {
215             gst_buffer_unref(buffer);
216             return nullptr;
217         }
218
219         // We don't use Color::premultipliedARGBFromColor() here because
220         // one function call per video pixel is just too expensive:
221         // For 720p/PAL for example this means 1280*720*25=23040000
222         // function calls per second!
223         GstVideoFrame sourceFrame;
224         GstVideoFrame destinationFrame;
225
226         if (!gst_video_frame_map(&sourceFrame, &priv->info, buffer, GST_MAP_READ)) {
227             gst_buffer_unref(newBuffer);
228             return nullptr;
229         }
230         if (!gst_video_frame_map(&destinationFrame, &priv->info, newBuffer, GST_MAP_WRITE)) {
231             gst_video_frame_unmap(&sourceFrame);
232             gst_buffer_unref(newBuffer);
233             return nullptr;
234         }
235
236         const guint8* source = static_cast<guint8*>(GST_VIDEO_FRAME_PLANE_DATA(&sourceFrame, 0));
237         guint8* destination = static_cast<guint8*>(GST_VIDEO_FRAME_PLANE_DATA(&destinationFrame, 0));
238
239         for (int x = 0; x < GST_VIDEO_FRAME_HEIGHT(&sourceFrame); x++) {
240             for (int y = 0; y < GST_VIDEO_FRAME_WIDTH(&sourceFrame); y++) {
241 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
242                 unsigned short alpha = source[3];
243                 destination[0] = (source[0] * alpha + 128) / 255;
244                 destination[1] = (source[1] * alpha + 128) / 255;
245                 destination[2] = (source[2] * alpha + 128) / 255;
246                 destination[3] = alpha;
247 #else
248                 unsigned short alpha = source[0];
249                 destination[0] = alpha;
250                 destination[1] = (source[1] * alpha + 128) / 255;
251                 destination[2] = (source[2] * alpha + 128) / 255;
252                 destination[3] = (source[3] * alpha + 128) / 255;
253 #endif
254                 source += 4;
255                 destination += 4;
256             }
257         }
258
259         gst_video_frame_unmap(&sourceFrame);
260         gst_video_frame_unmap(&destinationFrame);
261         sample = adoptGRef(gst_sample_new(newBuffer, priv->currentCaps, nullptr, nullptr));
262     }
263 #endif
264
265     return sample;
266 }
267
268 static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buffer)
269 {
270     WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
271     return sink->priv->scheduler.requestRender(sink, buffer) ? GST_FLOW_OK : GST_FLOW_ERROR;
272 }
273
274 static void webkitVideoSinkFinalize(GObject* object)
275 {
276     WEBKIT_VIDEO_SINK(object)->priv->~WebKitVideoSinkPrivate();
277     G_OBJECT_CLASS(parent_class)->finalize(object);
278 }
279
280 static gboolean webkitVideoSinkUnlock(GstBaseSink* baseSink)
281 {
282     WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
283
284     priv->scheduler.stop();
285
286     return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock, (baseSink), TRUE);
287 }
288
289 static gboolean webkitVideoSinkUnlockStop(GstBaseSink* baseSink)
290 {
291     WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
292
293     priv->scheduler.start();
294
295     return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop, (baseSink), TRUE);
296 }
297
298 static gboolean webkitVideoSinkStop(GstBaseSink* baseSink)
299 {
300     WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
301
302     priv->scheduler.stop();
303     if (priv->currentCaps) {
304         gst_caps_unref(priv->currentCaps);
305         priv->currentCaps = nullptr;
306     }
307
308     return TRUE;
309 }
310
311 static gboolean webkitVideoSinkStart(GstBaseSink* baseSink)
312 {
313     WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
314
315     priv->scheduler.start();
316
317     return TRUE;
318 }
319
320 static gboolean webkitVideoSinkSetCaps(GstBaseSink* baseSink, GstCaps* caps)
321 {
322     WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
323     WebKitVideoSinkPrivate* priv = sink->priv;
324
325     GST_DEBUG_OBJECT(sink, "Current caps %" GST_PTR_FORMAT ", setting caps %" GST_PTR_FORMAT, priv->currentCaps, caps);
326
327     GstVideoInfo videoInfo;
328     gst_video_info_init(&videoInfo);
329     if (!gst_video_info_from_caps(&videoInfo, caps)) {
330         GST_ERROR_OBJECT(sink, "Invalid caps %" GST_PTR_FORMAT, caps);
331         return FALSE;
332     }
333
334     priv->info = videoInfo;
335     gst_caps_replace(&priv->currentCaps, caps);
336     return TRUE;
337 }
338
339 static gboolean webkitVideoSinkProposeAllocation(GstBaseSink* baseSink, GstQuery* query)
340 {
341     GstCaps* caps;
342     gst_query_parse_allocation(query, &caps, 0);
343     if (!caps)
344         return FALSE;
345
346     WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
347     if (!gst_video_info_from_caps(&sink->priv->info, caps))
348         return FALSE;
349
350     gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, 0);
351     gst_query_add_allocation_meta(query, GST_VIDEO_CROP_META_API_TYPE, 0);
352 #if GST_CHECK_VERSION(1, 1, 0)
353     gst_query_add_allocation_meta(query, GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, 0);
354 #endif
355     return TRUE;
356 }
357
358 static void webkit_video_sink_class_init(WebKitVideoSinkClass* klass)
359 {
360     GObjectClass* gobjectClass = G_OBJECT_CLASS(klass);
361     GstBaseSinkClass* baseSinkClass = GST_BASE_SINK_CLASS(klass);
362     GstElementClass* elementClass = GST_ELEMENT_CLASS(klass);
363
364     gst_element_class_add_pad_template(elementClass, gst_static_pad_template_get(&s_sinkTemplate));
365     gst_element_class_set_metadata(elementClass, "WebKit video sink", "Sink/Video", "Sends video data from a GStreamer pipeline to WebKit", "Igalia, Alp Toker <alp@atoker.com>");
366
367     g_type_class_add_private(klass, sizeof(WebKitVideoSinkPrivate));
368
369     gobjectClass->finalize = webkitVideoSinkFinalize;
370
371     baseSinkClass->unlock = webkitVideoSinkUnlock;
372     baseSinkClass->unlock_stop = webkitVideoSinkUnlockStop;
373     baseSinkClass->render = webkitVideoSinkRender;
374     baseSinkClass->preroll = webkitVideoSinkRender;
375     baseSinkClass->stop = webkitVideoSinkStop;
376     baseSinkClass->start = webkitVideoSinkStart;
377     baseSinkClass->set_caps = webkitVideoSinkSetCaps;
378     baseSinkClass->propose_allocation = webkitVideoSinkProposeAllocation;
379
380     webkitVideoSinkSignals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
381             G_TYPE_FROM_CLASS(klass),
382             static_cast<GSignalFlags>(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
383             0, // Class offset
384             0, // Accumulator
385             0, // Accumulator data
386             g_cclosure_marshal_generic,
387             G_TYPE_NONE, // Return type
388             1, // Only one parameter
389             GST_TYPE_SAMPLE);
390 }
391
392
393 GstElement* webkitVideoSinkNew()
394 {
395     return GST_ELEMENT(g_object_new(WEBKIT_TYPE_VIDEO_SINK, 0));
396 }
397
398 #endif // ENABLE(VIDEO) && USE(GSTREAMER)