REGRESSION(r207879-207891): [GStreamer] Introduced many layout test failures and...
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / VideoSinkGStreamer.cpp
1 /*
2  *  Copyright (C) 2007 OpenedHand
3  *  Copyright (C) 2007 Alp Toker <alp@atoker.com>
4  *  Copyright (C) 2009, 2010, 2011, 2012, 2015, 2016 Igalia S.L
5  *  Copyright (C) 2015, 2016 Metrological Group B.V.
6  *
7  *  This library is free software; you can redistribute it and/or
8  *  modify it under the terms of the GNU Lesser General Public
9  *  License as published by the Free Software Foundation; either
10  *  version 2 of the License, or (at your option) any later version.
11  *
12  *  This library is distributed in the hope that it will be useful,
13  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
14  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  *  Lesser General Public License for more details.
16  *
17  *  You should have received a copy of the GNU Lesser General Public
18  *  License along with this library; if not, write to the Free Software
19  *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
20  */
21
22 /*
23  *
24  * WebKitVideoSink is a GStreamer sink element that triggers
25  * repaints in the WebKit GStreamer media player for the
26  * current video buffer.
27  */
28
29 #include "config.h"
30 #include "VideoSinkGStreamer.h"
31
32 #if ENABLE(VIDEO) && USE(GSTREAMER)
33 #include "GRefPtrGStreamer.h"
34 #include "GStreamerUtilities.h"
35 #include "IntSize.h"
36 #include <glib.h>
37 #include <gst/gst.h>
38 #include <gst/video/gstvideometa.h>
39 #include <wtf/Condition.h>
40 #include <wtf/RunLoop.h>
41
42 using namespace WebCore;
43
44 // CAIRO_FORMAT_RGB24 used to render the video buffers is little/big endian dependant.
45 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
46 #define GST_CAPS_FORMAT "{ BGRx, BGRA }"
47 #else
48 #define GST_CAPS_FORMAT "{ xRGB, ARGB }"
49 #endif
50
51 #define WEBKIT_VIDEO_SINK_PAD_CAPS GST_VIDEO_CAPS_MAKE_WITH_FEATURES(GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, GST_CAPS_FORMAT) ";" GST_VIDEO_CAPS_MAKE(GST_CAPS_FORMAT)
52
53 static GstStaticPadTemplate s_sinkTemplate = GST_STATIC_PAD_TEMPLATE("sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(WEBKIT_VIDEO_SINK_PAD_CAPS));
54
55
56 GST_DEBUG_CATEGORY_STATIC(webkitVideoSinkDebug);
57 #define GST_CAT_DEFAULT webkitVideoSinkDebug
58
59 enum {
60     REPAINT_REQUESTED,
61     LAST_SIGNAL
62 };
63
64 static guint webkitVideoSinkSignals[LAST_SIGNAL] = { 0, };
65
66 static void webkitVideoSinkRepaintRequested(WebKitVideoSink*, GstSample*);
67 static GRefPtr<GstSample> webkitVideoSinkRequestRender(WebKitVideoSink*, GstBuffer*);
68
// Hands decoded samples from the GStreamer streaming thread over to the
// repaint-requested signal emission.
//
// With COORDINATED_GRAPHICS_THREADED the signal is emitted synchronously
// from requestRender() on the streaming thread. Otherwise the sample is
// posted to a main-thread RunLoop timer and the streaming thread blocks on
// m_dataCondition until render() has dispatched it (or stop() aborts).
class VideoRenderRequestScheduler {
public:
    VideoRenderRequestScheduler()
#if !USE(COORDINATED_GRAPHICS_THREADED)
        : m_timer(RunLoop::main(), this, &VideoRenderRequestScheduler::render)
#endif
    {
#if PLATFORM(GTK) && !USE(COORDINATED_GRAPHICS_THREADED)
        // Use a higher priority than WebCore timers (G_PRIORITY_HIGH_IDLE + 20).
        m_timer.setPriority(G_PRIORITY_HIGH_IDLE + 19);
#endif
    }

    // Re-enables rendering after stop(); called from the start and
    // unlock_stop vfuncs.
    void start()
    {
        LockHolder locker(m_sampleMutex);
        m_unlocked = false;
    }

    // Drops any pending sample and refuses further renders. Also wakes a
    // streaming thread blocked in requestRender() so unlock cannot deadlock.
    void stop()
    {
        LockHolder locker(m_sampleMutex);
        m_sample = nullptr;
        m_unlocked = true;
#if !USE(COORDINATED_GRAPHICS_THREADED)
        m_timer.stop();
        m_dataCondition.notifyOne();
#endif
    }

    // Releases the pending sample (e.g. on flush-start) without unlocking.
    void drain()
    {
        LockHolder locker(m_sampleMutex);
        m_sample = nullptr;
    }

    // Called from the streaming thread for every buffer. Returns false only
    // when preparing the sample failed; returning true while unlocked lets
    // the caller report GST_FLOW_OK during shutdown instead of an error.
    bool requestRender(WebKitVideoSink* sink, GstBuffer* buffer)
    {
        LockHolder locker(m_sampleMutex);
        if (m_unlocked)
            return true;

        m_sample = webkitVideoSinkRequestRender(sink, buffer);
        if (!m_sample)
            return false;

#if USE(COORDINATED_GRAPHICS_THREADED)
        // Emit directly from the streaming thread; the sample is consumed
        // synchronously by the signal handler.
        if (LIKELY(GST_IS_SAMPLE(m_sample.get())))
            webkitVideoSinkRepaintRequested(sink, m_sample.get());
        m_sample = nullptr;
#else
        // Dispatch on the main thread and block until render() signals
        // completion; wait() releases m_sampleMutex while blocked.
        m_sink = sink;
        m_timer.startOneShot(0);
        m_dataCondition.wait(m_sampleMutex);
#endif
        return true;
    }

private:

#if !USE(COORDINATED_GRAPHICS_THREADED)
    // Main-thread timer callback: emit repaint-requested for the pending
    // sample (unless stop() raced us) and wake the streaming thread.
    void render()
    {
        LockHolder locker(m_sampleMutex);
        GRefPtr<GstSample> sample = WTFMove(m_sample);
        GRefPtr<WebKitVideoSink> sink = WTFMove(m_sink);
        if (sample && !m_unlocked && LIKELY(GST_IS_SAMPLE(sample.get())))
            webkitVideoSinkRepaintRequested(sink.get(), sample.get());
        m_dataCondition.notifyOne();
    }
#endif

    Lock m_sampleMutex;
    GRefPtr<GstSample> m_sample; // Pending sample; protected by m_sampleMutex.

#if !USE(COORDINATED_GRAPHICS_THREADED)
    RunLoop::Timer<VideoRenderRequestScheduler> m_timer;
    Condition m_dataCondition;
    GRefPtr<WebKitVideoSink> m_sink; // Keeps the sink alive until render() runs.
#endif

    // If this is true all processing should finish ASAP
    // This is necessary because there could be a race between
    // unlock() and render(), where unlock() wins, signals the
    // Condition, then render() tries to render a frame although
    // everything else isn't running anymore. This will lead
    // to deadlocks because render() holds the stream lock.
    //
    // Protected by the sample mutex
    bool m_unlocked { false };
};
160
161 struct _WebKitVideoSinkPrivate {
162     _WebKitVideoSinkPrivate()
163     {
164         gst_video_info_init(&info);
165     }
166
167     ~_WebKitVideoSinkPrivate()
168     {
169         if (currentCaps)
170             gst_caps_unref(currentCaps);
171     }
172
173     VideoRenderRequestScheduler scheduler;
174     GstVideoInfo info;
175     GstCaps* currentCaps;
176 };
177
#define webkit_video_sink_parent_class parent_class
// Registers the WebKitVideoSink GType (derived from GstVideoSink) and
// initializes the element's debug category as part of type registration.
G_DEFINE_TYPE_WITH_CODE(WebKitVideoSink, webkit_video_sink, GST_TYPE_VIDEO_SINK, GST_DEBUG_CATEGORY_INIT(webkitVideoSinkDebug, "webkitsink", 0, "webkit video sink"));
180
181
182 static void webkit_video_sink_init(WebKitVideoSink* sink)
183 {
184     sink->priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, WEBKIT_TYPE_VIDEO_SINK, WebKitVideoSinkPrivate);
185     g_object_set(GST_BASE_SINK(sink), "enable-last-sample", FALSE, NULL);
186     new (sink->priv) WebKitVideoSinkPrivate();
187 }
188
// Emits the repaint-requested action signal, handing the sample to whoever
// is connected (the media player's repaint handler).
static void webkitVideoSinkRepaintRequested(WebKitVideoSink* sink, GstSample* sample)
{
    g_signal_emit(sink, webkitVideoSinkSignals[REPAINT_REQUESTED], 0, sample);
}
193
// Wraps the given buffer into a GstSample tagged with the current caps.
// In the Cairo-painting configuration, ARGB/BGRA buffers are copied and
// their alpha premultiplied, because Cairo expects premultiplied alpha.
// Returns nullptr on failure (unknown format, allocation or mapping error).
static GRefPtr<GstSample> webkitVideoSinkRequestRender(WebKitVideoSink* sink, GstBuffer* buffer)
{
    WebKitVideoSinkPrivate* priv = sink->priv;
    GRefPtr<GstSample> sample = adoptGRef(gst_sample_new(buffer, priv->currentCaps, nullptr, nullptr));

    // The video info structure is valid only if the sink handled an allocation query.
    GstVideoFormat format = GST_VIDEO_INFO_FORMAT(&priv->info);
    if (format == GST_VIDEO_FORMAT_UNKNOWN)
        return nullptr;

#if !(USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS))
    // Cairo's ARGB has pre-multiplied alpha while GStreamer's doesn't.
    // Here we convert to Cairo's ARGB.
    if (format == GST_VIDEO_FORMAT_ARGB || format == GST_VIDEO_FORMAT_BGRA) {
        // Because GstBaseSink::render() only owns the buffer reference in the
        // method scope we can't use gst_buffer_make_writable() here. Also
        // The buffer content should not be changed here because the same buffer
        // could be passed multiple times to this method (in theory).

        GstBuffer* newBuffer = WebCore::createGstBuffer(buffer);

        // Check if allocation failed.
        if (UNLIKELY(!newBuffer))
            return nullptr;

        // We don't use Color::premultipliedARGBFromColor() here because
        // one function call per video pixel is just too expensive:
        // For 720p/PAL for example this means 1280*720*25=23040000
        // function calls per second!
        GstVideoFrame sourceFrame;
        GstVideoFrame destinationFrame;

        if (!gst_video_frame_map(&sourceFrame, &priv->info, buffer, GST_MAP_READ)) {
            gst_buffer_unref(newBuffer);
            return nullptr;
        }
        if (!gst_video_frame_map(&destinationFrame, &priv->info, newBuffer, GST_MAP_WRITE)) {
            gst_video_frame_unmap(&sourceFrame);
            gst_buffer_unref(newBuffer);
            return nullptr;
        }

        const guint8* source = static_cast<guint8*>(GST_VIDEO_FRAME_PLANE_DATA(&sourceFrame, 0));
        guint8* destination = static_cast<guint8*>(GST_VIDEO_FRAME_PLANE_DATA(&destinationFrame, 0));

        // Note: x iterates rows (height) and y columns (width) — the names
        // are swapped, but every pixel is still visited exactly once.
        for (int x = 0; x < GST_VIDEO_FRAME_HEIGHT(&sourceFrame); x++) {
            for (int y = 0; y < GST_VIDEO_FRAME_WIDTH(&sourceFrame); y++) {
                // +128 rounds (x * a + 128) / 255 to the nearest integer.
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
                // BGRA in memory: alpha is the last byte of each pixel.
                unsigned short alpha = source[3];
                destination[0] = (source[0] * alpha + 128) / 255;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = alpha;
#else
                // ARGB in memory: alpha is the first byte of each pixel.
                unsigned short alpha = source[0];
                destination[0] = alpha;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = (source[3] * alpha + 128) / 255;
#endif
                source += 4;
                destination += 4;
            }
        }

        gst_video_frame_unmap(&sourceFrame);
        gst_video_frame_unmap(&destinationFrame);
        sample = adoptGRef(gst_sample_new(newBuffer, priv->currentCaps, nullptr, nullptr));
        gst_buffer_unref(newBuffer);
    }
#endif

    return sample;
}
268
269 static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buffer)
270 {
271     WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
272     return sink->priv->scheduler.requestRender(sink, buffer) ? GST_FLOW_OK : GST_FLOW_ERROR;
273 }
274
// GObject finalize: run the C++ destructor on the placement-new'd private
// struct (GObject frees the storage itself), then chain up to the parent.
static void webkitVideoSinkFinalize(GObject* object)
{
    WEBKIT_VIDEO_SINK(object)->priv->~WebKitVideoSinkPrivate();
    G_OBJECT_CLASS(parent_class)->finalize(object);
}
280
281 static gboolean webkitVideoSinkUnlock(GstBaseSink* baseSink)
282 {
283     WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
284
285     priv->scheduler.stop();
286
287     return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock, (baseSink), TRUE);
288 }
289
290 static gboolean webkitVideoSinkUnlockStop(GstBaseSink* baseSink)
291 {
292     WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
293
294     priv->scheduler.start();
295
296     return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop, (baseSink), TRUE);
297 }
298
299 static gboolean webkitVideoSinkStop(GstBaseSink* baseSink)
300 {
301     WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
302
303     priv->scheduler.stop();
304     if (priv->currentCaps) {
305         gst_caps_unref(priv->currentCaps);
306         priv->currentCaps = nullptr;
307     }
308
309     return TRUE;
310 }
311
312 static gboolean webkitVideoSinkStart(GstBaseSink* baseSink)
313 {
314     WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
315
316     priv->scheduler.start();
317
318     return TRUE;
319 }
320
321 static gboolean webkitVideoSinkSetCaps(GstBaseSink* baseSink, GstCaps* caps)
322 {
323     WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
324     WebKitVideoSinkPrivate* priv = sink->priv;
325
326     GST_DEBUG_OBJECT(sink, "Current caps %" GST_PTR_FORMAT ", setting caps %" GST_PTR_FORMAT, priv->currentCaps, caps);
327
328     GstVideoInfo videoInfo;
329     gst_video_info_init(&videoInfo);
330     if (!gst_video_info_from_caps(&videoInfo, caps)) {
331         GST_ERROR_OBJECT(sink, "Invalid caps %" GST_PTR_FORMAT, caps);
332         return FALSE;
333     }
334
335     priv->info = videoInfo;
336     gst_caps_replace(&priv->currentCaps, caps);
337     return TRUE;
338 }
339
340 static gboolean webkitVideoSinkProposeAllocation(GstBaseSink* baseSink, GstQuery* query)
341 {
342     GstCaps* caps;
343     gst_query_parse_allocation(query, &caps, 0);
344     if (!caps)
345         return FALSE;
346
347     WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
348     if (!gst_video_info_from_caps(&sink->priv->info, caps))
349         return FALSE;
350
351     gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, 0);
352     gst_query_add_allocation_meta(query, GST_VIDEO_CROP_META_API_TYPE, 0);
353     gst_query_add_allocation_meta(query, GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, 0);
354     return TRUE;
355 }
356
357 static gboolean webkitVideoSinkEvent(GstBaseSink* baseSink, GstEvent* event)
358 {
359     switch (GST_EVENT_TYPE(event)) {
360     case GST_EVENT_FLUSH_START: {
361         WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
362         sink->priv->scheduler.drain();
363
364         GST_DEBUG_OBJECT(sink, "Flush-start, releasing m_sample");
365         }
366         FALLTHROUGH;
367     default:
368         return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, event, (baseSink, event), TRUE);
369     }
370 }
371
372 static void webkit_video_sink_class_init(WebKitVideoSinkClass* klass)
373 {
374     GObjectClass* gobjectClass = G_OBJECT_CLASS(klass);
375     GstBaseSinkClass* baseSinkClass = GST_BASE_SINK_CLASS(klass);
376     GstElementClass* elementClass = GST_ELEMENT_CLASS(klass);
377
378     gst_element_class_add_pad_template(elementClass, gst_static_pad_template_get(&s_sinkTemplate));
379     gst_element_class_set_metadata(elementClass, "WebKit video sink", "Sink/Video", "Sends video data from a GStreamer pipeline to WebKit", "Igalia, Alp Toker <alp@atoker.com>");
380
381     g_type_class_add_private(klass, sizeof(WebKitVideoSinkPrivate));
382
383     gobjectClass->finalize = webkitVideoSinkFinalize;
384
385     baseSinkClass->unlock = webkitVideoSinkUnlock;
386     baseSinkClass->unlock_stop = webkitVideoSinkUnlockStop;
387     baseSinkClass->render = webkitVideoSinkRender;
388     baseSinkClass->preroll = webkitVideoSinkRender;
389     baseSinkClass->stop = webkitVideoSinkStop;
390     baseSinkClass->start = webkitVideoSinkStart;
391     baseSinkClass->set_caps = webkitVideoSinkSetCaps;
392     baseSinkClass->propose_allocation = webkitVideoSinkProposeAllocation;
393     baseSinkClass->event = webkitVideoSinkEvent;
394
395     webkitVideoSinkSignals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
396             G_TYPE_FROM_CLASS(klass),
397             static_cast<GSignalFlags>(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
398             0, // Class offset
399             0, // Accumulator
400             0, // Accumulator data
401             g_cclosure_marshal_generic,
402             G_TYPE_NONE, // Return type
403             1, // Only one parameter
404             GST_TYPE_SAMPLE);
405 }
406
407
408 GstElement* webkitVideoSinkNew()
409 {
410     return GST_ELEMENT(g_object_new(WEBKIT_TYPE_VIDEO_SINK, 0));
411 }
412
413 #endif // ENABLE(VIDEO) && USE(GSTREAMER)