[GTK] PLATFORM(GTK) && !USE(COORDINATED_GRAPHICS_THREADED) is no longer possible
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / VideoSinkGStreamer.cpp
1 /*
2  *  Copyright (C) 2007 OpenedHand
3  *  Copyright (C) 2007 Alp Toker <alp@atoker.com>
4  *  Copyright (C) 2009, 2010, 2011, 2012, 2015, 2016 Igalia S.L
5  *  Copyright (C) 2015, 2016 Metrological Group B.V.
6  *
7  *  This library is free software; you can redistribute it and/or
8  *  modify it under the terms of the GNU Lesser General Public
9  *  License as published by the Free Software Foundation; either
10  *  version 2 of the License, or (at your option) any later version.
11  *
12  *  This library is distributed in the hope that it will be useful,
13  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
14  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  *  Lesser General Public License for more details.
16  *
17  *  You should have received a copy of the GNU Lesser General Public
18  *  License along with this library; if not, write to the Free Software
19  *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
20  */
21
22 /*
23  *
24  * WebKitVideoSink is a GStreamer sink element that triggers
25  * repaints in the WebKit GStreamer media player for the
26  * current video buffer.
27  */
28
29 #include "config.h"
30 #include "VideoSinkGStreamer.h"
31
32 #if ENABLE(VIDEO) && USE(GSTREAMER)
33 #include "GRefPtrGStreamer.h"
34 #include "GStreamerUtilities.h"
35 #include "IntSize.h"
36 #include <glib.h>
37 #include <gst/gst.h>
38 #include <gst/video/gstvideometa.h>
39 #include <wtf/Condition.h>
40 #include <wtf/RunLoop.h>
41
using namespace WebCore;

// CAIRO_FORMAT_RGB24 used to render the video buffers is little/big endian dependant.
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define GST_CAPS_FORMAT "{ BGRx, BGRA }"
#else
#define GST_CAPS_FORMAT "{ xRGB, ARGB }"
#endif

// Accept both plain system-memory buffers and buffers carrying the
// GstVideoGLTextureUploadMeta caps feature, in the formats above.
#define WEBKIT_VIDEO_SINK_PAD_CAPS GST_VIDEO_CAPS_MAKE_WITH_FEATURES(GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, GST_CAPS_FORMAT) ";" GST_VIDEO_CAPS_MAKE(GST_CAPS_FORMAT)

// The single, always-present sink pad of the element.
static GstStaticPadTemplate s_sinkTemplate = GST_STATIC_PAD_TEMPLATE("sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(WEBKIT_VIDEO_SINK_PAD_CAPS));


GST_DEBUG_CATEGORY_STATIC(webkitVideoSinkDebug);
#define GST_CAT_DEFAULT webkitVideoSinkDebug

// Signal ids registered in webkit_video_sink_class_init().
enum {
    REPAINT_REQUESTED,
    LAST_SIGNAL
};

static guint webkitVideoSinkSignals[LAST_SIGNAL] = { 0, };

// Forward declarations; definitions below need VideoRenderRequestScheduler first.
static void webkitVideoSinkRepaintRequested(WebKitVideoSink*, GstSample*);
static GRefPtr<GstSample> webkitVideoSinkRequestRender(WebKitVideoSink*, GstBuffer*);
// Marshals repaint requests from the GStreamer streaming thread to the code
// that emits the "repaint-requested" signal.
//
// With USE(COORDINATED_GRAPHICS_THREADED) the signal is emitted directly from
// the streaming thread inside requestRender(). Otherwise the request is
// forwarded to the main thread through a RunLoop timer and the streaming
// thread blocks on m_dataCondition until render() has run there (or until
// stop() unblocks it during teardown).
class VideoRenderRequestScheduler {
public:
    VideoRenderRequestScheduler()
#if !USE(COORDINATED_GRAPHICS_THREADED)
        // Fires render() on the main RunLoop; armed by requestRender() below.
        : m_timer(RunLoop::main(), this, &VideoRenderRequestScheduler::render)
#endif
    {
    }

    // Re-enables request processing. Called from the sink's start()/unlock_stop().
    void start()
    {
        LockHolder locker(m_sampleMutex);
        m_unlocked = false;
    }

    // Drops any pending sample, disables further processing and unblocks a
    // streaming thread possibly waiting in requestRender(). Called from the
    // sink's unlock() and stop().
    void stop()
    {
        LockHolder locker(m_sampleMutex);
        m_sample = nullptr;
        m_unlocked = true;
#if !USE(COORDINATED_GRAPHICS_THREADED)
        m_timer.stop();
        m_dataCondition.notifyOne();
#endif
    }

    // Releases the pending sample (e.g. on flush-start) without disabling
    // the scheduler or waking a waiter.
    void drain()
    {
        LockHolder locker(m_sampleMutex);
        m_sample = nullptr;
    }

    // Called from the streaming thread for every buffer to render. Returns
    // false only when sample creation failed; returning true while unlocked
    // lets the sink report GST_FLOW_OK during teardown instead of an error.
    bool requestRender(WebKitVideoSink* sink, GstBuffer* buffer)
    {
        LockHolder locker(m_sampleMutex);
        if (m_unlocked)
            return true;

        m_sample = webkitVideoSinkRequestRender(sink, buffer);
        if (!m_sample)
            return false;

#if USE(COORDINATED_GRAPHICS_THREADED)
        // Emit directly from the streaming thread; drop the lock first so
        // signal handlers can call back into this scheduler safely.
        auto sample = WTFMove(m_sample);
        locker.unlockEarly();
        if (LIKELY(GST_IS_SAMPLE(sample.get())))
            webkitVideoSinkRepaintRequested(sink, sample.get());
#else
        m_sink = sink;
        m_timer.startOneShot(0);
        // Block until the main-thread render() (or stop()) notifies us;
        // wait() releases m_sampleMutex while sleeping.
        m_dataCondition.wait(m_sampleMutex);
#endif
        return true;
    }

private:

#if !USE(COORDINATED_GRAPHICS_THREADED)
    // Timer callback, runs on the main RunLoop: emits the signal and then
    // wakes the streaming thread blocked in requestRender().
    void render()
    {
        LockHolder locker(m_sampleMutex);
        GRefPtr<GstSample> sample = WTFMove(m_sample);
        GRefPtr<WebKitVideoSink> sink = WTFMove(m_sink);
        if (sample && !m_unlocked && LIKELY(GST_IS_SAMPLE(sample.get())))
            webkitVideoSinkRepaintRequested(sink.get(), sample.get());
        m_dataCondition.notifyOne();
    }
#endif

    Lock m_sampleMutex;
    GRefPtr<GstSample> m_sample; // Pending sample; protected by m_sampleMutex.

#if !USE(COORDINATED_GRAPHICS_THREADED)
    RunLoop::Timer<VideoRenderRequestScheduler> m_timer;
    Condition m_dataCondition;
    GRefPtr<WebKitVideoSink> m_sink; // Keeps the sink alive until render() runs.
#endif

    // If this is true all processing should finish ASAP
    // This is necessary because there could be a race between
    // unlock() and render(), where unlock() wins, signals the
    // Condition, then render() tries to render a frame although
    // everything else isn't running anymore. This will lead
    // to deadlocks because render() holds the stream lock.
    //
    // Protected by the sample mutex
    bool m_unlocked { false };
};
157
158 struct _WebKitVideoSinkPrivate {
159     _WebKitVideoSinkPrivate()
160     {
161         gst_video_info_init(&info);
162     }
163
164     ~_WebKitVideoSinkPrivate()
165     {
166         if (currentCaps)
167             gst_caps_unref(currentCaps);
168     }
169
170     VideoRenderRequestScheduler scheduler;
171     GstVideoInfo info;
172     GstCaps* currentCaps;
173 };
174
// G_DEFINE_TYPE expects the parent-class pointer to be named parent_class.
#define webkit_video_sink_parent_class parent_class
// Registers the WebKitVideoSink GType (derived from GstVideoSink) and
// initializes the element's debug category while the type is registered.
G_DEFINE_TYPE_WITH_CODE(WebKitVideoSink, webkit_video_sink, GST_TYPE_VIDEO_SINK, GST_DEBUG_CATEGORY_INIT(webkitVideoSinkDebug, "webkitsink", 0, "webkit video sink"));
177
178
179 static void webkit_video_sink_init(WebKitVideoSink* sink)
180 {
181     sink->priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, WEBKIT_TYPE_VIDEO_SINK, WebKitVideoSinkPrivate);
182     g_object_set(GST_BASE_SINK(sink), "enable-last-sample", FALSE, nullptr);
183     new (sink->priv) WebKitVideoSinkPrivate();
184 }
185
// Emits the "repaint-requested" signal carrying the sample to be painted.
// May run on the streaming thread or the main thread depending on the
// scheduler configuration (see VideoRenderRequestScheduler).
static void webkitVideoSinkRepaintRequested(WebKitVideoSink* sink, GstSample* sample)
{
    g_signal_emit(sink, webkitVideoSinkSignals[REPAINT_REQUESTED], 0, sample);
}
190
191 static GRefPtr<GstSample> webkitVideoSinkRequestRender(WebKitVideoSink* sink, GstBuffer* buffer)
192 {
193     WebKitVideoSinkPrivate* priv = sink->priv;
194     GRefPtr<GstSample> sample = adoptGRef(gst_sample_new(buffer, priv->currentCaps, nullptr, nullptr));
195
196     // The video info structure is valid only if the sink handled an allocation query.
197     GstVideoFormat format = GST_VIDEO_INFO_FORMAT(&priv->info);
198     if (format == GST_VIDEO_FORMAT_UNKNOWN)
199         return nullptr;
200
201 #if !(USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS))
202     // Cairo's ARGB has pre-multiplied alpha while GStreamer's doesn't.
203     // Here we convert to Cairo's ARGB.
204     if (format == GST_VIDEO_FORMAT_ARGB || format == GST_VIDEO_FORMAT_BGRA) {
205         // Because GstBaseSink::render() only owns the buffer reference in the
206         // method scope we can't use gst_buffer_make_writable() here. Also
207         // The buffer content should not be changed here because the same buffer
208         // could be passed multiple times to this method (in theory).
209
210         GstBuffer* newBuffer = WebCore::createGstBuffer(buffer);
211
212         // Check if allocation failed.
213         if (UNLIKELY(!newBuffer))
214             return nullptr;
215
216         // We don't use Color::premultipliedARGBFromColor() here because
217         // one function call per video pixel is just too expensive:
218         // For 720p/PAL for example this means 1280*720*25=23040000
219         // function calls per second!
220         GstVideoFrame sourceFrame;
221         GstVideoFrame destinationFrame;
222
223         if (!gst_video_frame_map(&sourceFrame, &priv->info, buffer, GST_MAP_READ)) {
224             gst_buffer_unref(newBuffer);
225             return nullptr;
226         }
227         if (!gst_video_frame_map(&destinationFrame, &priv->info, newBuffer, GST_MAP_WRITE)) {
228             gst_video_frame_unmap(&sourceFrame);
229             gst_buffer_unref(newBuffer);
230             return nullptr;
231         }
232
233         const guint8* source = static_cast<guint8*>(GST_VIDEO_FRAME_PLANE_DATA(&sourceFrame, 0));
234         guint8* destination = static_cast<guint8*>(GST_VIDEO_FRAME_PLANE_DATA(&destinationFrame, 0));
235
236         for (int x = 0; x < GST_VIDEO_FRAME_HEIGHT(&sourceFrame); x++) {
237             for (int y = 0; y < GST_VIDEO_FRAME_WIDTH(&sourceFrame); y++) {
238 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
239                 unsigned short alpha = source[3];
240                 destination[0] = (source[0] * alpha + 128) / 255;
241                 destination[1] = (source[1] * alpha + 128) / 255;
242                 destination[2] = (source[2] * alpha + 128) / 255;
243                 destination[3] = alpha;
244 #else
245                 unsigned short alpha = source[0];
246                 destination[0] = alpha;
247                 destination[1] = (source[1] * alpha + 128) / 255;
248                 destination[2] = (source[2] * alpha + 128) / 255;
249                 destination[3] = (source[3] * alpha + 128) / 255;
250 #endif
251                 source += 4;
252                 destination += 4;
253             }
254         }
255
256         gst_video_frame_unmap(&sourceFrame);
257         gst_video_frame_unmap(&destinationFrame);
258         sample = adoptGRef(gst_sample_new(newBuffer, priv->currentCaps, nullptr, nullptr));
259         gst_buffer_unref(newBuffer);
260     }
261 #endif
262
263     return sample;
264 }
265
266 static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buffer)
267 {
268     WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
269     return sink->priv->scheduler.requestRender(sink, buffer) ? GST_FLOW_OK : GST_FLOW_ERROR;
270 }
271
272 static void webkitVideoSinkFinalize(GObject* object)
273 {
274     WEBKIT_VIDEO_SINK(object)->priv->~WebKitVideoSinkPrivate();
275     G_OBJECT_CLASS(parent_class)->finalize(object);
276 }
277
278 static gboolean webkitVideoSinkUnlock(GstBaseSink* baseSink)
279 {
280     WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
281
282     priv->scheduler.stop();
283
284     return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock, (baseSink), TRUE);
285 }
286
287 static gboolean webkitVideoSinkUnlockStop(GstBaseSink* baseSink)
288 {
289     WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
290
291     priv->scheduler.start();
292
293     return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop, (baseSink), TRUE);
294 }
295
296 static gboolean webkitVideoSinkStop(GstBaseSink* baseSink)
297 {
298     WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
299
300     priv->scheduler.stop();
301     if (priv->currentCaps) {
302         gst_caps_unref(priv->currentCaps);
303         priv->currentCaps = nullptr;
304     }
305
306     return TRUE;
307 }
308
309 static gboolean webkitVideoSinkStart(GstBaseSink* baseSink)
310 {
311     WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;
312
313     priv->scheduler.start();
314
315     return TRUE;
316 }
317
318 static gboolean webkitVideoSinkSetCaps(GstBaseSink* baseSink, GstCaps* caps)
319 {
320     WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
321     WebKitVideoSinkPrivate* priv = sink->priv;
322
323     GST_DEBUG_OBJECT(sink, "Current caps %" GST_PTR_FORMAT ", setting caps %" GST_PTR_FORMAT, priv->currentCaps, caps);
324
325     GstVideoInfo videoInfo;
326     gst_video_info_init(&videoInfo);
327     if (!gst_video_info_from_caps(&videoInfo, caps)) {
328         GST_ERROR_OBJECT(sink, "Invalid caps %" GST_PTR_FORMAT, caps);
329         return FALSE;
330     }
331
332     priv->info = videoInfo;
333     gst_caps_replace(&priv->currentCaps, caps);
334     return TRUE;
335 }
336
337 static gboolean webkitVideoSinkProposeAllocation(GstBaseSink* baseSink, GstQuery* query)
338 {
339     GstCaps* caps;
340     gst_query_parse_allocation(query, &caps, nullptr);
341     if (!caps)
342         return FALSE;
343
344     WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
345     if (!gst_video_info_from_caps(&sink->priv->info, caps))
346         return FALSE;
347
348     gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, nullptr);
349     gst_query_add_allocation_meta(query, GST_VIDEO_CROP_META_API_TYPE, nullptr);
350     gst_query_add_allocation_meta(query, GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, nullptr);
351     return TRUE;
352 }
353
354 static gboolean webkitVideoSinkEvent(GstBaseSink* baseSink, GstEvent* event)
355 {
356     switch (GST_EVENT_TYPE(event)) {
357     case GST_EVENT_FLUSH_START: {
358         WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
359         sink->priv->scheduler.drain();
360
361         GST_DEBUG_OBJECT(sink, "Flush-start, releasing m_sample");
362         }
363         FALLTHROUGH;
364     default:
365         return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, event, (baseSink, event), TRUE);
366     }
367 }
368
369 static void webkit_video_sink_class_init(WebKitVideoSinkClass* klass)
370 {
371     GObjectClass* gobjectClass = G_OBJECT_CLASS(klass);
372     GstBaseSinkClass* baseSinkClass = GST_BASE_SINK_CLASS(klass);
373     GstElementClass* elementClass = GST_ELEMENT_CLASS(klass);
374
375     gst_element_class_add_pad_template(elementClass, gst_static_pad_template_get(&s_sinkTemplate));
376     gst_element_class_set_metadata(elementClass, "WebKit video sink", "Sink/Video", "Sends video data from a GStreamer pipeline to WebKit", "Igalia, Alp Toker <alp@atoker.com>");
377
378     g_type_class_add_private(klass, sizeof(WebKitVideoSinkPrivate));
379
380     gobjectClass->finalize = webkitVideoSinkFinalize;
381
382     baseSinkClass->unlock = webkitVideoSinkUnlock;
383     baseSinkClass->unlock_stop = webkitVideoSinkUnlockStop;
384     baseSinkClass->render = webkitVideoSinkRender;
385     baseSinkClass->preroll = webkitVideoSinkRender;
386     baseSinkClass->stop = webkitVideoSinkStop;
387     baseSinkClass->start = webkitVideoSinkStart;
388     baseSinkClass->set_caps = webkitVideoSinkSetCaps;
389     baseSinkClass->propose_allocation = webkitVideoSinkProposeAllocation;
390     baseSinkClass->event = webkitVideoSinkEvent;
391
392     webkitVideoSinkSignals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
393             G_TYPE_FROM_CLASS(klass),
394             static_cast<GSignalFlags>(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
395             0, // Class offset
396             0, // Accumulator
397             0, // Accumulator data
398             g_cclosure_marshal_generic,
399             G_TYPE_NONE, // Return type
400             1, // Only one parameter
401             GST_TYPE_SAMPLE);
402 }
403
404
405 GstElement* webkitVideoSinkNew()
406 {
407     return GST_ELEMENT(g_object_new(WEBKIT_TYPE_VIDEO_SINK, nullptr));
408 }
409
410 #endif // ENABLE(VIDEO) && USE(GSTREAMER)