/*
 *  Copyright (C) 2007 OpenedHand
 *  Copyright (C) 2007 Alp Toker <alp@atoker.com>
 *  Copyright (C) 2009, 2010, 2011, 2012, 2015, 2016 Igalia S.L
 *  Copyright (C) 2015, 2016 Metrological Group B.V.
 *
 *  This library is free software; you can redistribute it and/or
 *  modify it under the terms of the GNU Lesser General Public
 *  License as published by the Free Software Foundation; either
 *  version 2 of the License, or (at your option) any later version.
 *
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  Lesser General Public License for more details.
 *
 *  You should have received a copy of the GNU Lesser General Public
 *  License along with this library; if not, write to the Free Software
 *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
 */

/*
 * WebKitVideoSink is a GStreamer sink element that triggers
 * repaints in the WebKit GStreamer media player for the
 * current video buffer.
 */

#include "config.h"
#include "VideoSinkGStreamer.h"

#if ENABLE(VIDEO) && USE(GSTREAMER)
#include "GRefPtrGStreamer.h"
#include "GStreamerUtilities.h"
#include "IntSize.h"
#include <glib.h>
#include <gst/gst.h>
#include <gst/video/gstvideometa.h>
#include <wtf/Condition.h>
#include <wtf/RunLoop.h>

using namespace WebCore;

// CAIRO_FORMAT_RGB24, used to render the video buffers, is little/big endian dependent.
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define GST_CAPS_FORMAT "{ BGRx, BGRA }"
#else
#define GST_CAPS_FORMAT "{ xRGB, ARGB }"
#endif

#define WEBKIT_VIDEO_SINK_PAD_CAPS GST_VIDEO_CAPS_MAKE_WITH_FEATURES(GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, GST_CAPS_FORMAT) ";" GST_VIDEO_CAPS_MAKE(GST_CAPS_FORMAT)
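// The pad caps advertise the same pixel formats twice: once with the GL
// texture upload meta caps feature, so GL-capable upstream elements can
// negotiate texture uploads, and once in plain system memory as a fallback.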

static GstStaticPadTemplate s_sinkTemplate = GST_STATIC_PAD_TEMPLATE("sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS(WEBKIT_VIDEO_SINK_PAD_CAPS));

GST_DEBUG_CATEGORY_STATIC(webkitVideoSinkDebug);
#define GST_CAT_DEFAULT webkitVideoSinkDebug

enum {
    REPAINT_REQUESTED,
    LAST_SIGNAL
};

static guint webkitVideoSinkSignals[LAST_SIGNAL] = { 0, };

static void webkitVideoSinkRepaintRequested(WebKitVideoSink*, GstSample*);
static GRefPtr<GstSample> webkitVideoSinkRequestRender(WebKitVideoSink*, GstBuffer*);

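// VideoRenderRequestScheduler hands samples from the GStreamer streaming
// thread over to WebKit. With COORDINATED_GRAPHICS_THREADED the
// repaint-requested signal is emitted directly from the streaming thread;
// otherwise the sample is posted to a main-thread timer and the streaming
// thread blocks on a condition until the repaint has been dispatched.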
class VideoRenderRequestScheduler {
public:
    VideoRenderRequestScheduler()
#if !USE(COORDINATED_GRAPHICS_THREADED)
        : m_timer(RunLoop::main(), this, &VideoRenderRequestScheduler::render)
#endif
    {
#if PLATFORM(GTK) && !USE(COORDINATED_GRAPHICS_THREADED)
        // WebCore timers run at G_PRIORITY_HIGH_IDLE + 20, so use a slightly higher priority.
        m_timer.setPriority(G_PRIORITY_HIGH_IDLE + 19);
#endif
    }

    void start()
    {
        LockHolder locker(m_sampleMutex);
        m_unlocked = false;
    }

    void stop()
    {
        LockHolder locker(m_sampleMutex);
        m_sample = nullptr;
        m_unlocked = true;
#if !USE(COORDINATED_GRAPHICS_THREADED)
        m_timer.stop();
        m_dataCondition.notifyOne();
#endif
    }

    void drain()
    {
        LockHolder locker(m_sampleMutex);
        m_sample = nullptr;
    }

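    // Called from GstBaseSink::render() on the streaming thread. Returns false
    // only when creating the sample fails; a true return with no repaint means
    // the scheduler was already unlocked and the frame is intentionally dropped.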
    bool requestRender(WebKitVideoSink* sink, GstBuffer* buffer)
    {
        LockHolder locker(m_sampleMutex);
        if (m_unlocked)
            return true;

        m_sample = webkitVideoSinkRequestRender(sink, buffer);
        if (!m_sample)
            return false;

#if USE(COORDINATED_GRAPHICS_THREADED)
        auto sample = WTFMove(m_sample);
        locker.unlockEarly();
        if (LIKELY(GST_IS_SAMPLE(sample.get())))
            webkitVideoSinkRepaintRequested(sink, sample.get());
#else
        m_sink = sink;
        m_timer.startOneShot(0);
        m_dataCondition.wait(m_sampleMutex);
#endif
        return true;
    }

private:

#if !USE(COORDINATED_GRAPHICS_THREADED)
    void render()
    {
        LockHolder locker(m_sampleMutex);
        GRefPtr<GstSample> sample = WTFMove(m_sample);
        GRefPtr<WebKitVideoSink> sink = WTFMove(m_sink);
        if (sample && !m_unlocked && LIKELY(GST_IS_SAMPLE(sample.get())))
            webkitVideoSinkRepaintRequested(sink.get(), sample.get());
        m_dataCondition.notifyOne();
    }
#endif

    Lock m_sampleMutex;
    GRefPtr<GstSample> m_sample;

#if !USE(COORDINATED_GRAPHICS_THREADED)
    RunLoop::Timer<VideoRenderRequestScheduler> m_timer;
    Condition m_dataCondition;
    GRefPtr<WebKitVideoSink> m_sink;
#endif

    // When true, all processing should finish as soon as possible. This guards
    // against a race between unlock() and render(): if unlock() wins and
    // signals the condition, render() could still try to render a frame even
    // though everything else has already stopped, which would deadlock because
    // render() holds the stream lock.
    //
    // Protected by the sample mutex.
    bool m_unlocked { false };
};

struct _WebKitVideoSinkPrivate {
    _WebKitVideoSinkPrivate()
    {
        gst_video_info_init(&info);
    }

    ~_WebKitVideoSinkPrivate()
    {
        if (currentCaps)
            gst_caps_unref(currentCaps);
    }

    VideoRenderRequestScheduler scheduler;
    GstVideoInfo info;
    GstCaps* currentCaps { nullptr };
};

#define webkit_video_sink_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE(WebKitVideoSink, webkit_video_sink, GST_TYPE_VIDEO_SINK, GST_DEBUG_CATEGORY_INIT(webkitVideoSinkDebug, "webkitsink", 0, "webkit video sink"));

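// The private struct is allocated by GObject as raw zeroed storage, so its C++
// constructor and destructor have to be run by hand: placement new here in
// init, and an explicit destructor call in webkitVideoSinkFinalize() below.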
static void webkit_video_sink_init(WebKitVideoSink* sink)
{
    sink->priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, WEBKIT_TYPE_VIDEO_SINK, WebKitVideoSinkPrivate);
    g_object_set(GST_BASE_SINK(sink), "enable-last-sample", FALSE, NULL);
    new (sink->priv) WebKitVideoSinkPrivate();
}

static void webkitVideoSinkRepaintRequested(WebKitVideoSink* sink, GstSample* sample)
{
    g_signal_emit(sink, webkitVideoSinkSignals[REPAINT_REQUESTED], 0, sample);
}

static GRefPtr<GstSample> webkitVideoSinkRequestRender(WebKitVideoSink* sink, GstBuffer* buffer)
{
    WebKitVideoSinkPrivate* priv = sink->priv;
    GRefPtr<GstSample> sample = adoptGRef(gst_sample_new(buffer, priv->currentCaps, nullptr, nullptr));

    // The video info structure is valid only if the sink handled an allocation query.
    GstVideoFormat format = GST_VIDEO_INFO_FORMAT(&priv->info);
    if (format == GST_VIDEO_FORMAT_UNKNOWN)
        return nullptr;

#if !(USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS))
    // Cairo's ARGB has pre-multiplied alpha while GStreamer's doesn't.
    // Here we convert to Cairo's ARGB.
    if (format == GST_VIDEO_FORMAT_ARGB || format == GST_VIDEO_FORMAT_BGRA) {
        // Because GstBaseSink::render() only owns the buffer reference for the
        // scope of the method, we can't use gst_buffer_make_writable() here.
        // The buffer content also must not be changed in place, because the
        // same buffer could (in theory) be passed to this method multiple times.

        GstBuffer* newBuffer = WebCore::createGstBuffer(buffer);

        // Check if allocation failed.
        if (UNLIKELY(!newBuffer))
            return nullptr;

        // We don't use Color::premultipliedARGBFromColor() here because
        // one function call per video pixel is just too expensive:
        // for 720p at PAL's 25 fps, for example, this means
        // 1280*720*25 = 23040000 function calls per second!
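        // Adding 128 before the division by 255 below rounds to the nearest
        // value instead of truncating: e.g. with c = 129, alpha = 128 the
        // product is 16512, and (16512 + 128) / 255 = 65 rather than the
        // truncated 64.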
        GstVideoFrame sourceFrame;
        GstVideoFrame destinationFrame;

        if (!gst_video_frame_map(&sourceFrame, &priv->info, buffer, GST_MAP_READ)) {
            gst_buffer_unref(newBuffer);
            return nullptr;
        }
        if (!gst_video_frame_map(&destinationFrame, &priv->info, newBuffer, GST_MAP_WRITE)) {
            gst_video_frame_unmap(&sourceFrame);
            gst_buffer_unref(newBuffer);
            return nullptr;
        }

        const guint8* source = static_cast<const guint8*>(GST_VIDEO_FRAME_PLANE_DATA(&sourceFrame, 0));
        guint8* destination = static_cast<guint8*>(GST_VIDEO_FRAME_PLANE_DATA(&destinationFrame, 0));

        for (int row = 0; row < GST_VIDEO_FRAME_HEIGHT(&sourceFrame); row++) {
            for (int column = 0; column < GST_VIDEO_FRAME_WIDTH(&sourceFrame); column++) {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
                unsigned short alpha = source[3];
                destination[0] = (source[0] * alpha + 128) / 255;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = alpha;
#else
                unsigned short alpha = source[0];
                destination[0] = alpha;
                destination[1] = (source[1] * alpha + 128) / 255;
                destination[2] = (source[2] * alpha + 128) / 255;
                destination[3] = (source[3] * alpha + 128) / 255;
#endif
                source += 4;
                destination += 4;
            }
        }

        gst_video_frame_unmap(&sourceFrame);
        gst_video_frame_unmap(&destinationFrame);
        sample = adoptGRef(gst_sample_new(newBuffer, priv->currentCaps, nullptr, nullptr));
        gst_buffer_unref(newBuffer);
    }
#endif

    return sample;
}

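// This also serves as the preroll vfunc (see webkit_video_sink_class_init), so
// prerolled and played frames go through the same scheduling path.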
static GstFlowReturn webkitVideoSinkRender(GstBaseSink* baseSink, GstBuffer* buffer)
{
    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
    return sink->priv->scheduler.requestRender(sink, buffer) ? GST_FLOW_OK : GST_FLOW_ERROR;
}

static void webkitVideoSinkFinalize(GObject* object)
{
    WEBKIT_VIDEO_SINK(object)->priv->~WebKitVideoSinkPrivate();
    G_OBJECT_CLASS(parent_class)->finalize(object);
}

static gboolean webkitVideoSinkUnlock(GstBaseSink* baseSink)
{
    WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;

    priv->scheduler.stop();

    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock, (baseSink), TRUE);
}

static gboolean webkitVideoSinkUnlockStop(GstBaseSink* baseSink)
{
    WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;

    priv->scheduler.start();

    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop, (baseSink), TRUE);
}

static gboolean webkitVideoSinkStop(GstBaseSink* baseSink)
{
    WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;

    priv->scheduler.stop();
    if (priv->currentCaps) {
        gst_caps_unref(priv->currentCaps);
        priv->currentCaps = nullptr;
    }

    return TRUE;
}

static gboolean webkitVideoSinkStart(GstBaseSink* baseSink)
{
    WebKitVideoSinkPrivate* priv = WEBKIT_VIDEO_SINK(baseSink)->priv;

    priv->scheduler.start();

    return TRUE;
}

static gboolean webkitVideoSinkSetCaps(GstBaseSink* baseSink, GstCaps* caps)
{
    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
    WebKitVideoSinkPrivate* priv = sink->priv;

    GST_DEBUG_OBJECT(sink, "Current caps %" GST_PTR_FORMAT ", setting caps %" GST_PTR_FORMAT, priv->currentCaps, caps);

    GstVideoInfo videoInfo;
    gst_video_info_init(&videoInfo);
    if (!gst_video_info_from_caps(&videoInfo, caps)) {
        GST_ERROR_OBJECT(sink, "Invalid caps %" GST_PTR_FORMAT, caps);
        return FALSE;
    }

    priv->info = videoInfo;
    gst_caps_replace(&priv->currentCaps, caps);
    return TRUE;
}

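// Answering the allocation query tells upstream which buffer metas the sink
// can handle; advertising the video, crop and GL-texture-upload metas allows
// upstream elements to attach that metadata to the buffers they push.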
static gboolean webkitVideoSinkProposeAllocation(GstBaseSink* baseSink, GstQuery* query)
{
    GstCaps* caps;
    gst_query_parse_allocation(query, &caps, 0);
    if (!caps)
        return FALSE;

    WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
    if (!gst_video_info_from_caps(&sink->priv->info, caps))
        return FALSE;

    gst_query_add_allocation_meta(query, GST_VIDEO_META_API_TYPE, 0);
    gst_query_add_allocation_meta(query, GST_VIDEO_CROP_META_API_TYPE, 0);
    gst_query_add_allocation_meta(query, GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, 0);
    return TRUE;
}

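// On flush-start, drop any sample still pending in the scheduler so a stale
// frame is not repainted after the flush; everything else (including the
// flush event itself) is forwarded to GstBaseSink.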
static gboolean webkitVideoSinkEvent(GstBaseSink* baseSink, GstEvent* event)
{
    switch (GST_EVENT_TYPE(event)) {
    case GST_EVENT_FLUSH_START: {
        WebKitVideoSink* sink = WEBKIT_VIDEO_SINK(baseSink);
        sink->priv->scheduler.drain();

        GST_DEBUG_OBJECT(sink, "Flush-start, releasing m_sample");
        }
        FALLTHROUGH;
    default:
        return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, event, (baseSink, event), TRUE);
    }
}

static void webkit_video_sink_class_init(WebKitVideoSinkClass* klass)
{
    GObjectClass* gobjectClass = G_OBJECT_CLASS(klass);
    GstBaseSinkClass* baseSinkClass = GST_BASE_SINK_CLASS(klass);
    GstElementClass* elementClass = GST_ELEMENT_CLASS(klass);

    gst_element_class_add_pad_template(elementClass, gst_static_pad_template_get(&s_sinkTemplate));
    gst_element_class_set_metadata(elementClass, "WebKit video sink", "Sink/Video", "Sends video data from a GStreamer pipeline to WebKit", "Igalia, Alp Toker <alp@atoker.com>");

    g_type_class_add_private(klass, sizeof(WebKitVideoSinkPrivate));

    gobjectClass->finalize = webkitVideoSinkFinalize;

    baseSinkClass->unlock = webkitVideoSinkUnlock;
    baseSinkClass->unlock_stop = webkitVideoSinkUnlockStop;
    baseSinkClass->render = webkitVideoSinkRender;
    baseSinkClass->preroll = webkitVideoSinkRender;
    baseSinkClass->stop = webkitVideoSinkStop;
    baseSinkClass->start = webkitVideoSinkStart;
    baseSinkClass->set_caps = webkitVideoSinkSetCaps;
    baseSinkClass->propose_allocation = webkitVideoSinkProposeAllocation;
    baseSinkClass->event = webkitVideoSinkEvent;

    webkitVideoSinkSignals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
            G_TYPE_FROM_CLASS(klass),
            static_cast<GSignalFlags>(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
            0, // Class offset
            0, // Accumulator
            0, // Accumulator data
            g_cclosure_marshal_generic,
            G_TYPE_NONE, // Return type
            1, // Only one parameter
            GST_TYPE_SAMPLE);
}


GstElement* webkitVideoSinkNew()
{
    // g_object_new() takes a NULL-terminated varargs property list; use nullptr
    // rather than a literal 0, which is not guaranteed to be pointer-sized.
    return GST_ELEMENT(g_object_new(WEBKIT_TYPE_VIDEO_SINK, nullptr));
}
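
// A minimal usage sketch. In WebKit the media player connects to
// "repaint-requested" itself; the callback name and the playbin-based pipeline
// below are hypothetical, for illustration only:
//
//     static void onRepaintRequested(WebKitVideoSink*, GstSample* sample, gpointer)
//     {
//         // Pull the caps and buffer out of the sample and paint the frame.
//         GstCaps* caps = gst_sample_get_caps(sample);
//         GstBuffer* buffer = gst_sample_get_buffer(sample);
//         // ... map the buffer and draw it ...
//     }
//
//     GstElement* sink = webkitVideoSinkNew();
//     g_signal_connect(sink, "repaint-requested", G_CALLBACK(onRepaintRequested), nullptr);
//     g_object_set(playbin, "video-sink", sink, nullptr);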

#endif // ENABLE(VIDEO) && USE(GSTREAMER)