/*
 * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
 * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
 * Copyright (C) 2007 Alp Toker <alp@atoker.com>
 * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
 * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
 * Copyright (C) 2015, 2016 Metrological Group B.V.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"
#include "MediaPlayerPrivateGStreamerBase.h"

#if ENABLE(VIDEO) && USE(GSTREAMER)

#include "GStreamerCommon.h"
#include "GraphicsContext.h"
#include "ImageGStreamer.h"
#include "ImageOrientation.h"
#include "IntRect.h"
#include "Logging.h"
#include "MediaPlayer.h"
#include "NotImplemented.h"
#include "VideoSinkGStreamer.h"
#include "WebKitWebSourceGStreamer.h"
#include <wtf/MathExtras.h>
#include <wtf/StringPrintStream.h>
#include <wtf/glib/GUniquePtr.h>
#include <wtf/text/AtomString.h>
#include <wtf/text/CString.h>

#include <gst/audio/streamvolume.h>
#include <gst/video/gstvideometa.h>

#if ENABLE(ENCRYPTED_MEDIA)
#include "CDMInstance.h"
#include "GStreamerEMEUtilities.h"
#include "SharedBuffer.h"
#include "WebKitCommonEncryptionDecryptorGStreamer.h"
#endif

#if USE(GSTREAMER_GL)
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define GST_GL_CAPS_FORMAT "{ BGRx, BGRA }"
#define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureBGRAToRGBA
#define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertBGRAToRGBA
#else
#define GST_GL_CAPS_FORMAT "{ xRGB, ARGB }"
#define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureARGBToRGBA
#define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertARGBToRGBA
#endif

#include <gst/app/gstappsink.h>

#if USE(LIBEPOXY)
// Include the <epoxy/gl.h> header before <gst/gl/gl.h>.
#include <epoxy/gl.h>

// Work around a build issue with the RPi userland GLESv2 headers and libepoxy <https://webkit.org/b/185639>.
#if !GST_CHECK_VERSION(1, 14, 0)
#include <gst/gl/gstglconfig.h>
#if defined(GST_GL_HAVE_WINDOW_DISPMANX) && GST_GL_HAVE_WINDOW_DISPMANX
#define __gl2_h_
#undef GST_GL_HAVE_GLSYNC
#define GST_GL_HAVE_GLSYNC 1
#endif
#endif // !GST_CHECK_VERSION(1, 14, 0)
#endif // USE(LIBEPOXY)

#define GST_USE_UNSTABLE_API
#include <gst/gl/gl.h>
#undef GST_USE_UNSTABLE_API

#include "GLContext.h"
#if USE(GLX)
#include "GLContextGLX.h"
#include <gst/gl/x11/gstgldisplay_x11.h>
#endif

#if USE(EGL)
#include "GLContextEGL.h"
#include <gst/gl/egl/gstgldisplay_egl.h>
#endif

#if PLATFORM(X11)
#include "PlatformDisplayX11.h"
#endif

#if PLATFORM(WAYLAND)
#include "PlatformDisplayWayland.h"
#endif

#if USE(WPE_RENDERER)
#include "PlatformDisplayLibWPE.h"
#endif

// gstglapi.h may include eglplatform.h, which includes X.h; X.h defines None,
// breaking the MediaPlayer::None enum.
#if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#undef None
#endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#include "VideoTextureCopierGStreamer.h"
#endif // USE(GSTREAMER_GL)

#if USE(TEXTURE_MAPPER_GL)
#include "BitmapTextureGL.h"
#include "BitmapTexturePool.h"
#include "GraphicsContext3D.h"
#include "TextureMapperContextAttributes.h"
#include "TextureMapperPlatformLayerBuffer.h"
#include "TextureMapperPlatformLayerProxy.h"
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
#include <cairo-gl.h>
#endif
#endif // USE(TEXTURE_MAPPER_GL)

GST_DEBUG_CATEGORY(webkit_media_player_debug);
#define GST_CAT_DEFAULT webkit_media_player_debug
namespace WebCore {
using namespace std;

#if USE(GSTREAMER_HOLEPUNCH)
static const FloatSize s_holePunchDefaultFrameSize(1280, 720);
#endif

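// Euclid's algorithm. For example, greatestCommonDivisor(1920, 1080) == 120;
// the final ABS() normalizes the result for negative inputs.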
static int greatestCommonDivisor(int a, int b)
{
    while (b) {
        int temp = a;
        a = b;
        b = temp % b;
    }

    return ABS(a);
}

#if USE(TEXTURE_MAPPER_GL)
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
    explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags, bool gstGLEnabled)
    {
        GstVideoInfo videoInfo;
        if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
            return;

        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
        m_hasAlphaChannel = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo);
        m_buffer = gst_sample_get_buffer(sample);
        if (UNLIKELY(!GST_IS_BUFFER(m_buffer)))
            return;

#if USE(GSTREAMER_GL)
        m_flags = flags | (m_hasAlphaChannel ? TextureMapperGL::ShouldBlend : 0);

        if (gstGLEnabled) {
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL));
            if (m_isMapped)
                m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
        } else
#else
        UNUSED_PARAM(flags);
        UNUSED_PARAM(gstGLEnabled);
#endif // USE(GSTREAMER_GL)

        {
            m_textureID = 0;
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer, GST_MAP_READ);
            if (m_isMapped) {
                // Right now the TextureMapper only supports pixel formats with a single plane.
                ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
            }
        }
    }

    virtual ~GstVideoFrameHolder()
    {
        if (UNLIKELY(!m_isMapped))
            return;

        gst_video_frame_unmap(&m_videoFrame);
    }

    const IntSize& size() const { return m_size; }
    bool hasAlphaChannel() const { return m_hasAlphaChannel; }
    TextureMapperGL::Flags flags() const { return m_flags; }
    GLuint textureID() const { return m_textureID; }

    void updateTexture(BitmapTextureGL& texture)
    {
        ASSERT(!m_textureID);
        GstVideoGLTextureUploadMeta* meta;
        if ((meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer))) {
            if (meta->n_textures == 1) { // BGRx & BGRA formats use only one texture.
                guint ids[4] = { texture.id(), 0, 0, 0 };

                if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                    return;
            }
        }

        int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 0);
        const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0);
        texture.updateContents(srcData, WebCore::IntRect(0, 0, m_size.width(), m_size.height()), WebCore::IntPoint(0, 0), stride);
    }

private:
    GstBuffer* m_buffer;
    GstVideoFrame m_videoFrame { };
    IntSize m_size;
    bool m_hasAlphaChannel;
    TextureMapperGL::Flags m_flags { };
    GLuint m_textureID { 0 };
    bool m_isMapped { false };
};
#endif
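// Once initialized, the "webkitmediaplayer" category below can be enabled at
// runtime through the usual GStreamer mechanism, e.g. GST_DEBUG=webkitmediaplayer:5.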
void MediaPlayerPrivateGStreamerBase::initializeDebugCategory()
{
    GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
}

MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
    , m_player(player)
    , m_fpsSink(nullptr)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_networkState(MediaPlayer::Empty)
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
#if USE(TEXTURE_MAPPER_GL)
#if USE(NICOSIA)
    , m_nicosiaLayer(Nicosia::ContentLayer::create(Nicosia::ContentLayerTextureMapperImpl::createFactory(*this)))
#else
    , m_platformLayerProxy(adoptRef(new TextureMapperPlatformLayerProxy()))
#endif
#endif
{
}

MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
#if USE(GSTREAMER_GL)
    if (m_videoDecoderPlatform == WebKitGstVideoDecoderPlatform::Video4Linux)
        flushCurrentBuffer();
#endif
#if USE(TEXTURE_MAPPER_GL) && USE(NICOSIA)
    downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).invalidateClient();
#endif

#if ENABLE(ENCRYPTED_MEDIA)
    m_protectionCondition.notifyAll();
#endif
    m_notifier->invalidate();

    if (m_videoSink) {
        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
#if USE(GSTREAMER_GL)
        if (GST_IS_BIN(m_videoSink.get())) {
            GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
            g_signal_handlers_disconnect_by_data(appsink.get(), this);
        }
#endif
    }

    if (m_volumeElement)
        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    // This will release the GStreamer thread from m_drawCondition in non-AC mode in case there's an ongoing triggerRepaint call
    // waiting there, and ensure that any triggerRepaint call reaching the lock won't wait on m_drawCondition.
    cancelRepaint(true);

    // The change to GST_STATE_NULL state is always synchronous. So after this gets executed we don't need to worry
    // about handlers running in the GStreamer thread.
    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);

    m_player = nullptr;
}

void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
{
    m_pipeline = pipeline;

    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    // Use a sync handler so that messages such as need-context can be answered
    // directly from the streaming thread that posted them.
    gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
        auto& player = *static_cast<MediaPlayerPrivateGStreamerBase*>(userData);

        if (player.handleSyncMessage(message)) {
            gst_message_unref(message);
            return GST_BUS_DROP;
        }

        return GST_BUS_PASS;
    }, this, nullptr);
}

bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
{
    if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
        return false;

    const gchar* contextType;
    gst_message_parse_context_type(message, &contextType);
    GST_DEBUG_OBJECT(pipeline(), "Handling %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message));

    if (!g_strcmp0(contextType, WEBKIT_WEB_SRC_PLAYER_CONTEXT_TYPE_NAME)) {
        GRefPtr<GstContext> context = adoptGRef(gst_context_new(WEBKIT_WEB_SRC_PLAYER_CONTEXT_TYPE_NAME, FALSE));
        GstStructure* contextStructure = gst_context_writable_structure(context.get());

        ASSERT(m_player);
        gst_structure_set(contextStructure, "player", G_TYPE_POINTER, m_player, nullptr);
        gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
        return true;
    }

#if USE(GSTREAMER_GL)
    GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType));
    if (elementContext) {
        gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
        return true;
    }
#endif // USE(GSTREAMER_GL)

#if ENABLE(ENCRYPTED_MEDIA)
    if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
        if (isMainThread()) {
            GST_ERROR("can't handle drm-preferred-decryption-system-id need context message in the main thread");
            ASSERT_NOT_REACHED();
            return false;
        }
        GST_DEBUG_OBJECT(pipeline(), "handling drm-preferred-decryption-system-id need context message");
        LockHolder lock(m_protectionMutex);
        ProtectionSystemEvents protectionSystemEvents(message);
        GST_TRACE("found %zu protection events, %zu decryptors available", protectionSystemEvents.events().size(), protectionSystemEvents.availableSystems().size());
        InitData initData;

        for (auto& event : protectionSystemEvents.events()) {
            const char* eventKeySystemId = nullptr;
            GstBuffer* data = nullptr;
            gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);

            initData.append({eventKeySystemId, data});
            m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
        }

        initializationDataEncountered(WTFMove(initData));

        GST_INFO_OBJECT(pipeline(), "waiting for a CDM instance");
        m_protectionCondition.waitFor(m_protectionMutex, Seconds(4), [this] {
            return this->m_cdmInstance;
        });

        if (m_cdmInstance && !m_cdmInstance->keySystem().isEmpty()) {
            const char* preferredKeySystemUuid = GStreamerEMEUtilities::keySystemToUuid(m_cdmInstance->keySystem());
            GST_INFO_OBJECT(pipeline(), "working with key system %s, continuing with key system %s on %s", m_cdmInstance->keySystem().utf8().data(), preferredKeySystemUuid, GST_MESSAGE_SRC_NAME(message));

            GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
            GstStructure* contextStructure = gst_context_writable_structure(context.get());
            gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, preferredKeySystemUuid, nullptr);
            gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
        } else
            GST_WARNING("CDM instance not initialized");

        return true;
    }
#endif // ENABLE(ENCRYPTED_MEDIA)

    return false;
}

#if USE(GSTREAMER_GL)
GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const char* contextType)
{
    if (!ensureGstGLContext())
        return nullptr;

    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(displayContext, gstGLDisplay());
        return displayContext;
    }

    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(appContext);
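        // Note: the context GType macro was renamed to GST_TYPE_GL_CONTEXT in
        // GStreamer 1.12; older versions used the GST_GL_TYPE_CONTEXT spelling,
        // hence the version check below.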
#if GST_CHECK_VERSION(1, 12, 0)
        gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, gstGLContext(), nullptr);
#else
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, gstGLContext(), nullptr);
#endif
        return appContext;
    }

    return nullptr;
}

bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
{
    if (m_glContext)
        return true;

    auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();

    // The floating ref removal support was added in https://bugzilla.gnome.org/show_bug.cgi?id=743062.
    bool shouldAdoptRef = webkitGstCheckVersion(1, 14, 0);
    if (!m_glDisplay) {
#if PLATFORM(X11)
#if USE(GLX)
        if (is<PlatformDisplayX11>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared GL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
        }
#elif USE(EGL)
        if (is<PlatformDisplayX11>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared EGL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
        }
#endif
#endif

#if PLATFORM(WAYLAND)
        if (is<PlatformDisplayWayland>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating Wayland shared display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
        }
#endif

#if USE(WPE_RENDERER)
        if (is<PlatformDisplayLibWPE>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating WPE shared EGL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayLibWPE>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayLibWPE>(sharedDisplay).eglDisplay()));
        }
#endif

        ASSERT(m_glDisplay);
    }

    GLContext* webkitContext = sharedDisplay.sharingGLContext();
    // EGL and GLX are mutually exclusive, no need for ifdefs here.
    GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;

#if USE(OPENGL_ES)
    GstGLAPI glAPI = GST_GL_API_GLES2;
#elif USE(OPENGL)
    GstGLAPI glAPI = GST_GL_API_OPENGL;
#else
    ASSERT_NOT_REACHED();
#endif

    PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
    if (!contextHandle)
        return false;

    if (shouldAdoptRef)
        m_glContext = adoptGRef(gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI));
    else
        m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);

    return true;
}
#endif // USE(GSTREAMER_GL)

// Returns the size of the video.
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
#if USE(GSTREAMER_HOLEPUNCH)
    // When using the holepunch we may not be able to get the video frame size, so we can't use
    // it. But we need to report some non-empty naturalSize for the player's GraphicsLayer
    // to be properly created.
    return s_holePunchDefaultFrameSize;
#endif

    if (!hasVideo())
        return FloatSize();

    if (!m_videoSize.isEmpty())
        return m_videoSize;

    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return FloatSize();

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return FloatSize();
    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size; if this fails, the
    // video-sink has likely not yet negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return FloatSize();

#if USE(TEXTURE_MAPPER_GL)
    // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
    if (m_renderingCanBeAccelerated) {
        if (m_videoSourceOrientation.usesWidthAsHeight())
            originalSize = originalSize.transposedSize();
    }
#endif

    GST_DEBUG_OBJECT(pipeline(), "Original video size: %dx%d", originalSize.width(), originalSize.height());
    GST_DEBUG_OBJECT(pipeline(), "Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate DAR based on PAR and video size.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;
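    // Worked example: a 720x576 frame with a 16:15 pixel aspect ratio gives
    // displayWidth = 11520 and displayHeight = 8640, which reduce to 4:3; since
    // 576 is divisible by 3, the natural size below comes out as 768x576.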

    // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        GST_DEBUG_OBJECT(pipeline(), "Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        GST_DEBUG_OBJECT(pipeline(), "Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        GST_DEBUG_OBJECT(pipeline(), "Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    GST_DEBUG_OBJECT(pipeline(), "Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}

void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
{
    if (!m_volumeElement)
        return;

    GST_DEBUG_OBJECT(pipeline(), "Setting volume: %f", volume);
    gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
}

float MediaPlayerPrivateGStreamerBase::volume() const
{
    if (!m_volumeElement)
        return 0;

    return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
}
void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
{
    if (!m_player || !m_volumeElement)
        return;
    double volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
    // get_volume() can return values greater than 1.0 if the user applies
    // software gain via a third-party application (the GNOME volume control,
    // for instance).
    volume = CLAMP(volume, 0.0, 1.0);
    m_player->volumeChanged(static_cast<float>(volume));
}

void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::volume signal.
    GST_DEBUG_OBJECT(player->pipeline(), "Volume changed to: %f", player->volume());

    player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] {
        player->notifyPlayerOfVolumeChange();
    });
}

MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
    return m_networkState;
}

MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
    return m_readyState;
}

void MediaPlayerPrivateGStreamerBase::sizeChanged()
{
    notImplemented();
}

void MediaPlayerPrivateGStreamerBase::setMuted(bool mute)
{
    if (!m_volumeElement)
        return;

    bool currentValue = muted();
    if (currentValue == mute)
        return;

    GST_INFO_OBJECT(pipeline(), "Set muted to %s", toString(mute).utf8().data());
    g_object_set(m_volumeElement.get(), "mute", mute, nullptr);
}

bool MediaPlayerPrivateGStreamerBase::muted() const
{
    if (!m_volumeElement)
        return false;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
    GST_INFO_OBJECT(pipeline(), "Player is muted: %s", toString(static_cast<bool>(muted)).utf8().data());
    return muted;
}

void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
{
    if (!m_player || !m_volumeElement)
        return;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
    m_player->muteChanged(static_cast<bool>(muted));
}

void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::mute signal.
    player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] {
        player->notifyPlayerOfMute();
    });
}

void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
{
    m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled();
}

#if USE(TEXTURE_MAPPER_GL)
PlatformLayer* MediaPlayerPrivateGStreamerBase::platformLayer() const
{
#if USE(NICOSIA)
    return m_nicosiaLayer.ptr();
#else
    return const_cast<MediaPlayerPrivateGStreamerBase*>(this);
#endif
}

#if USE(NICOSIA)
void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
#if USE(GSTREAMER_HOLEPUNCH)
    pushNextHolePunchBuffer();
#endif
}
#else
RefPtr<TextureMapperPlatformLayerProxy> MediaPlayerPrivateGStreamerBase::proxy() const
{
    return m_platformLayerProxy.copyRef();
}

void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
#if USE(GSTREAMER_HOLEPUNCH)
    pushNextHolePunchBuffer();
#endif
}
#endif

void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    auto proxyOperation =
        [this](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder holder(proxy.lock());

            if (!proxy.isActive())
                return;

            std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), m_textureMapperFlags, !m_usingFallbackVideoSink);

            GLuint textureID = frameHolder->textureID();
            std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer;
            if (textureID) {
                layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(textureID, frameHolder->size(), frameHolder->flags(), GraphicsContext3D::RGBA);
                layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
            } else {
                layerBuffer = proxy.getAvailableBuffer(frameHolder->size(), GL_DONT_CARE);
                if (UNLIKELY(!layerBuffer)) {
                    auto texture = BitmapTextureGL::create(TextureMapperContextAttributes::get());
                    texture->reset(frameHolder->size(), frameHolder->hasAlphaChannel() ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
                    layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
                }
                frameHolder->updateTexture(layerBuffer->textureGL());
                layerBuffer->setExtraFlags(m_textureMapperFlags | (frameHolder->hasAlphaChannel() ? TextureMapperGL::ShouldBlend : 0));
            }
            proxy.pushNextBuffer(WTFMove(layerBuffer));
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
#endif // USE(TEXTURE_MAPPER_GL)

void MediaPlayerPrivateGStreamerBase::repaint()
{
    ASSERT(m_sample);
    ASSERT(isMainThread());

    m_player->repaint();

    LockHolder lock(m_drawMutex);
    m_drawCondition.notifyOne();
}

void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
    bool triggerResize;
    {
        auto sampleLocker = holdLock(m_sampleMutex);
        triggerResize = !m_sample;
        m_sample = sample;
    }

    if (triggerResize) {
        GST_DEBUG_OBJECT(pipeline(), "First sample reached the sink, triggering video dimensions update");
        m_notifier->notify(MainThreadNotification::SizeChanged, [this] {
            m_player->sizeChanged();
        });
    }

    if (!m_renderingCanBeAccelerated) {
        LockHolder locker(m_drawMutex);
        if (m_destroying)
            return;
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
        return;
    }

#if USE(TEXTURE_MAPPER_GL)
    if (m_usingFallbackVideoSink) {
        LockHolder lock(m_drawMutex);
        auto proxyOperation =
            [this](TextureMapperPlatformLayerProxy& proxy)
            {
                return proxy.scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); });
            };
#if USE(NICOSIA)
        if (!proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy()))
            return;
#else
        if (!proxyOperation(*m_platformLayerProxy))
            return;
#endif
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
    } else
        pushTextureToCompositor();
#endif // USE(TEXTURE_MAPPER_GL)
}

void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
{
    player->triggerRepaint(sample);
}

void MediaPlayerPrivateGStreamerBase::cancelRepaint(bool destroying)
{
    // The goal of this function is to release the GStreamer thread from m_drawCondition in triggerRepaint() in the non-AC case,
    // to avoid a deadlock if the player gets paused while waiting for drawing (see https://bugs.webkit.org/show_bug.cgi?id=170003):
    // the main thread is waiting for the GStreamer thread to pause, but the GStreamer thread is locked waiting for the
    // main thread to draw. This deadlock doesn't happen when using AC because the sample is processed (not painted) in the compositor
    // thread, so the main thread can request the pause and wait if the GStreamer thread is waiting for the compositor thread.
    //
    // This function is also used when destroying the player (destroying parameter is true), to release the GStreamer thread from
    // m_drawCondition and to ensure that new triggerRepaint calls won't wait on m_drawCondition.
    if (!m_renderingCanBeAccelerated) {
        LockHolder locker(m_drawMutex);
        m_drawTimer.stop();
        m_destroying = destroying;
        m_drawCondition.notifyOne();
    }
}

void MediaPlayerPrivateGStreamerBase::repaintCancelledCallback(MediaPlayerPrivateGStreamerBase* player)
{
    player->cancelRepaint();
}

#if USE(GSTREAMER_GL)
GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}

GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}

void MediaPlayerPrivateGStreamerBase::flushCurrentBuffer()
{
    auto sampleLocker = holdLock(m_sampleMutex);

    if (m_sample) {
        // Replace the sample with a new one carrying only the caps, so the dummy sample is still useful for getting the dimensions.
        // This prevents resizing problems when the video changes its quality and a DRAIN is performed.
        const GstStructure* info = gst_sample_get_info(m_sample.get());
        m_sample = adoptGRef(gst_sample_new(nullptr, gst_sample_get_caps(m_sample.get()),
            gst_sample_get_segment(m_sample.get()), info ? gst_structure_copy(info) : nullptr));
    }

    bool shouldWait = m_videoDecoderPlatform == WebKitGstVideoDecoderPlatform::Video4Linux;
    auto proxyOperation = [shouldWait, pipeline = pipeline()](TextureMapperPlatformLayerProxy& proxy) {
        GST_DEBUG_OBJECT(pipeline, "Flushing video sample %s", shouldWait ? "synchronously" : "");
        LockHolder locker(!shouldWait ? &proxy.lock() : nullptr);

        if (proxy.isActive())
            proxy.dropCurrentBufferWhilePreservingTexture(shouldWait);
    };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
#endif

void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
{
    m_size = size;
}

void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (context.paintingDisabled())
        return;

    if (!m_player->visible())
        return;

    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    ImagePaintingOptions paintingOptions(CompositeCopy);
    if (m_renderingCanBeAccelerated)
        paintingOptions.m_orientationDescription.setImageOrientationEnum(m_videoSourceOrientation);

    auto gstImage = ImageGStreamer::createImage(m_sample.get());
    if (!gstImage)
        return;

    context.drawImage(gstImage->image(), rect, gstImage->rect(), paintingOptions);
}

#if USE(GSTREAMER_GL)
bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    UNUSED_PARAM(context);

    if (m_usingFallbackVideoSink)
        return false;

    if (premultiplyAlpha)
        return false;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return false;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), m_textureMapperFlags, true);

    auto textureID = frameHolder->textureID();
    if (!textureID)
        return false;

    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);
}

NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
{
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
    if (m_usingFallbackVideoSink)
        return nullptr;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return nullptr;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), m_textureMapperFlags, true);

    auto textureID = frameHolder->textureID();
    if (!textureID)
        return nullptr;

    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
    context->makeContextCurrent();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    if (!m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, 0, GraphicsContext3D::TEXTURE_2D, 0, GraphicsContext3D::RGBA, GraphicsContext3D::RGBA, GraphicsContext3D::UNSIGNED_BYTE, false, m_videoSourceOrientation))
        return nullptr;

    return adoptRef(cairo_gl_surface_create_for_texture(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, m_videoTextureCopier->resultTexture(), size.width(), size.height()));
#else
    return nullptr;
#endif
}
#endif // USE(GSTREAMER_GL)

void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(const ImageOrientation& orientation)
{
    if (m_videoSourceOrientation == orientation)
        return;

    m_videoSourceOrientation = orientation;
#if USE(TEXTURE_MAPPER_GL)
    updateTextureMapperFlags();
#endif
}

#if USE(TEXTURE_MAPPER_GL)
void MediaPlayerPrivateGStreamerBase::updateTextureMapperFlags()
{
    switch (m_videoSourceOrientation) {
    case DefaultImageOrientation:
        m_textureMapperFlags = 0;
        break;
    case OriginRightTop:
        m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture90;
        break;
    case OriginBottomRight:
        m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture180;
        break;
    case OriginLeftBottom:
        m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture270;
        break;
    default:
        // FIXME: Handle OriginTopRight, OriginBottomLeft, OriginLeftTop and OriginRightBottom?
        m_textureMapperFlags = 0;
        break;
    }

#if USE(GSTREAMER_GL)
    // When the imxvpudecoder is used, the texture sampling of the
    // directviv-uploaded texture returns an RGB value, so there's no need to
    // convert it.
    if (m_videoDecoderPlatform != WebKitGstVideoDecoderPlatform::ImxVPU)
        m_textureMapperFlags |= TEXTURE_MAPPER_COLOR_CONVERT_FLAG;
#endif
}
#endif

bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
    return true;
}

MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
{
    if (m_readyState == MediaPlayer::HaveNothing)
        return MediaPlayer::Unknown;

    if (isLiveStream())
        return MediaPlayer::LiveStream;

    return MediaPlayer::Download;
}

#if USE(GSTREAMER_GL)
GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
{
    GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
    if (!appsink)
        return nullptr;

    g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
    g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
    g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(appsink, "sink"));
    gst_pad_add_probe(pad.get(), static_cast<GstPadProbeType>(GST_PAD_PROBE_TYPE_PUSH | GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_FLUSH), [] (GstPad*, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn {
        // On some platforms (e.g. OpenMAX on the Raspberry Pi), when a resolution change occurs, the
        // pipeline has to be drained before a frame with the new resolution can be decoded.
        // In this context, it's important that we don't hold references to any previous frame
        // (e.g. m_sample) so that decoding can continue.
        // We are also not supposed to keep the original frame after a flush.
        if (info->type & GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM) {
            if (GST_QUERY_TYPE(GST_PAD_PROBE_INFO_QUERY(info)) != GST_QUERY_DRAIN)
                return GST_PAD_PROBE_OK;
            GST_DEBUG("Acting upon DRAIN query");
        }
        if (info->type & GST_PAD_PROBE_TYPE_EVENT_FLUSH) {
            if (GST_EVENT_TYPE(GST_PAD_PROBE_INFO_EVENT(info)) != GST_EVENT_FLUSH_START)
                return GST_PAD_PROBE_OK;
            GST_DEBUG("Acting upon flush-start event");
        }

        auto* player = static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
        player->flushCurrentBuffer();
        return GST_PAD_PROBE_OK;
    }, this, nullptr);

    return appsink;
}

GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
{
    gboolean result = TRUE;
    GstElement* videoSink = gst_bin_new(nullptr);
    GstElement* upload = gst_element_factory_make("glupload", nullptr);
    GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
    GstElement* appsink = createGLAppSink();

    if (!appsink || !upload || !colorconvert) {
        GST_WARNING("Failed to create GstGL elements");
        gst_object_unref(videoSink);

        if (upload)
            gst_object_unref(upload);
        if (colorconvert)
            gst_object_unref(colorconvert);
        if (appsink)
            gst_object_unref(appsink);

        g_warning("WebKit wasn't able to find the GStreamer opengl plugin. Hardware-accelerated zero-copy video rendering can't be enabled without this plugin.");
        return nullptr;
    }

    gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);

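    // On little-endian machines this caps string expands to
    // "video/x-raw(memory:GLMemory), format = (string) { BGRx, BGRA }".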
    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) " GST_GL_CAPS_FORMAT));

    result &= gst_element_link_pads(upload, "src", colorconvert, "sink");
    result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get());

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
    gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));

    if (!result) {
        GST_WARNING("Failed to link GstGL elements");
        gst_object_unref(videoSink);
        videoSink = nullptr;
    }
    return videoSink;
}

void MediaPlayerPrivateGStreamerBase::ensureGLVideoSinkContext()
{
    if (!m_glDisplayElementContext)
        m_glDisplayElementContext = adoptGRef(requestGLContext(GST_GL_DISPLAY_CONTEXT_TYPE));

    if (m_glDisplayElementContext)
        gst_element_set_context(m_videoSink.get(), m_glDisplayElementContext.get());

    if (!m_glAppElementContext)
        m_glAppElementContext = adoptGRef(requestGLContext("gst.gl.app_context"));

    if (m_glAppElementContext)
        gst_element_set_context(m_videoSink.get(), m_glAppElementContext.get());
}
#endif // USE(GSTREAMER_GL)

#if USE(GSTREAMER_HOLEPUNCH)
static void setRectangleToVideoSink(GstElement* videoSink, const IntRect& rect)
{
    // Here goes the platform-dependent code to set the size and position of the
    // video rendering window on the videoSink. Mark the parameters unused by default.
    UNUSED_PARAM(videoSink);
    UNUSED_PARAM(rect);
}

class GStreamerHolePunchClient : public TextureMapperPlatformLayerBuffer::HolePunchClient {
public:
    GStreamerHolePunchClient(GRefPtr<GstElement>&& videoSink) : m_videoSink(WTFMove(videoSink)) { }
    void setVideoRectangle(const IntRect& rect) final { setRectangleToVideoSink(m_videoSink.get(), rect); }
private:
    GRefPtr<GstElement> m_videoSink;
};

GstElement* MediaPlayerPrivateGStreamerBase::createHolePunchVideoSink()
{
    // Here goes the platform-dependent code to create the videoSink. As a default
    // we use a fakevideosink so nothing is drawn to the page.
    GstElement* videoSink = gst_element_factory_make("fakevideosink", nullptr);

    return videoSink;
}

void MediaPlayerPrivateGStreamerBase::pushNextHolePunchBuffer()
{
    auto proxyOperation =
        [this](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder holder(proxy.lock());
            std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(0, m_size, TextureMapperGL::ShouldNotBlend, GL_DONT_CARE);
            std::unique_ptr<GStreamerHolePunchClient> holePunchClient = std::make_unique<GStreamerHolePunchClient>(m_videoSink.get());
            layerBuffer->setHolePunchClient(WTFMove(holePunchClient));
            proxy.pushNextBuffer(WTFMove(layerBuffer));
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
#endif

GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    acceleratedRenderingStateChanged();

#if USE(GSTREAMER_HOLEPUNCH)
    m_videoSink = createHolePunchVideoSink();
    pushNextHolePunchBuffer();
    return m_videoSink.get();
#endif

#if USE(GSTREAMER_GL)
    if (m_renderingCanBeAccelerated)
        m_videoSink = createVideoSinkGL();
#endif

    if (!m_videoSink) {
        m_usingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
        g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
    }

    GstElement* videoSink = nullptr;
#if ENABLE(MEDIA_STATISTICS)
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        g_object_set(m_fpsSink.get(), "silent", TRUE, nullptr);

        // Turn off text overlay unless tracing is enabled.
        if (gst_debug_category_get_threshold(webkit_media_player_debug) < GST_LEVEL_TRACE)
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE, nullptr);

        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }
#endif

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);

    return videoSink;
}

void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
{
    ASSERT(!m_volumeElement);
    m_volumeElement = volume;

    // We don't set the initial volume because we trust the sink to keep it for us. See
    // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
    if (!m_player->platformVolumeConfigurationRequired()) {
        GST_DEBUG_OBJECT(pipeline(), "Setting stream volume to %f", m_player->volume());
        g_object_set(m_volumeElement.get(), "volume", m_player->volume(), nullptr);
    } else
        GST_DEBUG_OBJECT(pipeline(), "Not setting stream volume, trusting system one");

    GST_DEBUG_OBJECT(pipeline(), "Setting stream muted %s", toString(m_player->muted()).utf8().data());
    g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);

    g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
    g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
}

unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
{
    guint64 decodedFrames = 0;
    if (m_fpsSink)
        g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
    return static_cast<unsigned>(decodedFrames);
}

unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
{
    guint64 framesDropped = 0;
    if (m_fpsSink)
        g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
    return static_cast<unsigned>(framesDropped);
}

unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    if (audioSink() && gst_element_query(audioSink(), query))
        gst_query_parse_position(query, 0, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}

unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    if (gst_element_query(m_videoSink.get(), query))
        gst_query_parse_position(query, 0, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}

#if ENABLE(ENCRYPTED_MEDIA)
void MediaPlayerPrivateGStreamerBase::initializationDataEncountered(InitData&& initData)
{
    ASSERT(!isMainThread());

    RunLoop::main().dispatch([weakThis = makeWeakPtr(*this), initData = WTFMove(initData)] {
        if (!weakThis)
            return;

        GST_DEBUG("scheduling initializationDataEncountered event of size %zu", initData.payload()->size());
        GST_MEMDUMP("init data", reinterpret_cast<const uint8_t*>(initData.payload()->data()), initData.payload()->size());
        weakThis->m_player->initializationDataEncountered(initData.payloadContainerType(), initData.payload()->tryCreateArrayBuffer());
    });
}

void MediaPlayerPrivateGStreamerBase::cdmInstanceAttached(CDMInstance& instance)
{
    ASSERT(isMainThread());

    if (m_cdmInstance == &instance)
        return;

    if (!m_pipeline) {
        GST_ERROR("no pipeline yet");
        ASSERT_NOT_REACHED();
        return;
    }

    m_cdmInstance = &instance;

    GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-cdm-instance", FALSE));
    GstStructure* contextStructure = gst_context_writable_structure(context.get());
    gst_structure_set(contextStructure, "cdm-instance", G_TYPE_POINTER, m_cdmInstance.get(), nullptr);
    gst_element_set_context(GST_ELEMENT(m_pipeline.get()), context.get());

    GST_DEBUG_OBJECT(m_pipeline.get(), "CDM instance %p dispatched as context", m_cdmInstance.get());

    m_protectionCondition.notifyAll();
}

void MediaPlayerPrivateGStreamerBase::cdmInstanceDetached(CDMInstance& instance)
{
    ASSERT(isMainThread());

    if (m_cdmInstance != &instance) {
        GST_WARNING("passed CDMInstance %p is different from stored one %p", &instance, m_cdmInstance.get());
        ASSERT_NOT_REACHED();
        return;
    }

    ASSERT(m_pipeline);

    GST_DEBUG_OBJECT(m_pipeline.get(), "detaching CDM instance %p, setting empty context", m_cdmInstance.get());
    m_cdmInstance = nullptr;

    GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-cdm-instance", FALSE));
    gst_element_set_context(GST_ELEMENT(m_pipeline.get()), context.get());

    m_protectionCondition.notifyAll();
}

void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithInstance(CDMInstance& instance)
{
    ASSERT(m_cdmInstance.get() == &instance);
    GST_TRACE("instance %p, current stored %p", &instance, m_cdmInstance.get());
    attemptToDecryptWithLocalInstance();
}

void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithLocalInstance()
{
    bool eventHandled = gst_element_send_event(pipeline(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB, gst_structure_new_empty("attempt-to-decrypt")));
    GST_DEBUG("attempting to decrypt, event handled %s", boolForPrinting(eventHandled));
}

void MediaPlayerPrivateGStreamerBase::handleProtectionEvent(GstEvent* event)
{
    if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
        GST_DEBUG_OBJECT(pipeline(), "event %u already handled", GST_EVENT_SEQNUM(event));
        return;
    }
    GST_DEBUG_OBJECT(pipeline(), "handling event %u from MSE", GST_EVENT_SEQNUM(event));
    const char* eventKeySystemUUID = nullptr;
    GstBuffer* initData = nullptr;
    gst_event_parse_protection(event, &eventKeySystemUUID, &initData, nullptr);
    initializationDataEncountered({eventKeySystemUUID, initData});
}

void MediaPlayerPrivateGStreamerBase::setWaitingForKey(bool waitingForKey)
{
    // We bail out if the value did not change, or if we are requested to stop waiting while there are still waiting decryptors.
    GST_TRACE("waitingForKey %s, m_waitingForKey %s", boolForPrinting(waitingForKey), boolForPrinting(m_waitingForKey));
    if (waitingForKey == m_waitingForKey || (!waitingForKey && this->waitingForKey()))
        return;

    m_waitingForKey = waitingForKey;
    GST_DEBUG("waiting for key changed %s", boolForPrinting(m_waitingForKey));
    m_player->waitingForKeyChanged();
}

bool MediaPlayerPrivateGStreamerBase::waitingForKey() const
{
    if (!m_pipeline)
        return false;

    GstState state;
    gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);

    bool result = false;
    GRefPtr<GstQuery> query = adoptGRef(gst_query_new_custom(GST_QUERY_CUSTOM, gst_structure_new_empty("any-decryptor-waiting-for-key")));
    if (state >= GST_STATE_PAUSED) {
        result = gst_element_query(m_pipeline.get(), query.get());
        GST_TRACE("query result %s, on %s", boolForPrinting(result), gst_element_state_get_name(state));
    } else if (state >= GST_STATE_READY) {
        // Running a query in the pipeline is easier, but it only works when the pipeline is set up and running;
        // otherwise we need to inspect the pipeline and ask the decryptors directly.
        GUniquePtr<GstIterator> iterator(gst_bin_iterate_recurse(GST_BIN(m_pipeline.get())));
        GstIteratorResult iteratorResult;
        do {
            iteratorResult = gst_iterator_fold(iterator.get(), [](const GValue *item, GValue *, gpointer data) -> gboolean {
                GstElement* element = GST_ELEMENT(g_value_get_object(item));
                GstQuery* query = GST_QUERY(data);
                return !WEBKIT_IS_MEDIA_CENC_DECRYPT(element) || !gst_element_query(element, query);
            }, nullptr, query.get());
            if (iteratorResult == GST_ITERATOR_RESYNC)
                gst_iterator_resync(iterator.get());
        } while (iteratorResult == GST_ITERATOR_RESYNC);
        if (iteratorResult == GST_ITERATOR_ERROR)
            GST_WARNING("iterator returned an error");
        result = iteratorResult == GST_ITERATOR_OK;
        GST_TRACE("iterator result %d, waiting %s", iteratorResult, boolForPrinting(result));
    }

    return result;
}
#endif

bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
{
    bool result = false;

#if ENABLE(ENCRYPTED_MEDIA)
    result = GStreamerEMEUtilities::isClearKeyKeySystem(keySystem);
#endif

    GST_DEBUG("checking for KeySystem support with %s and type %s: %s", keySystem.utf8().data(), mimeType.utf8().data(), boolForPrinting(result));
    return result;
}

MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
{
    UNUSED_PARAM(parameters);
    return result;
}

} // namespace WebCore

#endif // ENABLE(VIDEO) && USE(GSTREAMER)