[WPE] Implement GStreamer-based holepunch
Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
/*
 * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
 * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
 * Copyright (C) 2007 Alp Toker <alp@atoker.com>
 * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
 * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
 * Copyright (C) 2015, 2016 Metrological Group B.V.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"
#include "MediaPlayerPrivateGStreamerBase.h"

#if ENABLE(VIDEO) && USE(GSTREAMER)

#include "GStreamerCommon.h"
#include "GraphicsContext.h"
#include "GraphicsContext3D.h"
#include "ImageGStreamer.h"
#include "ImageOrientation.h"
#include "IntRect.h"
#include "Logging.h"
#include "MediaPlayer.h"
#include "NotImplemented.h"
#include "VideoSinkGStreamer.h"
#include <wtf/glib/GUniquePtr.h>
#include <wtf/text/AtomicString.h>
#include <wtf/text/CString.h>
#include <wtf/MathExtras.h>
#include <wtf/StringPrintStream.h>

#include <gst/audio/streamvolume.h>
#include <gst/video/gstvideometa.h>

#if ENABLE(ENCRYPTED_MEDIA)
#include "CDMInstance.h"
#include "GStreamerEMEUtilities.h"
#include "SharedBuffer.h"
#include "WebKitCommonEncryptionDecryptorGStreamer.h"
#endif

#if USE(GSTREAMER_GL)
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define GST_GL_CAPS_FORMAT "{ BGRx, BGRA }"
#define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureBGRAToRGBA
#define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertBGRAToRGBA
#else
#define GST_GL_CAPS_FORMAT "{ xRGB, ARGB }"
#define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureARGBToRGBA
#define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertARGBToRGBA
#endif

#include <gst/app/gstappsink.h>

#if USE(LIBEPOXY)
// Include the <epoxy/gl.h> header before <gst/gl/gl.h>.
#include <epoxy/gl.h>

// Workaround build issue with RPi userland GLESv2 headers and libepoxy <https://webkit.org/b/185639>
#if !GST_CHECK_VERSION(1, 14, 0)
#include <gst/gl/gstglconfig.h>
#if defined(GST_GL_HAVE_WINDOW_DISPMANX) && GST_GL_HAVE_WINDOW_DISPMANX
#define __gl2_h_
#undef GST_GL_HAVE_GLSYNC
#define GST_GL_HAVE_GLSYNC 1
#endif
#endif // !GST_CHECK_VERSION(1, 14, 0)
#endif // USE(LIBEPOXY)

#define GST_USE_UNSTABLE_API
#include <gst/gl/gl.h>
#undef GST_USE_UNSTABLE_API

#include "GLContext.h"
#if USE(GLX)
#include "GLContextGLX.h"
#include <gst/gl/x11/gstgldisplay_x11.h>
#endif

#if USE(EGL)
#include "GLContextEGL.h"
#include <gst/gl/egl/gstgldisplay_egl.h>
#endif

#if PLATFORM(X11)
#include "PlatformDisplayX11.h"
#endif

#if PLATFORM(WAYLAND)
#include "PlatformDisplayWayland.h"
#elif PLATFORM(WPE)
#include "PlatformDisplayLibWPE.h"
#endif

// gstglapi.h may include eglplatform.h and it includes X.h, which
// defines None, breaking MediaPlayer::None enum
#if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#undef None
#endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#include "VideoTextureCopierGStreamer.h"
#endif // USE(GSTREAMER_GL)

#if USE(TEXTURE_MAPPER_GL)
#include "BitmapTextureGL.h"
#include "BitmapTexturePool.h"
#include "TextureMapperContextAttributes.h"
#include "TextureMapperGL.h"
#include "TextureMapperPlatformLayerBuffer.h"
#include "TextureMapperPlatformLayerProxy.h"
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
#include <cairo-gl.h>
#endif
#endif // USE(TEXTURE_MAPPER_GL)

GST_DEBUG_CATEGORY(webkit_media_player_debug);
#define GST_CAT_DEFAULT webkit_media_player_debug


namespace WebCore {
using namespace std;

#if USE(GSTREAMER_HOLEPUNCH)
static const FloatSize s_holePunchDefaultFrameSize(1280, 720);
#endif

static int greatestCommonDivisor(int a, int b)
{
    while (b) {
        int temp = a;
        a = b;
        b = temp % b;
    }

    return ABS(a);
}
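
// Example: greatestCommonDivisor(1280, 720) is 80, so a 1280x720 frame reduces to a 16:9 ratio.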

#if USE(TEXTURE_MAPPER_GL)
static inline TextureMapperGL::Flags texMapFlagFromOrientation(const ImageOrientation& orientation)
{
    switch (orientation) {
    case DefaultImageOrientation:
        return 0;
    case OriginRightTop:
        return TextureMapperGL::ShouldRotateTexture90;
    case OriginBottomRight:
        return TextureMapperGL::ShouldRotateTexture180;
    case OriginLeftBottom:
        return TextureMapperGL::ShouldRotateTexture270;
    default:
        ASSERT_NOT_REACHED();
    }

    return 0;
}
#endif

#if USE(TEXTURE_MAPPER_GL)
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
    explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags, bool gstGLEnabled)
    {
        GstVideoInfo videoInfo;
        if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
            return;

        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
        m_hasAlphaChannel = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo);
        m_buffer = gst_sample_get_buffer(sample);
        if (UNLIKELY(!GST_IS_BUFFER(m_buffer)))
            return;

#if USE(GSTREAMER_GL)
        m_flags = flags | (m_hasAlphaChannel ? TextureMapperGL::ShouldBlend : 0) | TEXTURE_MAPPER_COLOR_CONVERT_FLAG;

        if (gstGLEnabled) {
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL));
            if (m_isMapped)
                m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
        } else
#endif // USE(GSTREAMER_GL)

        {
            m_textureID = 0;
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer, GST_MAP_READ);
            if (m_isMapped) {
                // Right now the TextureMapper only supports chromas with one plane
                ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
            }
        }
    }

    virtual ~GstVideoFrameHolder()
    {
        if (UNLIKELY(!m_isMapped))
            return;

        gst_video_frame_unmap(&m_videoFrame);
    }

    const IntSize& size() const { return m_size; }
    bool hasAlphaChannel() const { return m_hasAlphaChannel; }
    TextureMapperGL::Flags flags() const { return m_flags; }
    GLuint textureID() const { return m_textureID; }

    void updateTexture(BitmapTextureGL& texture)
    {
        ASSERT(!m_textureID);
        GstVideoGLTextureUploadMeta* meta;
        if ((meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer))) {
            if (meta->n_textures == 1) { // BGRx & BGRA formats use only one texture.
                guint ids[4] = { texture.id(), 0, 0, 0 };

                if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                    return;
            }
        }

        int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 0);
        const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0);
        texture.updateContents(srcData, WebCore::IntRect(0, 0, m_size.width(), m_size.height()), WebCore::IntPoint(0, 0), stride);
    }

private:
    GstBuffer* m_buffer;
    GstVideoFrame m_videoFrame { };
    IntSize m_size;
    bool m_hasAlphaChannel;
    TextureMapperGL::Flags m_flags { };
    GLuint m_textureID { 0 };
    bool m_isMapped { false };
};
#endif

void MediaPlayerPrivateGStreamerBase::initializeDebugCategory()
{
    GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
}

MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
    , m_player(player)
    , m_fpsSink(nullptr)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_networkState(MediaPlayer::Empty)
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
#if USE(TEXTURE_MAPPER_GL)
#if USE(NICOSIA)
    , m_nicosiaLayer(Nicosia::ContentLayer::create(Nicosia::ContentLayerTextureMapperImpl::createFactory(*this)))
#else
    , m_platformLayerProxy(adoptRef(new TextureMapperPlatformLayerProxy()))
#endif
#endif
{
}

MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
#if USE(TEXTURE_MAPPER_GL) && USE(NICOSIA)
    downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).invalidateClient();
#endif

#if ENABLE(ENCRYPTED_MEDIA)
    m_protectionCondition.notifyAll();
#endif
    m_notifier->invalidate();

    if (m_videoSink) {
        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
#if USE(GSTREAMER_GL)
        if (GST_IS_BIN(m_videoSink.get())) {
            GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
            g_signal_handlers_disconnect_by_data(appsink.get(), this);
        }
#endif
    }

    if (m_volumeElement)
        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    // This will release the GStreamer thread from m_drawCondition in non-AC mode in case there's an ongoing triggerRepaint call
    // waiting there, and ensure that any triggerRepaint call reaching the lock won't wait on m_drawCondition.
    cancelRepaint(true);

    // The change to GST_STATE_NULL state is always synchronous. So after this gets executed we don't need to worry
    // about handlers running in the GStreamer thread.
    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);

    m_player = nullptr;
}

void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
{
    m_pipeline = pipeline;

    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
        auto& player = *static_cast<MediaPlayerPrivateGStreamerBase*>(userData);

        if (player.handleSyncMessage(message)) {
            gst_message_unref(message);
            return GST_BUS_DROP;
        }

        return GST_BUS_PASS;
    }, this, nullptr);
}

bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
{
    UNUSED_PARAM(message);
    if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
        return false;

    const gchar* contextType;
    gst_message_parse_context_type(message, &contextType);
    GST_DEBUG_OBJECT(pipeline(), "Handling %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message));

#if USE(GSTREAMER_GL)
    GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType));
    if (elementContext) {
        gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
        return true;
    }
#endif // USE(GSTREAMER_GL)

#if ENABLE(ENCRYPTED_MEDIA)
    if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
        if (isMainThread()) {
            GST_ERROR("can't handle drm-preferred-decryption-system-id need context message in the main thread");
            ASSERT_NOT_REACHED();
            return false;
        }
        GST_DEBUG_OBJECT(pipeline(), "handling drm-preferred-decryption-system-id need context message");
        LockHolder lock(m_protectionMutex);
        ProtectionSystemEvents protectionSystemEvents(message);
        GST_TRACE("found %lu protection events, %lu decryptors available", protectionSystemEvents.events().size(), protectionSystemEvents.availableSystems().size());
        InitData initData;

        for (auto& event : protectionSystemEvents.events()) {
            const char* eventKeySystemId = nullptr;
            GstBuffer* data = nullptr;
            gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);

            initData.append({eventKeySystemId, data});
            m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
        }

        initializationDataEncountered(WTFMove(initData));

        GST_INFO_OBJECT(pipeline(), "waiting for a CDM instance");
        m_protectionCondition.waitFor(m_protectionMutex, Seconds(4), [this] {
            return this->m_cdmInstance;
        });

        if (m_cdmInstance && !m_cdmInstance->keySystem().isEmpty()) {
            const char* preferredKeySystemUuid = GStreamerEMEUtilities::keySystemToUuid(m_cdmInstance->keySystem());
            GST_INFO_OBJECT(pipeline(), "working with key system %s, continuing with key system %s on %s", m_cdmInstance->keySystem().utf8().data(), preferredKeySystemUuid, GST_MESSAGE_SRC_NAME(message));

            GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
            GstStructure* contextStructure = gst_context_writable_structure(context.get());
            gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, preferredKeySystemUuid, nullptr);
            gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
        } else
            GST_WARNING("CDM instance not initialized");

        return true;
    }
#endif // ENABLE(ENCRYPTED_MEDIA)

    return false;
}

#if USE(GSTREAMER_GL)
GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const char* contextType)
{
    if (!ensureGstGLContext())
        return nullptr;

    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(displayContext, gstGLDisplay());
        return displayContext;
    }

    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(appContext);
#if GST_CHECK_VERSION(1, 11, 0)
        gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, gstGLContext(), nullptr);
#else
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, gstGLContext(), nullptr);
#endif
        return appContext;
    }

    return nullptr;
}

bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
{
    if (m_glContext)
        return true;

    auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();

    // The floating ref removal support was added in https://bugzilla.gnome.org/show_bug.cgi?id=743062.
    bool shouldAdoptRef = webkitGstCheckVersion(1, 13, 1);
    if (!m_glDisplay) {
#if PLATFORM(X11)
#if USE(GLX)
        if (is<PlatformDisplayX11>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared GL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
        }
#elif USE(EGL)
        if (is<PlatformDisplayX11>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared EGL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
        }
#endif
#endif

#if PLATFORM(WAYLAND)
        if (is<PlatformDisplayWayland>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating Wayland shared display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
        }
#endif

#if PLATFORM(WPE)
        ASSERT(is<PlatformDisplayLibWPE>(sharedDisplay));
        GST_DEBUG_OBJECT(pipeline(), "Creating WPE shared EGL display");
        if (shouldAdoptRef)
            m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayLibWPE>(sharedDisplay).eglDisplay())));
        else
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayLibWPE>(sharedDisplay).eglDisplay()));
#endif

        ASSERT(m_glDisplay);
    }

    GLContext* webkitContext = sharedDisplay.sharingGLContext();
    // EGL and GLX are mutually exclusive, no need for ifdefs here.
    GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;

#if USE(OPENGL_ES)
    GstGLAPI glAPI = GST_GL_API_GLES2;
#elif USE(OPENGL)
    GstGLAPI glAPI = GST_GL_API_OPENGL;
#else
    ASSERT_NOT_REACHED();
#endif

    PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
    if (!contextHandle)
        return false;

    if (shouldAdoptRef)
        m_glContext = adoptGRef(gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI));
    else
        m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);

    return true;
}
#endif // USE(GSTREAMER_GL)

// Returns the size of the video
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
#if USE(GSTREAMER_HOLEPUNCH)
    // When using the holepunch we may not be able to get the video frame size, so we
    // can't use it. But we need to report some non-empty naturalSize for the player's
    // GraphicsLayer to be properly created.
    return s_holePunchDefaultFrameSize;
#endif

    if (!hasVideo())
        return FloatSize();

    if (!m_videoSize.isEmpty())
        return m_videoSize;

    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return FloatSize();

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return FloatSize();


    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size, if this fails the
    // video-sink has likely not yet negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return FloatSize();

#if USE(TEXTURE_MAPPER_GL)
    // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
    if (m_renderingCanBeAccelerated) {
        if (m_videoSourceOrientation.usesWidthAsHeight())
            originalSize = originalSize.transposedSize();
    }
#endif

    GST_DEBUG_OBJECT(pipeline(), "Original video size: %dx%d", originalSize.width(), originalSize.height());
    GST_DEBUG_OBJECT(pipeline(), "Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate DAR based on PAR and video size.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

    // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        GST_DEBUG_OBJECT(pipeline(), "Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        GST_DEBUG_OBJECT(pipeline(), "Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        GST_DEBUG_OBJECT(pipeline(), "Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    GST_DEBUG_OBJECT(pipeline(), "Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}
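
// Worked example of the DAR math above (illustrative values): a 720x576 frame
// with a 16/15 pixel aspect ratio gives displayWidth = 720 * 16 = 11520 and
// displayHeight = 576 * 15 = 8640, whose GCD is 2880, so the DAR reduces to
// 4:3. Since 576 % 3 == 0 the original height is kept and the width becomes
// 576 * 4 / 3 = 768, yielding a natural size of 768x576.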

void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
{
    if (!m_volumeElement)
        return;

    GST_DEBUG_OBJECT(pipeline(), "Setting volume: %f", volume);
    gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
}

float MediaPlayerPrivateGStreamerBase::volume() const
{
    if (!m_volumeElement)
        return 0;

    return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
}


void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
{
    if (!m_player || !m_volumeElement)
        return;
    double volume;
    volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
    // get_volume() can return values greater than 1.0 if the user
    // applies software gain via a third-party application (GNOME
    // volume control, for instance).
    volume = CLAMP(volume, 0.0, 1.0);
    m_player->volumeChanged(static_cast<float>(volume));
}

void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::volume signal.
    GST_DEBUG_OBJECT(player->pipeline(), "Volume changed to: %f", player->volume());

    player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] {
        player->notifyPlayerOfVolumeChange();
    });
}

MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
    return m_networkState;
}

MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
    return m_readyState;
}

void MediaPlayerPrivateGStreamerBase::sizeChanged()
{
    notImplemented();
}

void MediaPlayerPrivateGStreamerBase::setMuted(bool mute)
{
    if (!m_volumeElement)
        return;

    bool currentValue = muted();
    if (currentValue == mute)
        return;

    GST_INFO_OBJECT(pipeline(), "Set muted to %s", toString(mute).utf8().data());
    g_object_set(m_volumeElement.get(), "mute", mute, nullptr);
}

bool MediaPlayerPrivateGStreamerBase::muted() const
{
    if (!m_volumeElement)
        return false;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
    GST_INFO_OBJECT(pipeline(), "Player is muted: %s", toString(static_cast<bool>(muted)).utf8().data());
    return muted;
}

void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
{
    if (!m_player || !m_volumeElement)
        return;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
    m_player->muteChanged(static_cast<bool>(muted));
}

void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::mute signal.
    player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] {
        player->notifyPlayerOfMute();
    });
}

void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
{
    m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled();
}

#if USE(TEXTURE_MAPPER_GL)
PlatformLayer* MediaPlayerPrivateGStreamerBase::platformLayer() const
{
#if USE(NICOSIA)
    return m_nicosiaLayer.ptr();
#else
    return const_cast<MediaPlayerPrivateGStreamerBase*>(this);
#endif
}

#if USE(NICOSIA)
void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
#if USE(GSTREAMER_HOLEPUNCH)
    pushNextHolePunchBuffer();
#endif
}
#else
RefPtr<TextureMapperPlatformLayerProxy> MediaPlayerPrivateGStreamerBase::proxy() const
{
    return m_platformLayerProxy.copyRef();
}

void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
#if USE(GSTREAMER_HOLEPUNCH)
    pushNextHolePunchBuffer();
#endif
}
#endif

void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    auto proxyOperation =
        [this](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder holder(proxy.lock());

            if (!proxy.isActive())
                return;

            std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation), !m_usingFallbackVideoSink);

            GLuint textureID = frameHolder->textureID();
            std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer;
            if (textureID) {
                layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(textureID, frameHolder->size(), frameHolder->flags(), GraphicsContext3D::RGBA);
                layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
            } else {
                layerBuffer = proxy.getAvailableBuffer(frameHolder->size(), GL_DONT_CARE);
                if (UNLIKELY(!layerBuffer)) {
                    auto texture = BitmapTextureGL::create(TextureMapperContextAttributes::get());
                    texture->reset(frameHolder->size(), frameHolder->hasAlphaChannel() ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
                    layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
                }
                frameHolder->updateTexture(layerBuffer->textureGL());
                layerBuffer->setExtraFlags(texMapFlagFromOrientation(m_videoSourceOrientation) | (frameHolder->hasAlphaChannel() ? TextureMapperGL::ShouldBlend : 0));
            }
            proxy.pushNextBuffer(WTFMove(layerBuffer));
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
#endif // USE(TEXTURE_MAPPER_GL)

void MediaPlayerPrivateGStreamerBase::repaint()
{
    ASSERT(m_sample);
    ASSERT(isMainThread());

    m_player->repaint();

    LockHolder lock(m_drawMutex);
    m_drawCondition.notifyOne();
}

void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
    bool triggerResize;
    {
        auto sampleLocker = holdLock(m_sampleMutex);
        triggerResize = !m_sample;
        m_sample = sample;
    }

    if (triggerResize) {
        GST_DEBUG_OBJECT(pipeline(), "First sample reached the sink, triggering video dimensions update");
        m_notifier->notify(MainThreadNotification::SizeChanged, [this] {
            m_player->sizeChanged();
        });
    }

    if (!m_renderingCanBeAccelerated) {
        LockHolder locker(m_drawMutex);
        if (m_destroying)
            return;
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
        return;
    }

#if USE(TEXTURE_MAPPER_GL)
    if (m_usingFallbackVideoSink) {
        LockHolder lock(m_drawMutex);
        auto proxyOperation =
            [this](TextureMapperPlatformLayerProxy& proxy)
            {
                return proxy.scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); });
            };
#if USE(NICOSIA)
        if (!proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy()))
            return;
#else
        if (!proxyOperation(*m_platformLayerProxy))
            return;
#endif
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
    } else
        pushTextureToCompositor();
#endif // USE(TEXTURE_MAPPER_GL)
}

void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
{
    player->triggerRepaint(sample);
}

void MediaPlayerPrivateGStreamerBase::cancelRepaint(bool destroying)
{
    // The goal of this function is to release the GStreamer thread from m_drawCondition in triggerRepaint() in non-AC case,
    // to avoid a deadlock if the player gets paused while waiting for drawing (see https://bugs.webkit.org/show_bug.cgi?id=170003):
    // the main thread is waiting for the GStreamer thread to pause, but the GStreamer thread is locked waiting for the
    // main thread to draw. This deadlock doesn't happen when using AC because the sample is processed (not painted) in the compositor
    // thread, so the main thread can request the pause and wait if the GStreamer thread is waiting for the compositor thread.
    //
    // This function is also used when destroying the player (destroying parameter is true), to release the GStreamer thread from
    // m_drawCondition and to ensure that new triggerRepaint calls won't wait on m_drawCondition.
    if (!m_renderingCanBeAccelerated) {
        LockHolder locker(m_drawMutex);
        m_drawTimer.stop();
        m_destroying = destroying;
        m_drawCondition.notifyOne();
    }
}

void MediaPlayerPrivateGStreamerBase::repaintCancelledCallback(MediaPlayerPrivateGStreamerBase* player)
{
    player->cancelRepaint();
}

#if USE(GSTREAMER_GL)
GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}

GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}

void MediaPlayerPrivateGStreamerBase::flushCurrentBuffer()
{
    GST_DEBUG_OBJECT(pipeline(), "Flushing video sample");
    auto sampleLocker = holdLock(m_sampleMutex);

    if (m_sample) {
        // Replace the sample with a new one that carries only the caps, so this dummy sample can still be used to get the dimensions.
        // This prevents resizing problems when the video changes its quality and a DRAIN is performed.
        const GstStructure* info = gst_sample_get_info(m_sample.get());
        m_sample = adoptGRef(gst_sample_new(nullptr, gst_sample_get_caps(m_sample.get()),
            gst_sample_get_segment(m_sample.get()), info ? gst_structure_copy(info) : nullptr));
    }

    auto proxyOperation =
        [](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder locker(proxy.lock());

            if (proxy.isActive())
                proxy.dropCurrentBufferWhilePreservingTexture();
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
#endif

void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
{
    m_size = size;
}

void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (context.paintingDisabled())
        return;

    if (!m_player->visible())
        return;

    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    ImagePaintingOptions paintingOptions(CompositeCopy);
    if (m_renderingCanBeAccelerated)
        paintingOptions.m_orientationDescription.setImageOrientationEnum(m_videoSourceOrientation);

    auto gstImage = ImageGStreamer::createImage(m_sample.get());
    if (!gstImage)
        return;

    context.drawImage(gstImage->image(), rect, gstImage->rect(), paintingOptions);
}

#if USE(GSTREAMER_GL)
bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    UNUSED_PARAM(context);

    if (m_usingFallbackVideoSink)
        return false;

    if (premultiplyAlpha)
        return false;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return false;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation), true);

    auto textureID = frameHolder->textureID();
    if (!textureID)
        return false;

    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);
}

NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
{
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
    if (m_usingFallbackVideoSink)
        return nullptr;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return nullptr;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation), true);

    auto textureID = frameHolder->textureID();
    if (!textureID)
        return nullptr;

    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
    context->makeContextCurrent();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    if (!m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, 0, GraphicsContext3D::TEXTURE_2D, 0, GraphicsContext3D::RGBA, GraphicsContext3D::RGBA, GraphicsContext3D::UNSIGNED_BYTE, false, m_videoSourceOrientation))
        return nullptr;

    return adoptRef(cairo_gl_surface_create_for_texture(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, m_videoTextureCopier->resultTexture(), size.width(), size.height()));
#else
    return nullptr;
#endif
}
#endif // USE(GSTREAMER_GL)

void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(const ImageOrientation& orientation)
{
    if (m_videoSourceOrientation == orientation)
        return;

    m_videoSourceOrientation = orientation;
}

bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
    return true;
}

MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
{
    if (m_readyState == MediaPlayer::HaveNothing)
        return MediaPlayer::Unknown;

    if (isLiveStream())
        return MediaPlayer::LiveStream;

    return MediaPlayer::Download;
}

#if USE(GSTREAMER_GL)
GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
{
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
    if (!appsink)
        return nullptr;

    g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
    g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
    g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(appsink, "sink"));
    gst_pad_add_probe(pad.get(), static_cast<GstPadProbeType>(GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_FLUSH), [] (GstPad*, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn {
        // On some platforms (e.g. OpenMAX on the Raspberry Pi), when a resolution change occurs, the
        // pipeline has to be drained before a frame with the new resolution can be decoded.
        // In this context, it's important that we don't hold references to any previous frame
        // (e.g. m_sample) so that decoding can continue.
        // We are also not supposed to keep the original frame after a flush.
        if (info->type & GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM) {
            if (GST_QUERY_TYPE(GST_PAD_PROBE_INFO_QUERY(info)) != GST_QUERY_DRAIN)
                return GST_PAD_PROBE_OK;
            GST_DEBUG("Acting upon DRAIN query");
        }
        if (info->type & GST_PAD_PROBE_TYPE_EVENT_FLUSH) {
            if (GST_EVENT_TYPE(GST_PAD_PROBE_INFO_EVENT(info)) != GST_EVENT_FLUSH_START)
                return GST_PAD_PROBE_OK;
            GST_DEBUG("Acting upon flush-start event");
        }

        auto* player = static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
        player->flushCurrentBuffer();
        return GST_PAD_PROBE_OK;
    }, this, nullptr);

    return appsink;
}

GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
{
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    gboolean result = TRUE;
    GstElement* videoSink = gst_bin_new(nullptr);
    GstElement* upload = gst_element_factory_make("glupload", nullptr);
    GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
    GstElement* appsink = createGLAppSink();

    if (!appsink || !upload || !colorconvert) {
        GST_WARNING("Failed to create GstGL elements");
        gst_object_unref(videoSink);

        if (upload)
            gst_object_unref(upload);
        if (colorconvert)
            gst_object_unref(colorconvert);
        if (appsink)
            gst_object_unref(appsink);

        g_warning("WebKit wasn't able to find the GStreamer opengl plugin. Hardware-accelerated zero-copy video rendering can't be enabled without this plugin.");
        return nullptr;
    }

    gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);

    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) " GST_GL_CAPS_FORMAT));

    result &= gst_element_link_pads(upload, "src", colorconvert, "sink");
    result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get());

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
    gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));

    if (!result) {
        GST_WARNING("Failed to link GstGL elements");
        gst_object_unref(videoSink);
        videoSink = nullptr;
    }
    return videoSink;
}

void MediaPlayerPrivateGStreamerBase::ensureGLVideoSinkContext()
{
    if (!m_glDisplayElementContext)
        m_glDisplayElementContext = adoptGRef(requestGLContext(GST_GL_DISPLAY_CONTEXT_TYPE));

    if (m_glDisplayElementContext)
        gst_element_set_context(m_videoSink.get(), m_glDisplayElementContext.get());

    if (!m_glAppElementContext)
        m_glAppElementContext = adoptGRef(requestGLContext("gst.gl.app_context"));

    if (m_glAppElementContext)
        gst_element_set_context(m_videoSink.get(), m_glAppElementContext.get());
}
#endif // USE(GSTREAMER_GL)

#if USE(GSTREAMER_HOLEPUNCH)
static void setRectangleToVideoSink(GstElement* videoSink, const IntRect& rect)
{
    // Platform-dependent code to set the size and position of the video
    // rendering window on the videoSink goes here. Mark the parameters unused by default.
    UNUSED_PARAM(videoSink);
    UNUSED_PARAM(rect);
}
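
// A minimal sketch of what a platform implementation might do here, assuming a
// sink that exposes a "rectangle" property taking an "x,y,width,height" string
// (as some embedded sinks do; the property name is an assumption and must match
// the sink actually in use):
//
//     GUniquePtr<gchar> rectString(g_strdup_printf("%d,%d,%d,%d",
//         rect.x(), rect.y(), rect.width(), rect.height()));
//     g_object_set(videoSink, "rectangle", rectString.get(), nullptr);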

class GStreamerHolePunchClient : public TextureMapperPlatformLayerBuffer::HolePunchClient {
public:
    GStreamerHolePunchClient(GRefPtr<GstElement>&& videoSink) : m_videoSink(WTFMove(videoSink)) { };
    void setVideoRectangle(const IntRect& rect) final { setRectangleToVideoSink(m_videoSink.get(), rect); }
private:
    GRefPtr<GstElement> m_videoSink;
};

GstElement* MediaPlayerPrivateGStreamerBase::createHolePunchVideoSink()
{
    // Platform-dependent code to create the videoSink goes here. By default we use
    // a fakevideosink so nothing is drawn to the page.
    GstElement* videoSink = gst_element_factory_make("fakevideosink", nullptr);

    return videoSink;
}
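
// A platform port would typically swap the fakevideosink above for its own
// hole-punch-capable sink. A minimal sketch, assuming a hypothetical
// "westerossink" element is available on the target platform:
//
//     if (GstElement* platformSink = gst_element_factory_make("westerossink", nullptr))
//         return platformSink;
//     // Fall back to fakevideosink if the platform sink is missing.
//     return gst_element_factory_make("fakevideosink", nullptr);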

// Pushes a placeholder buffer with no texture (ID 0) and a hole-punch client to
// the compositor. The TextureMapper then leaves a transparent hole where the
// video should be and reports the final on-screen rectangle back through
// setVideoRectangle(), so the platform-dependent sink can position the video
// plane underneath the page.
void MediaPlayerPrivateGStreamerBase::pushNextHolePunchBuffer()
{
    auto proxyOperation =
        [this](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder holder(proxy.lock());
            std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(0, m_size, TextureMapperGL::ShouldNotBlend, GL_DONT_CARE);
            std::unique_ptr<GStreamerHolePunchClient> holePunchClient = std::make_unique<GStreamerHolePunchClient>(m_videoSink.get());
            layerBuffer->setHolePunchClient(WTFMove(holePunchClient));
            proxy.pushNextBuffer(WTFMove(layerBuffer));
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
#endif

GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    acceleratedRenderingStateChanged();

#if USE(GSTREAMER_HOLEPUNCH)
    m_videoSink = createHolePunchVideoSink();
    pushNextHolePunchBuffer();
    return m_videoSink.get();
#endif

#if USE(GSTREAMER_GL)
    if (m_renderingCanBeAccelerated)
        m_videoSink = createVideoSinkGL();
#endif

    if (!m_videoSink) {
        m_usingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
        g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
    }

    GstElement* videoSink = nullptr;
#if ENABLE(MEDIA_STATISTICS)
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        g_object_set(m_fpsSink.get(), "silent", TRUE, nullptr);

        // Turn off text overlay unless tracing is enabled.
        if (gst_debug_category_get_threshold(webkit_media_player_debug) < GST_LEVEL_TRACE)
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE, nullptr);

        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }
#endif

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);

    return videoSink;
}

void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
{
    ASSERT(!m_volumeElement);
    m_volumeElement = volume;

    // We don't set the initial volume because we trust the sink to keep it for us. See
    // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
    if (!m_player->platformVolumeConfigurationRequired()) {
        GST_DEBUG_OBJECT(pipeline(), "Setting stream volume to %f", m_player->volume());
        g_object_set(m_volumeElement.get(), "volume", m_player->volume(), nullptr);
    } else
        GST_DEBUG_OBJECT(pipeline(), "Not setting stream volume, trusting system one");

    GST_DEBUG_OBJECT(pipeline(), "Setting stream muted %s", toString(m_player->muted()).utf8().data());
    g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);

    g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
    g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
}

unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
{
    guint64 decodedFrames = 0;
    if (m_fpsSink)
        g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
    return static_cast<unsigned>(decodedFrames);
}

unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
{
    guint64 framesDropped = 0;
    if (m_fpsSink)
        g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
    return static_cast<unsigned>(framesDropped);
}

unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    if (audioSink() && gst_element_query(audioSink(), query))
        gst_query_parse_position(query, 0, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}

unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    if (gst_element_query(m_videoSink.get(), query))
        gst_query_parse_position(query, 0, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}

#if ENABLE(ENCRYPTED_MEDIA)
void MediaPlayerPrivateGStreamerBase::initializationDataEncountered(InitData&& initData)
{
    ASSERT(!isMainThread());

    RunLoop::main().dispatch([weakThis = makeWeakPtr(*this), initData = WTFMove(initData)] {
        if (!weakThis)
            return;

        GST_DEBUG("scheduling initializationDataEncountered event of size %lu", initData.payload()->size());
        GST_MEMDUMP("init data", reinterpret_cast<const uint8_t*>(initData.payload()->data()), initData.payload()->size());
        weakThis->m_player->initializationDataEncountered(initData.payloadContainerType(), initData.payload()->tryCreateArrayBuffer());
    });
}

void MediaPlayerPrivateGStreamerBase::cdmInstanceAttached(CDMInstance& instance)
{
    ASSERT(isMainThread());

    if (m_cdmInstance == &instance)
        return;

    if (!m_pipeline) {
        GST_ERROR("no pipeline yet");
        ASSERT_NOT_REACHED();
        return;
    }

    m_cdmInstance = &instance;

    GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-cdm-instance", FALSE));
    GstStructure* contextStructure = gst_context_writable_structure(context.get());
    gst_structure_set(contextStructure, "cdm-instance", G_TYPE_POINTER, m_cdmInstance.get(), nullptr);
    gst_element_set_context(GST_ELEMENT(m_pipeline.get()), context.get());

    GST_DEBUG_OBJECT(m_pipeline.get(), "CDM instance %p dispatched as context", m_cdmInstance.get());

    m_protectionCondition.notifyAll();
}

void MediaPlayerPrivateGStreamerBase::cdmInstanceDetached(CDMInstance& instance)
{
    ASSERT(isMainThread());

    if (m_cdmInstance != &instance) {
        GST_WARNING("passed CDMInstance %p is different from stored one %p", &instance, m_cdmInstance.get());
        ASSERT_NOT_REACHED();
        return;
    }

    ASSERT(m_pipeline);

    GST_DEBUG_OBJECT(m_pipeline.get(), "detaching CDM instance %p, setting empty context", m_cdmInstance.get());
    m_cdmInstance = nullptr;

    GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-cdm-instance", FALSE));
    gst_element_set_context(GST_ELEMENT(m_pipeline.get()), context.get());

    m_protectionCondition.notifyAll();
}

void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithInstance(CDMInstance& instance)
{
    ASSERT(m_cdmInstance.get() == &instance);
    GST_TRACE("instance %p, current stored %p", &instance, m_cdmInstance.get());
    attemptToDecryptWithLocalInstance();
}

void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithLocalInstance()
{
    bool eventHandled = gst_element_send_event(pipeline(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB, gst_structure_new_empty("attempt-to-decrypt")));
    GST_DEBUG("attempting to decrypt, event handled %s", boolForPrinting(eventHandled));
}

void MediaPlayerPrivateGStreamerBase::handleProtectionEvent(GstEvent* event)
{
    if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
        GST_DEBUG_OBJECT(pipeline(), "event %u already handled", GST_EVENT_SEQNUM(event));
        return;
    }
    GST_DEBUG_OBJECT(pipeline(), "handling event %u from MSE", GST_EVENT_SEQNUM(event));
    const char* eventKeySystemUUID = nullptr;
    GstBuffer* initData = nullptr;
    gst_event_parse_protection(event, &eventKeySystemUUID, &initData, nullptr);
    initializationDataEncountered({eventKeySystemUUID, initData});
}

void MediaPlayerPrivateGStreamerBase::setWaitingForKey(bool waitingForKey)
{
    // Bail out if the value did not change, or if we are asked to stop waiting while there are still decryptors waiting for a key.
    GST_TRACE("waitingForKey %s, m_waitingForKey %s", boolForPrinting(waitingForKey), boolForPrinting(m_waitingForKey));
    if (waitingForKey == m_waitingForKey || (!waitingForKey && this->waitingForKey()))
        return;

    m_waitingForKey = waitingForKey;
    GST_DEBUG("waiting for key changed %s", boolForPrinting(m_waitingForKey));
    m_player->waitingForKeyChanged();
}

bool MediaPlayerPrivateGStreamerBase::waitingForKey() const
{
    if (!m_pipeline)
        return false;

    GstState state;
    gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);

    bool result = false;
    GRefPtr<GstQuery> query = adoptGRef(gst_query_new_custom(GST_QUERY_CUSTOM, gst_structure_new_empty("any-decryptor-waiting-for-key")));
    if (state >= GST_STATE_PAUSED) {
        result = gst_element_query(m_pipeline.get(), query.get());
        GST_TRACE("query result %s, on %s", boolForPrinting(result), gst_element_state_get_name(state));
    } else if (state >= GST_STATE_READY) {
        // Running a query in the pipeline is easier, but it only works when the pipeline is set up and running; otherwise we need to inspect the pipeline and ask the decryptors directly.
        GUniquePtr<GstIterator> iterator(gst_bin_iterate_recurse(GST_BIN(m_pipeline.get())));
        GstIteratorResult iteratorResult;
        do {
            iteratorResult = gst_iterator_fold(iterator.get(), [](const GValue *item, GValue *, gpointer data) -> gboolean {
                GstElement* element = GST_ELEMENT(g_value_get_object(item));
                GstQuery* query = GST_QUERY(data);
                return !WEBKIT_IS_MEDIA_CENC_DECRYPT(element) || !gst_element_query(element, query);
            }, nullptr, query.get());
            if (iteratorResult == GST_ITERATOR_RESYNC)
                gst_iterator_resync(iterator.get());
        } while (iteratorResult == GST_ITERATOR_RESYNC);
        if (iteratorResult == GST_ITERATOR_ERROR)
            GST_WARNING("iterator returned an error");
        result = iteratorResult == GST_ITERATOR_OK;
        GST_TRACE("iterator result %d, waiting %s", iteratorResult, boolForPrinting(result));
    }

    return result;
}
#endif

bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
{
    bool result = false;

#if ENABLE(ENCRYPTED_MEDIA)
    result = GStreamerEMEUtilities::isClearKeyKeySystem(keySystem);
#endif

    GST_DEBUG("checking for KeySystem support with %s and type %s: %s", keySystem.utf8().data(), mimeType.utf8().data(), boolForPrinting(result));
    return result;
}

MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
{
    UNUSED_PARAM(parameters);
    return result;
}

}

#endif // USE(GSTREAMER)