/*
 * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
 * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
 * Copyright (C) 2007 Alp Toker <alp@atoker.com>
 * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
 * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
 * Copyright (C) 2015, 2016 Metrological Group B.V.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"
#include "MediaPlayerPrivateGStreamerBase.h"

#if ENABLE(VIDEO) && USE(GSTREAMER)

#include "GStreamerCommon.h"
#include "GraphicsContext.h"
#include "GraphicsContext3D.h"
#include "ImageGStreamer.h"
#include "ImageOrientation.h"
#include "IntRect.h"
#include "Logging.h"
#include "MediaPlayer.h"
#include "NotImplemented.h"
#include "VideoSinkGStreamer.h"
#include <wtf/glib/GUniquePtr.h>
#include <wtf/text/AtomicString.h>
#include <wtf/text/CString.h>
#include <wtf/MathExtras.h>
#include <wtf/StringPrintStream.h>

#include <gst/audio/streamvolume.h>
#include <gst/video/gstvideometa.h>

#if ENABLE(ENCRYPTED_MEDIA)
#include "CDMInstance.h"
#include "GStreamerEMEUtilities.h"
#include "SharedBuffer.h"
#include "WebKitCommonEncryptionDecryptorGStreamer.h"
#endif

#if USE(GSTREAMER_GL)
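// The GL texture format negotiated with the sink depends on endianness: both variants
// describe 32-bit BGRA/ARGB layouts, and the matching color-convert flags below tell the
// texture mapper and the texture copier how to swizzle them back to RGBA.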
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define GST_GL_CAPS_FORMAT "{ BGRx, BGRA }"
#define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureBGRAToRGBA
#define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertBGRAToRGBA
#else
#define GST_GL_CAPS_FORMAT "{ xRGB, ARGB }"
#define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureARGBToRGBA
#define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertARGBToRGBA
#endif

#include <gst/app/gstappsink.h>

#if USE(LIBEPOXY)
// Include the <epoxy/gl.h> header before <gst/gl/gl.h>.
#include <epoxy/gl.h>

// Workaround build issue with RPi userland GLESv2 headers and libepoxy <https://webkit.org/b/185639>
#if !GST_CHECK_VERSION(1, 14, 0)
#include <gst/gl/gstglconfig.h>
#if defined(GST_GL_HAVE_WINDOW_DISPMANX) && GST_GL_HAVE_WINDOW_DISPMANX
#define __gl2_h_
#undef GST_GL_HAVE_GLSYNC
#define GST_GL_HAVE_GLSYNC 1
#endif
#endif // !GST_CHECK_VERSION(1, 14, 0)
#endif // USE(LIBEPOXY)

#define GST_USE_UNSTABLE_API
#include <gst/gl/gl.h>
#undef GST_USE_UNSTABLE_API

#include "GLContext.h"
#if USE(GLX)
#include "GLContextGLX.h"
#include <gst/gl/x11/gstgldisplay_x11.h>
#endif

#if USE(EGL)
#include "GLContextEGL.h"
#include <gst/gl/egl/gstgldisplay_egl.h>
#endif

#if PLATFORM(X11)
#include "PlatformDisplayX11.h"
#endif

#if PLATFORM(WAYLAND)
#include "PlatformDisplayWayland.h"
#elif PLATFORM(WPE)
#include "PlatformDisplayLibWPE.h"
#endif

// gstglapi.h may include eglplatform.h, which includes X.h; X.h defines None,
// breaking the MediaPlayer::None enum.
#if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#undef None
#endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#include "VideoTextureCopierGStreamer.h"
#endif // USE(GSTREAMER_GL)

#if USE(TEXTURE_MAPPER_GL)
#include "BitmapTextureGL.h"
#include "BitmapTexturePool.h"
#include "TextureMapperContextAttributes.h"
#include "TextureMapperGL.h"
#include "TextureMapperPlatformLayerBuffer.h"
#include "TextureMapperPlatformLayerProxy.h"
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
#include <cairo-gl.h>
#endif
#endif // USE(TEXTURE_MAPPER_GL)

GST_DEBUG_CATEGORY(webkit_media_player_debug);
#define GST_CAT_DEFAULT webkit_media_player_debug

namespace WebCore {
using namespace std;

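// Euclid's algorithm; used below to reduce the display aspect ratio. For example,
// greatestCommonDivisor(11520, 8640) == 2880, reducing 11520:8640 to 4:3.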
static int greatestCommonDivisor(int a, int b)
{
    while (b) {
        int temp = a;
        a = b;
        b = temp % b;
    }

    return ABS(a);
}

#if USE(TEXTURE_MAPPER_GL)
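// Maps EXIF-style image orientation values to TextureMapper rotation flags. Only the
// pure rotations are expected here; anything else trips the ASSERT_NOT_REACHED() below.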
static inline TextureMapperGL::Flags texMapFlagFromOrientation(const ImageOrientation& orientation)
{
    switch (orientation) {
    case DefaultImageOrientation:
        return 0;
    case OriginRightTop:
        return TextureMapperGL::ShouldRotateTexture90;
    case OriginBottomRight:
        return TextureMapperGL::ShouldRotateTexture180;
    case OriginLeftBottom:
        return TextureMapperGL::ShouldRotateTexture270;
    default:
        ASSERT_NOT_REACHED();
    }

    return 0;
}
#endif

#if USE(TEXTURE_MAPPER_GL)
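// Keeps a GstVideoFrame mapped (either as a GL texture or as raw memory) for as long as
// the compositor holds the TextureMapperPlatformLayerBuffer that wraps it; the frame is
// unmapped when the holder is destroyed.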
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
    explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags, bool gstGLEnabled)
    {
        GstVideoInfo videoInfo;
        if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
            return;

        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
        m_hasAlphaChannel = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo);
        m_buffer = gst_sample_get_buffer(sample);
        if (UNLIKELY(!GST_IS_BUFFER(m_buffer)))
            return;

#if USE(GSTREAMER_GL)
        m_flags = flags | (m_hasAlphaChannel ? TextureMapperGL::ShouldBlend : 0) | TEXTURE_MAPPER_COLOR_CONVERT_FLAG;

        if (gstGLEnabled) {
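            // When a frame is mapped with GST_MAP_GL, the plane data is not pixels:
            // plane 0 carries the name of the GL texture wrapping the decoded frame.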
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL));
            if (m_isMapped)
                m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
        } else
#endif // USE(GSTREAMER_GL)

        {
            m_textureID = 0;
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer, GST_MAP_READ);
            if (m_isMapped) {
                // Right now the TextureMapper only supports chroma formats with a single plane.
                ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
            }
        }
    }

    virtual ~GstVideoFrameHolder()
    {
        if (UNLIKELY(!m_isMapped))
            return;

        gst_video_frame_unmap(&m_videoFrame);
    }

    const IntSize& size() const { return m_size; }
    bool hasAlphaChannel() const { return m_hasAlphaChannel; }
    TextureMapperGL::Flags flags() const { return m_flags; }
    GLuint textureID() const { return m_textureID; }

    void updateTexture(BitmapTextureGL& texture)
    {
        ASSERT(!m_textureID);
        GstVideoGLTextureUploadMeta* meta;
        if ((meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer))) {
            if (meta->n_textures == 1) { // BGRx & BGRA formats use only one texture.
                guint ids[4] = { texture.id(), 0, 0, 0 };

                if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                    return;
            }
        }

        int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 0);
        const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0);
        texture.updateContents(srcData, WebCore::IntRect(0, 0, m_size.width(), m_size.height()), WebCore::IntPoint(0, 0), stride);
    }

private:
    GstBuffer* m_buffer;
    GstVideoFrame m_videoFrame { };
    IntSize m_size;
    bool m_hasAlphaChannel;
    TextureMapperGL::Flags m_flags { };
    GLuint m_textureID { 0 };
    bool m_isMapped { false };
};
#endif

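// Registers the "webkitmediaplayer" GStreamer debug category; it can be enabled at
// runtime with, for example, GST_DEBUG=webkitmediaplayer:7 in the environment.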
void MediaPlayerPrivateGStreamerBase::initializeDebugCategory()
{
    GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
}

MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
    , m_player(player)
    , m_fpsSink(nullptr)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_networkState(MediaPlayer::Empty)
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
#if USE(TEXTURE_MAPPER_GL)
#if USE(NICOSIA)
    , m_nicosiaLayer(Nicosia::ContentLayer::create(Nicosia::ContentLayerTextureMapperImpl::createFactory(*this)))
#else
    , m_platformLayerProxy(adoptRef(new TextureMapperPlatformLayerProxy()))
#endif
#endif
{
}

MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
#if USE(TEXTURE_MAPPER_GL) && USE(NICOSIA)
    downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).invalidateClient();
#endif

#if ENABLE(ENCRYPTED_MEDIA)
    m_protectionCondition.notifyAll();
#endif
    m_notifier->invalidate();

    if (m_videoSink) {
        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
#if USE(GSTREAMER_GL)
        if (GST_IS_BIN(m_videoSink.get())) {
            GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
            g_signal_handlers_disconnect_by_data(appsink.get(), this);
        }
#endif
    }

    if (m_volumeElement)
        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    // This will release the GStreamer thread from m_drawCondition in non-AC mode in case there's an ongoing triggerRepaint call
    // waiting there, and ensure that any triggerRepaint call reaching the lock won't wait on m_drawCondition.
    cancelRepaint(true);

    // The change to GST_STATE_NULL state is always synchronous. So after this gets executed we don't need to worry
    // about handlers running in the GStreamer thread.
    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);

    m_player = nullptr;
}

void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
{
    m_pipeline = pipeline;

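    // Bus sync handlers run on whichever thread posted the message (usually a streaming
    // thread), so handleSyncMessage() must not assume it is running on the main thread.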
    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
        auto& player = *static_cast<MediaPlayerPrivateGStreamerBase*>(userData);

        if (player.handleSyncMessage(message)) {
            gst_message_unref(message);
            return GST_BUS_DROP;
        }

        return GST_BUS_PASS;
    }, this, nullptr);
}

bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
{
    if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
        return false;

    const gchar* contextType;
    gst_message_parse_context_type(message, &contextType);
    GST_DEBUG_OBJECT(pipeline(), "Handling %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message));

#if USE(GSTREAMER_GL)
    GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType));
    if (elementContext) {
        gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
        return true;
    }
#endif // USE(GSTREAMER_GL)

#if ENABLE(ENCRYPTED_MEDIA)
    if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
        if (isMainThread()) {
            GST_ERROR("can't handle drm-preferred-decryption-system-id need-context message in the main thread");
            ASSERT_NOT_REACHED();
            return false;
        }
        GST_DEBUG_OBJECT(pipeline(), "handling drm-preferred-decryption-system-id need-context message");
        LockHolder lock(m_protectionMutex);
        ProtectionSystemEvents protectionSystemEvents(message);
        GST_TRACE("found %lu protection events, %lu decryptors available", protectionSystemEvents.events().size(), protectionSystemEvents.availableSystems().size());
        InitData initData;

        for (auto& event : protectionSystemEvents.events()) {
            const char* eventKeySystemId = nullptr;
            GstBuffer* data = nullptr;
            gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);

            initData.append({eventKeySystemId, data});
            m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
        }

        initializationDataEncountered(WTFMove(initData));

        GST_INFO_OBJECT(pipeline(), "waiting for a CDM instance");
        m_protectionCondition.waitFor(m_protectionMutex, Seconds(4), [this] {
            return this->m_cdmInstance;
        });

        if (m_cdmInstance && !m_cdmInstance->keySystem().isEmpty()) {
            const char* preferredKeySystemUuid = GStreamerEMEUtilities::keySystemToUuid(m_cdmInstance->keySystem());
            GST_INFO_OBJECT(pipeline(), "working with key system %s, continuing with key system %s on %s", m_cdmInstance->keySystem().utf8().data(), preferredKeySystemUuid, GST_MESSAGE_SRC_NAME(message));

            GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
            GstStructure* contextStructure = gst_context_writable_structure(context.get());
            gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, preferredKeySystemUuid, nullptr);
            gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
        } else
            GST_WARNING("CDM instance not initialized");

        return true;
    }
#endif // ENABLE(ENCRYPTED_MEDIA)

    return false;
}

#if USE(GSTREAMER_GL)
GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const char* contextType)
{
    if (!ensureGstGLContext())
        return nullptr;

    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(displayContext, gstGLDisplay());
        return displayContext;
    }

    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(appContext);
#if GST_CHECK_VERSION(1, 11, 0)
        gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, gstGLContext(), nullptr);
#else
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, gstGLContext(), nullptr);
#endif
        return appContext;
    }

    return nullptr;
}

bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
{
    if (m_glContext)
        return true;

    auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();

    // The floating ref removal support was added in https://bugzilla.gnome.org/show_bug.cgi?id=743062.
    bool shouldAdoptRef = webkitGstCheckVersion(1, 13, 1);
    if (!m_glDisplay) {
#if PLATFORM(X11)
#if USE(GLX)
        if (is<PlatformDisplayX11>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared GL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
        }
#elif USE(EGL)
        if (is<PlatformDisplayX11>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared EGL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
        }
#endif
#endif

#if PLATFORM(WAYLAND)
        if (is<PlatformDisplayWayland>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating Wayland shared display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
        }
#endif

#if PLATFORM(WPE)
        ASSERT(is<PlatformDisplayLibWPE>(sharedDisplay));
        GST_DEBUG_OBJECT(pipeline(), "Creating WPE shared EGL display");
        if (shouldAdoptRef)
            m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayLibWPE>(sharedDisplay).eglDisplay())));
        else
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayLibWPE>(sharedDisplay).eglDisplay()));
#endif

        ASSERT(m_glDisplay);
    }

    GLContext* webkitContext = sharedDisplay.sharingGLContext();
    // EGL and GLX are mutually exclusive, no need for ifdefs here.
    GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;

#if USE(OPENGL_ES)
    GstGLAPI glAPI = GST_GL_API_GLES2;
#elif USE(OPENGL)
    GstGLAPI glAPI = GST_GL_API_OPENGL;
#else
    ASSERT_NOT_REACHED();
#endif

    PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
    if (!contextHandle)
        return false;

    if (shouldAdoptRef)
        m_glContext = adoptGRef(gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI));
    else
        m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);

    return true;
}
#endif // USE(GSTREAMER_GL)

// Returns the size of the video.
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
    if (!hasVideo())
        return FloatSize();

    if (!m_videoSize.isEmpty())
        return m_videoSize;

    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return FloatSize();

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return FloatSize();

    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size; if this fails, the
    // video-sink has likely not yet negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return FloatSize();

#if USE(TEXTURE_MAPPER_GL)
    // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
    if (m_renderingCanBeAccelerated) {
        if (m_videoSourceOrientation.usesWidthAsHeight())
            originalSize = originalSize.transposedSize();
    }
#endif

    GST_DEBUG_OBJECT(pipeline(), "Original video size: %dx%d", originalSize.width(), originalSize.height());
    GST_DEBUG_OBJECT(pipeline(), "Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate DAR based on PAR and video size.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

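    // Worked example with illustrative numbers: a 720x576 frame with a 16:15 pixel aspect
    // ratio yields 11520:8640, which reduces to a 4:3 DAR; 576 is divisible by 3, so the
    // first branch below keeps the original height and the natural size becomes 768x576.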
    // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        GST_DEBUG_OBJECT(pipeline(), "Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        GST_DEBUG_OBJECT(pipeline(), "Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        GST_DEBUG_OBJECT(pipeline(), "Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    GST_DEBUG_OBJECT(pipeline(), "Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}

void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
{
    if (!m_volumeElement)
        return;

    GST_DEBUG_OBJECT(pipeline(), "Setting volume: %f", volume);
    gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
}

float MediaPlayerPrivateGStreamerBase::volume() const
{
    if (!m_volumeElement)
        return 0;

    return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
}

void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
{
    if (!m_player || !m_volumeElement)
        return;

    double volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
    // get_volume() can return values greater than 1.0 if the user applies software
    // gain via a third-party application (GNOME volume control, for instance).
    volume = CLAMP(volume, 0.0, 1.0);
    m_player->volumeChanged(static_cast<float>(volume));
}

void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::volume signal.
    GST_DEBUG_OBJECT(player->pipeline(), "Volume changed to: %f", player->volume());

    player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] {
        player->notifyPlayerOfVolumeChange();
    });
}

MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
    return m_networkState;
}

MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
    return m_readyState;
}

void MediaPlayerPrivateGStreamerBase::sizeChanged()
{
    notImplemented();
}

void MediaPlayerPrivateGStreamerBase::setMuted(bool mute)
{
    if (!m_volumeElement)
        return;

    bool currentValue = muted();
    if (currentValue == mute)
        return;

    GST_INFO_OBJECT(pipeline(), "Set muted to %s", toString(mute).utf8().data());
    g_object_set(m_volumeElement.get(), "mute", mute, nullptr);
}

bool MediaPlayerPrivateGStreamerBase::muted() const
{
    if (!m_volumeElement)
        return false;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
    GST_INFO_OBJECT(pipeline(), "Player is muted: %s", toString(static_cast<bool>(muted)).utf8().data());
    return muted;
}

void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
{
    if (!m_player || !m_volumeElement)
        return;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
    m_player->muteChanged(static_cast<bool>(muted));
}

void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::mute signal.
    player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] {
        player->notifyPlayerOfMute();
    });
}

void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
{
    m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled();
}

#if USE(TEXTURE_MAPPER_GL)
PlatformLayer* MediaPlayerPrivateGStreamerBase::platformLayer() const
{
#if USE(NICOSIA)
    return m_nicosiaLayer.ptr();
#else
    return const_cast<MediaPlayerPrivateGStreamerBase*>(this);
#endif
}

#if USE(NICOSIA)
void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
}
#else
RefPtr<TextureMapperPlatformLayerProxy> MediaPlayerPrivateGStreamerBase::proxy() const
{
    return m_platformLayerProxy.copyRef();
}

void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
}
#endif

void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    auto proxyOperation =
        [this](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder holder(proxy.lock());

            if (!proxy.isActive())
                return;

            std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation), !m_usingFallbackVideoSink);

            GLuint textureID = frameHolder->textureID();
            std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer;
            if (textureID) {
                layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(textureID, frameHolder->size(), frameHolder->flags(), GraphicsContext3D::RGBA);
                layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
            } else {
                layerBuffer = proxy.getAvailableBuffer(frameHolder->size(), GL_DONT_CARE);
                if (UNLIKELY(!layerBuffer)) {
                    auto texture = BitmapTextureGL::create(TextureMapperContextAttributes::get());
                    texture->reset(frameHolder->size(), frameHolder->hasAlphaChannel() ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
                    layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
                }
                frameHolder->updateTexture(layerBuffer->textureGL());
                layerBuffer->setExtraFlags(texMapFlagFromOrientation(m_videoSourceOrientation) | (frameHolder->hasAlphaChannel() ? TextureMapperGL::ShouldBlend : 0));
            }
            proxy.pushNextBuffer(WTFMove(layerBuffer));
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
#endif // USE(TEXTURE_MAPPER_GL)

void MediaPlayerPrivateGStreamerBase::repaint()
{
    ASSERT(m_sample);
    ASSERT(isMainThread());

    m_player->repaint();

    LockHolder lock(m_drawMutex);
    m_drawCondition.notifyOne();
}

void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
    bool triggerResize;
    {
        auto sampleLocker = holdLock(m_sampleMutex);
        triggerResize = !m_sample;
        m_sample = sample;
    }

    if (triggerResize) {
        GST_DEBUG_OBJECT(pipeline(), "First sample reached the sink, triggering video dimensions update");
        m_notifier->notify(MainThreadNotification::SizeChanged, [this] {
            m_player->sizeChanged();
        });
    }

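    // Without accelerated rendering the streaming thread schedules a repaint on the main
    // thread and blocks on m_drawCondition until the paint happens, or until
    // cancelRepaint() releases it (see the comment in cancelRepaint() below).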
    if (!m_renderingCanBeAccelerated) {
        LockHolder locker(m_drawMutex);
        if (m_destroying)
            return;
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
        return;
    }

#if USE(TEXTURE_MAPPER_GL)
    if (m_usingFallbackVideoSink) {
        LockHolder lock(m_drawMutex);
        auto proxyOperation =
            [this](TextureMapperPlatformLayerProxy& proxy)
            {
                return proxy.scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); });
            };
#if USE(NICOSIA)
        if (!proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy()))
            return;
#else
        if (!proxyOperation(*m_platformLayerProxy))
            return;
#endif
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
    } else
        pushTextureToCompositor();
#endif // USE(TEXTURE_MAPPER_GL)
}

void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
{
    player->triggerRepaint(sample);
}

void MediaPlayerPrivateGStreamerBase::cancelRepaint(bool destroying)
{
    // The goal of this function is to release the GStreamer thread from m_drawCondition in triggerRepaint() in the non-AC case,
    // to avoid a deadlock if the player gets paused while waiting for drawing (see https://bugs.webkit.org/show_bug.cgi?id=170003):
    // the main thread is waiting for the GStreamer thread to pause, but the GStreamer thread is locked waiting for the
    // main thread to draw. This deadlock doesn't happen when using AC because the sample is processed (not painted) in the compositor
    // thread, so the main thread can request the pause and wait if the GStreamer thread is waiting for the compositor thread.
    //
    // This function is also used when destroying the player (destroying parameter is true), to release the GStreamer thread from
    // m_drawCondition and to ensure that new triggerRepaint calls won't wait on m_drawCondition.
    if (!m_renderingCanBeAccelerated) {
        LockHolder locker(m_drawMutex);
        m_drawTimer.stop();
        m_destroying = destroying;
        m_drawCondition.notifyOne();
    }
}

void MediaPlayerPrivateGStreamerBase::repaintCancelledCallback(MediaPlayerPrivateGStreamerBase* player)
{
    player->cancelRepaint();
}

#if USE(GSTREAMER_GL)
GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}

GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}

void MediaPlayerPrivateGStreamerBase::flushCurrentBuffer()
{
    GST_DEBUG_OBJECT(pipeline(), "Flushing video sample");
    auto sampleLocker = holdLock(m_sampleMutex);

    if (m_sample) {
        // Replace the sample with a new one holding only the caps, so the dummy sample is
        // still useful for getting the dimensions. This prevents resizing problems when the
        // video changes its quality and a DRAIN is performed.
        const GstStructure* info = gst_sample_get_info(m_sample.get());
        m_sample = adoptGRef(gst_sample_new(nullptr, gst_sample_get_caps(m_sample.get()),
            gst_sample_get_segment(m_sample.get()), info ? gst_structure_copy(info) : nullptr));
    }

    auto proxyOperation =
        [](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder locker(proxy.lock());

            if (proxy.isActive())
                proxy.dropCurrentBufferWhilePreservingTexture();
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
#endif

void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
{
    m_size = size;
}

void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (context.paintingDisabled())
        return;

    if (!m_player->visible())
        return;

    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    ImagePaintingOptions paintingOptions(CompositeCopy);
    if (m_renderingCanBeAccelerated)
        paintingOptions.m_orientationDescription.setImageOrientationEnum(m_videoSourceOrientation);

    auto gstImage = ImageGStreamer::createImage(m_sample.get());
    if (!gstImage)
        return;

    context.drawImage(gstImage->image(), rect, gstImage->rect(), paintingOptions);
}

#if USE(GSTREAMER_GL)
bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    UNUSED_PARAM(context);

    if (m_usingFallbackVideoSink)
        return false;

    if (premultiplyAlpha)
        return false;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return false;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation), true);

    auto textureID = frameHolder->textureID();
    if (!textureID)
        return false;

    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);
}

NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
{
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
    if (m_usingFallbackVideoSink)
        return nullptr;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return nullptr;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation), true);

    auto textureID = frameHolder->textureID();
    if (!textureID)
        return nullptr;

    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
    context->makeContextCurrent();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    if (!m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, 0, GraphicsContext3D::TEXTURE_2D, 0, GraphicsContext3D::RGBA, GraphicsContext3D::RGBA, GraphicsContext3D::UNSIGNED_BYTE, false, m_videoSourceOrientation))
        return nullptr;

    return adoptRef(cairo_gl_surface_create_for_texture(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, m_videoTextureCopier->resultTexture(), size.width(), size.height()));
#else
    return nullptr;
#endif
}
#endif // USE(GSTREAMER_GL)

void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(const ImageOrientation& orientation)
{
    if (m_videoSourceOrientation == orientation)
        return;

    m_videoSourceOrientation = orientation;
}

bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
    return true;
}

MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
{
    if (m_readyState == MediaPlayer::HaveNothing)
        return MediaPlayer::Unknown;

    if (isLiveStream())
        return MediaPlayer::LiveStream;

    return MediaPlayer::Download;
}

#if USE(GSTREAMER_GL)
GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
{
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
    if (!appsink)
        return nullptr;

    g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
    g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
    g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(appsink, "sink"));
    gst_pad_add_probe(pad.get(), static_cast<GstPadProbeType>(GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_FLUSH), [] (GstPad*, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn {
        // On some platforms (e.g. OpenMAX on the Raspberry Pi), when a resolution change occurs, the
        // pipeline has to be drained before a frame with the new resolution can be decoded.
        // In this context, it's important that we don't hold references to any previous frame
        // (e.g. m_sample) so that decoding can continue.
        // We are also not supposed to keep the original frame after a flush.
        if (info->type & GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM) {
            if (GST_QUERY_TYPE(GST_PAD_PROBE_INFO_QUERY(info)) != GST_QUERY_DRAIN)
                return GST_PAD_PROBE_OK;
            GST_DEBUG("Acting upon DRAIN query");
        }
        if (info->type & GST_PAD_PROBE_TYPE_EVENT_FLUSH) {
            if (GST_EVENT_TYPE(GST_PAD_PROBE_INFO_EVENT(info)) != GST_EVENT_FLUSH_START)
                return GST_PAD_PROBE_OK;
            GST_DEBUG("Acting upon flush-start event");
        }

        auto* player = static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
        player->flushCurrentBuffer();
        return GST_PAD_PROBE_OK;
    }, this, nullptr);

    return appsink;
}

GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
{
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

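    // The GL sink is a bin of glupload ! glcolorconvert ! appsink, exposed through a
    // ghost sink pad added on the upload element.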
    gboolean result = TRUE;
    GstElement* videoSink = gst_bin_new(nullptr);
    GstElement* upload = gst_element_factory_make("glupload", nullptr);
    GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
    GstElement* appsink = createGLAppSink();

    if (!appsink || !upload || !colorconvert) {
        GST_WARNING("Failed to create GstGL elements");
        gst_object_unref(videoSink);

        if (upload)
            gst_object_unref(upload);
        if (colorconvert)
            gst_object_unref(colorconvert);
        if (appsink)
            gst_object_unref(appsink);

        g_warning("WebKit wasn't able to find the GStreamer opengl plugin. Hardware-accelerated zero-copy video rendering can't be enabled without this plugin.");
        return nullptr;
    }

    gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);

    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) " GST_GL_CAPS_FORMAT));

    result &= gst_element_link_pads(upload, "src", colorconvert, "sink");
    result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get());

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
    gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));

    if (!result) {
        GST_WARNING("Failed to link GstGL elements");
        gst_object_unref(videoSink);
        videoSink = nullptr;
    }

    return videoSink;
}

void MediaPlayerPrivateGStreamerBase::ensureGLVideoSinkContext()
{
    if (!m_glDisplayElementContext)
        m_glDisplayElementContext = adoptGRef(requestGLContext(GST_GL_DISPLAY_CONTEXT_TYPE));

    if (m_glDisplayElementContext)
        gst_element_set_context(m_videoSink.get(), m_glDisplayElementContext.get());

    if (!m_glAppElementContext)
        m_glAppElementContext = adoptGRef(requestGLContext("gst.gl.app_context"));

    if (m_glAppElementContext)
        gst_element_set_context(m_videoSink.get(), m_glAppElementContext.get());
}
#endif // USE(GSTREAMER_GL)

GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    acceleratedRenderingStateChanged();

#if USE(GSTREAMER_GL)
    if (m_renderingCanBeAccelerated)
        m_videoSink = createVideoSinkGL();
#endif

    if (!m_videoSink) {
        m_usingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
        g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
    }

    GstElement* videoSink = nullptr;
#if ENABLE(MEDIA_STATISTICS)
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        g_object_set(m_fpsSink.get(), "silent", TRUE, nullptr);

        // Turn off text overlay unless tracing is enabled.
        if (gst_debug_category_get_threshold(webkit_media_player_debug) < GST_LEVEL_TRACE)
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE, nullptr);

        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }
#endif

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);

    return videoSink;
}

void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
{
    ASSERT(!m_volumeElement);
    m_volumeElement = volume;

    // We don't set the initial volume because we trust the sink to keep it for us. See
    // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
    if (!m_player->platformVolumeConfigurationRequired()) {
        GST_DEBUG_OBJECT(pipeline(), "Setting stream volume to %f", m_player->volume());
        g_object_set(m_volumeElement.get(), "volume", m_player->volume(), nullptr);
    } else
        GST_DEBUG_OBJECT(pipeline(), "Not setting stream volume, trusting system one");

    GST_DEBUG_OBJECT(pipeline(), "Setting stream muted %s", toString(m_player->muted()).utf8().data());
    g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);

    g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
    g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
}

unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
{
    guint64 decodedFrames = 0;
    if (m_fpsSink)
        g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
    return static_cast<unsigned>(decodedFrames);
}

unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
{
    guint64 framesDropped = 0;
    if (m_fpsSink)
        g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
    return static_cast<unsigned>(framesDropped);
}

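// Decoded byte counts are approximated by querying the sink's position in
// GST_FORMAT_BYTES, i.e. the number of bytes the sink has consumed so far.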
unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    if (audioSink() && gst_element_query(audioSink(), query))
        gst_query_parse_position(query, 0, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}

unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    if (gst_element_query(m_videoSink.get(), query))
        gst_query_parse_position(query, 0, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}

#if ENABLE(ENCRYPTED_MEDIA)
void MediaPlayerPrivateGStreamerBase::initializationDataEncountered(InitData&& initData)
{
    ASSERT(!isMainThread());

    RunLoop::main().dispatch([weakThis = makeWeakPtr(*this), initData = WTFMove(initData)] {
        if (!weakThis)
            return;

        GST_DEBUG("scheduling initializationDataEncountered event of size %lu", initData.payload()->size());
        GST_MEMDUMP("init data", reinterpret_cast<const uint8_t*>(initData.payload()->data()), initData.payload()->size());
        weakThis->m_player->initializationDataEncountered(initData.payloadContainerType(), initData.payload()->tryCreateArrayBuffer());
    });
}

void MediaPlayerPrivateGStreamerBase::cdmInstanceAttached(CDMInstance& instance)
{
    ASSERT(isMainThread());

    if (m_cdmInstance == &instance)
        return;

    if (!m_pipeline) {
        GST_ERROR("no pipeline yet");
        ASSERT_NOT_REACHED();
        return;
    }

    m_cdmInstance = &instance;

    GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-cdm-instance", FALSE));
    GstStructure* contextStructure = gst_context_writable_structure(context.get());
    gst_structure_set(contextStructure, "cdm-instance", G_TYPE_POINTER, m_cdmInstance.get(), nullptr);
    gst_element_set_context(GST_ELEMENT(m_pipeline.get()), context.get());

    GST_DEBUG_OBJECT(m_pipeline.get(), "CDM instance %p dispatched as context", m_cdmInstance.get());

    m_protectionCondition.notifyAll();
}

void MediaPlayerPrivateGStreamerBase::cdmInstanceDetached(CDMInstance& instance)
{
    ASSERT(isMainThread());

    if (m_cdmInstance != &instance) {
        GST_WARNING("passed CDMInstance %p is different from the stored one %p", &instance, m_cdmInstance.get());
        ASSERT_NOT_REACHED();
        return;
    }

    ASSERT(m_pipeline);

    GST_DEBUG_OBJECT(m_pipeline.get(), "detaching CDM instance %p, setting empty context", m_cdmInstance.get());
    m_cdmInstance = nullptr;

    GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-cdm-instance", FALSE));
    gst_element_set_context(GST_ELEMENT(m_pipeline.get()), context.get());

    m_protectionCondition.notifyAll();
}

void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithInstance(CDMInstance& instance)
{
    ASSERT(m_cdmInstance.get() == &instance);
    GST_TRACE("instance %p, currently stored %p", &instance, m_cdmInstance.get());
    attemptToDecryptWithLocalInstance();
}

void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithLocalInstance()
{
    bool eventHandled = gst_element_send_event(pipeline(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB, gst_structure_new_empty("attempt-to-decrypt")));
    GST_DEBUG("attempting to decrypt, event handled %s", boolForPrinting(eventHandled));
}

void MediaPlayerPrivateGStreamerBase::handleProtectionEvent(GstEvent* event)
{
    if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
        GST_DEBUG_OBJECT(pipeline(), "event %u already handled", GST_EVENT_SEQNUM(event));
        return;
    }

    GST_DEBUG_OBJECT(pipeline(), "handling event %u from MSE", GST_EVENT_SEQNUM(event));
    const char* eventKeySystemUUID = nullptr;
    GstBuffer* initData = nullptr;
    gst_event_parse_protection(event, &eventKeySystemUUID, &initData, nullptr);
    initializationDataEncountered({eventKeySystemUUID, initData});
}

void MediaPlayerPrivateGStreamerBase::setWaitingForKey(bool waitingForKey)
{
    // We bail out if the values did not change, or if we are asked to stop waiting while there are still waiting decryptors.
    GST_TRACE("waitingForKey %s, m_waitingForKey %s", boolForPrinting(waitingForKey), boolForPrinting(m_waitingForKey));
    if (waitingForKey == m_waitingForKey || (!waitingForKey && this->waitingForKey()))
        return;

    m_waitingForKey = waitingForKey;
    GST_DEBUG("waiting for key changed %s", boolForPrinting(m_waitingForKey));
    m_player->waitingForKeyChanged();
}

bool MediaPlayerPrivateGStreamerBase::waitingForKey() const
{
    if (!m_pipeline)
        return false;

    GstState state;
    gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);

    bool result = false;
    GRefPtr<GstQuery> query = adoptGRef(gst_query_new_custom(GST_QUERY_CUSTOM, gst_structure_new_empty("any-decryptor-waiting-for-key")));
    if (state >= GST_STATE_PAUSED) {
        result = gst_element_query(m_pipeline.get(), query.get());
        GST_TRACE("query result %s, on %s", boolForPrinting(result), gst_element_state_get_name(state));
    } else if (state >= GST_STATE_READY) {
        // Running a query in the pipeline is easier, but it only works when the pipeline is set up and running; otherwise we need to inspect it and ask the decryptors directly.
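        // gst_iterator_fold() keeps iterating while the fold function returns TRUE. The fold
        // function below returns FALSE as soon as a decryptor element answers the query, which
        // makes the fold finish with GST_ITERATOR_OK; that result is read as "waiting for key".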
        GUniquePtr<GstIterator> iterator(gst_bin_iterate_recurse(GST_BIN(m_pipeline.get())));
        GstIteratorResult iteratorResult;
        do {
            iteratorResult = gst_iterator_fold(iterator.get(), [](const GValue* item, GValue*, gpointer data) -> gboolean {
                GstElement* element = GST_ELEMENT(g_value_get_object(item));
                GstQuery* query = GST_QUERY(data);
                return !WEBKIT_IS_MEDIA_CENC_DECRYPT(element) || !gst_element_query(element, query);
            }, nullptr, query.get());
            if (iteratorResult == GST_ITERATOR_RESYNC)
                gst_iterator_resync(iterator.get());
        } while (iteratorResult == GST_ITERATOR_RESYNC);
        if (iteratorResult == GST_ITERATOR_ERROR)
            GST_WARNING("iterator returned an error");
        result = iteratorResult == GST_ITERATOR_OK;
        GST_TRACE("iterator result %d, waiting %s", iteratorResult, boolForPrinting(result));
    }

    return result;
}
#endif

bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
{
    bool result = false;

#if ENABLE(ENCRYPTED_MEDIA)
    result = GStreamerEMEUtilities::isClearKeyKeySystem(keySystem);
#endif

    GST_DEBUG("checking for KeySystem support with %s and type %s: %s", keySystem.utf8().data(), mimeType.utf8().data(), boolForPrinting(result));
    return result;
}

MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
{
    UNUSED_PARAM(parameters);
    return result;
}

}

#endif // USE(GSTREAMER)