[GStreamer][EME] waitingforkey event should consider decryptors' waiting status
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamerBase.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
7  * Copyright (C) 2015, 2016 Metrological Group B.V.
8  *
9  * This library is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Library General Public
11  * License as published by the Free Software Foundation; either
12  * version 2 of the License, or (at your option) any later version.
13  *
14  * This library is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
17  * Library General Public License for more details.
18  *
19  * You should have received a copy of the GNU Library General Public License
20  * along with this library; see the file COPYING.LIB.  If not, write to
21  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
22  * Boston, MA 02110-1301, USA.
23  */
24
#include "config.h"
#include "MediaPlayerPrivateGStreamerBase.h"

#if ENABLE(VIDEO) && USE(GSTREAMER)

#include "GStreamerCommon.h"
#include "GraphicsContext.h"
#include "GraphicsContext3D.h"
#include "ImageGStreamer.h"
#include "ImageOrientation.h"
#include "IntRect.h"
#include "Logging.h"
#include "MediaPlayer.h"
#include "NotImplemented.h"
#include "VideoSinkGStreamer.h"
#include <cstdlib>
#include <wtf/MathExtras.h>
#include <wtf/StringPrintStream.h>
#include <wtf/glib/GUniquePtr.h>
#include <wtf/text/AtomicString.h>
#include <wtf/text/CString.h>
45
46 #include <gst/audio/streamvolume.h>
47 #include <gst/video/gstvideometa.h>
48
49 #if ENABLE(ENCRYPTED_MEDIA)
50 #include "CDMInstance.h"
51 #include "GStreamerEMEUtilities.h"
52 #include "SharedBuffer.h"
53 #include "WebKitCommonEncryptionDecryptorGStreamer.h"
54 #endif
55
56 #if USE(GSTREAMER_GL)
57 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
58 #define GST_GL_CAPS_FORMAT "{ BGRx, BGRA }"
59 #define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureBGRAToRGBA
60 #define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertBGRAToRGBA
61 #else
62 #define GST_GL_CAPS_FORMAT "{ xRGB, ARGB }"
63 #define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureARGBToRGBA
64 #define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertARGBToRGBA
65 #endif
66
67 #include <gst/app/gstappsink.h>
68
69 #if USE(LIBEPOXY)
70 // Include the <epoxy/gl.h> header before <gst/gl/gl.h>.
71 #include <epoxy/gl.h>
72
73 // Workaround build issue with RPi userland GLESv2 headers and libepoxy <https://webkit.org/b/185639>
74 #if !GST_CHECK_VERSION(1, 14, 0)
75 #include <gst/gl/gstglconfig.h>
76 #if defined(GST_GL_HAVE_WINDOW_DISPMANX) && GST_GL_HAVE_WINDOW_DISPMANX
77 #define __gl2_h_
78 #undef GST_GL_HAVE_GLSYNC
79 #define GST_GL_HAVE_GLSYNC 1
80 #endif
81 #endif // !GST_CHECK_VERSION(1, 14, 0)
82 #endif // USE(LIBEPOXY)
83
84 #define GST_USE_UNSTABLE_API
85 #include <gst/gl/gl.h>
86 #undef GST_USE_UNSTABLE_API
87
88 #include "GLContext.h"
89 #if USE(GLX)
90 #include "GLContextGLX.h"
91 #include <gst/gl/x11/gstgldisplay_x11.h>
92 #endif
93
94 #if USE(EGL)
95 #include "GLContextEGL.h"
96 #include <gst/gl/egl/gstgldisplay_egl.h>
97 #endif
98
99 #if PLATFORM(X11)
100 #include "PlatformDisplayX11.h"
101 #endif
102
103 #if PLATFORM(WAYLAND)
104 #include "PlatformDisplayWayland.h"
105 #elif PLATFORM(WPE)
106 #include "PlatformDisplayWPE.h"
107 #endif
108
109 // gstglapi.h may include eglplatform.h and it includes X.h, which
110 // defines None, breaking MediaPlayer::None enum
111 #if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
112 #undef None
113 #endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
114 #include "VideoTextureCopierGStreamer.h"
115 #endif // USE(GSTREAMER_GL)
116
117 #if USE(TEXTURE_MAPPER_GL)
118 #include "BitmapTextureGL.h"
119 #include "BitmapTexturePool.h"
120 #include "TextureMapperContextAttributes.h"
121 #include "TextureMapperGL.h"
122 #include "TextureMapperPlatformLayerBuffer.h"
123 #include "TextureMapperPlatformLayerProxy.h"
124 #if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
125 #include <cairo-gl.h>
126 #endif
127 #endif // USE(TEXTURE_MAPPER_GL)
128
129 GST_DEBUG_CATEGORY(webkit_media_player_debug);
130 #define GST_CAT_DEFAULT webkit_media_player_debug
131
132
133 namespace WebCore {
134 using namespace std;
135
// Returns the greatest common divisor of |a| and |b| using the iterative
// Euclidean algorithm. The result is always non-negative; gcd(0, 0) is 0.
static int greatestCommonDivisor(int a, int b)
{
    while (b) {
        int remainder = a % b;
        a = b;
        b = remainder;
    }

    // std::abs instead of GLib's ABS macro: same result, but type-checked and
    // free of macro double-evaluation hazards.
    return std::abs(a);
}
146
147 #if USE(TEXTURE_MAPPER_GL)
148 static inline TextureMapperGL::Flags texMapFlagFromOrientation(const ImageOrientation& orientation)
149 {
150     switch (orientation) {
151     case DefaultImageOrientation:
152         return 0;
153     case OriginRightTop:
154         return TextureMapperGL::ShouldRotateTexture90;
155     case OriginBottomRight:
156         return TextureMapperGL::ShouldRotateTexture180;
157     case OriginLeftBottom:
158         return TextureMapperGL::ShouldRotateTexture270;
159     default:
160         ASSERT_NOT_REACHED();
161     }
162
163     return 0;
164 }
165 #endif
166
167 #if USE(TEXTURE_MAPPER_GL)
// Keeps a GstSample's video frame mapped (and therefore valid) for as long as
// the compositor holds on to the corresponding platform-layer buffer.
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
    // Maps the sample's buffer. With gstGLEnabled the buffer is mapped as a GL
    // texture; otherwise it is mapped into system memory for a later upload
    // via updateTexture(). On failure the holder stays unmapped (m_isMapped
    // remains false) and textureID() stays 0.
    explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags, bool gstGLEnabled)
    {
        GstVideoInfo videoInfo;
        if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
            return;

        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
        m_hasAlphaChannel = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo);
        m_buffer = gst_sample_get_buffer(sample);
        if (UNLIKELY(!GST_IS_BUFFER(m_buffer)))
            return;

#if USE(GSTREAMER_GL)
        m_flags = flags | (m_hasAlphaChannel ? TextureMapperGL::ShouldBlend : 0) | TEXTURE_MAPPER_COLOR_CONVERT_FLAG;

        if (gstGLEnabled) {
            // GL path: the texture id is stored in the first plane's data pointer.
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL));
            if (m_isMapped)
                m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
        } else
#endif // USE(GSTREAMER_GL)

        {
            // System-memory path: map for CPU reads; updateTexture() uploads later.
            m_textureID = 0;
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer, GST_MAP_READ);
            if (m_isMapped) {
                // Right now the TextureMapper only supports chromas with one plane
                ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
            }
        }
    }

    virtual ~GstVideoFrameHolder()
    {
        if (UNLIKELY(!m_isMapped))
            return;

        gst_video_frame_unmap(&m_videoFrame);
    }

    const IntSize& size() const { return m_size; }
    bool hasAlphaChannel() const { return m_hasAlphaChannel; }
    TextureMapperGL::Flags flags() const { return m_flags; }
    // 0 unless the frame was successfully mapped as a GL texture.
    GLuint textureID() const { return m_textureID; }

    // Uploads the system-memory frame into |texture|, preferring the buffer's
    // GL texture-upload meta (zero-copy) over a plain pixel copy. Only valid
    // on holders constructed without a GL mapping (m_textureID == 0).
    void updateTexture(BitmapTextureGL& texture)
    {
        ASSERT(!m_textureID);
        GstVideoGLTextureUploadMeta* meta;
        if ((meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer))) {
            if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
                guint ids[4] = { texture.id(), 0, 0, 0 };

                if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                    return;
            }
        }

        // Fallback: copy the first plane's pixels into the texture.
        int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 0);
        const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0);
        texture.updateContents(srcData, WebCore::IntRect(0, 0, m_size.width(), m_size.height()), WebCore::IntPoint(0, 0), stride);
    }

private:
    GstBuffer* m_buffer;
    GstVideoFrame m_videoFrame { };
    IntSize m_size;
    bool m_hasAlphaChannel;
    TextureMapperGL::Flags m_flags { };
    GLuint m_textureID { 0 };
    bool m_isMapped { false };
};
242 #endif
243
// Registers the "webkitmediaplayer" GStreamer debug category used by the
// GST_* logging macros in this file (see GST_CAT_DEFAULT above).
void MediaPlayerPrivateGStreamerBase::initializeDebugCategory()
{
    GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
}
248
// Sets up the cross-thread notifier, the repaint timer (main run loop) and,
// when texture-mapper compositing is enabled, the compositing layer/proxy.
// The pipeline itself is attached later through setPipeline().
MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
    , m_player(player)
    , m_fpsSink(nullptr)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_networkState(MediaPlayer::Empty)
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
#if USE(TEXTURE_MAPPER_GL)
#if USE(NICOSIA)
    , m_nicosiaLayer(Nicosia::ContentLayer::create(Nicosia::ContentLayerTextureMapperImpl::createFactory(*this)))
#else
    , m_platformLayerProxy(adoptRef(new TextureMapperPlatformLayerProxy()))
#endif
#endif
{
}
265
// Tears down in a deliberate order: detach compositing client, unblock any
// waiting GStreamer threads, disconnect signal handlers, then bring the
// pipeline down to NULL so no streaming-thread callbacks outlive |this|.
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
#if USE(TEXTURE_MAPPER_GL) && USE(NICOSIA)
    downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).invalidateClient();
#endif

#if ENABLE(ENCRYPTED_MEDIA)
    // Wake any streaming thread blocked in handleSyncMessage() waiting for a CDM.
    m_protectionCondition.notifyAll();
#endif
    m_notifier->invalidate();

    if (m_videoSink) {
        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
#if USE(GSTREAMER_GL)
        if (GST_IS_BIN(m_videoSink.get())) {
            GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
            g_signal_handlers_disconnect_by_data(appsink.get(), this);
        }
#endif
    }

    if (m_volumeElement)
        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    // This will release the GStreamer thread from m_drawCondition in non AC mode in case there's an ongoing triggerRepaint call
    // waiting there, and ensure that any triggerRepaint call reaching the lock won't wait on m_drawCondition.
    cancelRepaint(true);

    // The change to GST_STATE_NULL state is always synchronous. So after this gets executed we don't need to worry
    // about handlers running in the GStreamer thread.
    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);

    m_player = nullptr;
}
301
302 void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
303 {
304     m_pipeline = pipeline;
305
306     GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
307     gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
308         auto& player = *static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
309
310         if (player.handleSyncMessage(message)) {
311             gst_message_unref(message);
312             return GST_BUS_DROP;
313         }
314
315         return GST_BUS_PASS;
316     }, this, nullptr);
317 }
318
319 #if ENABLE(ENCRYPTED_MEDIA)
320 static std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> extractEventsAndSystemsFromMessage(GstMessage* message)
321 {
322     const GstStructure* structure = gst_message_get_structure(message);
323
324     const GValue* streamEncryptionEventsList = gst_structure_get_value(structure, "stream-encryption-events");
325     ASSERT(streamEncryptionEventsList && GST_VALUE_HOLDS_LIST(streamEncryptionEventsList));
326     unsigned streamEncryptionEventsListSize = gst_value_list_get_size(streamEncryptionEventsList);
327     Vector<GRefPtr<GstEvent>> streamEncryptionEventsVector;
328
329     unsigned i;
330     for (i = 0; i < streamEncryptionEventsListSize; ++i)
331         streamEncryptionEventsVector.append(GRefPtr<GstEvent>(static_cast<GstEvent*>(g_value_get_boxed(gst_value_list_get_value(streamEncryptionEventsList, i)))));
332
333     Vector<String> streamEncryptionAllowedSystemsVector;
334     const GValue* streamEncryptionAllowedSystemsValue = gst_structure_get_value(structure, "available-stream-encryption-systems");
335     const char** streamEncryptionAllowedSystems = reinterpret_cast<const char**>(g_value_get_boxed(streamEncryptionAllowedSystemsValue));
336     if (streamEncryptionAllowedSystems) {
337         for (i = 0; streamEncryptionAllowedSystems[i]; ++i)
338             streamEncryptionAllowedSystemsVector.append(streamEncryptionAllowedSystems[i]);
339     }
340
341     return std::make_pair(streamEncryptionEventsVector, streamEncryptionAllowedSystemsVector);
342 }
343 #endif
344
// Answers bus messages that must be handled synchronously on the posting
// (streaming) thread. Currently only need-context messages are serviced:
// GL contexts immediately, and the EME decryption-system context by blocking
// until a CDM instance is attached from the main thread. Returns true when
// the message was handled (the sync handler then drops it).
bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
{
    UNUSED_PARAM(message);
    if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
        return false;

    const gchar* contextType;
    gst_message_parse_context_type(message, &contextType);
    GST_DEBUG_OBJECT(pipeline(), "Handling %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message));

#if USE(GSTREAMER_GL)
    // GL display / app-context requests are answered from our shared GL state.
    GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType));
    if (elementContext) {
        gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
        return true;
    }
#endif // USE(GSTREAMER_GL)

#if ENABLE(ENCRYPTED_MEDIA)
    if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
        // This branch blocks below waiting for the CDM, which is attached from
        // the main thread — running it there would deadlock.
        if (isMainThread()) {
            GST_ERROR("can't handle drm-preferred-decryption-system-id need context message in the main thread");
            ASSERT_NOT_REACHED();
            return false;
        }
        GST_DEBUG_OBJECT(pipeline(), "handling drm-preferred-decryption-system-id need context message");
        LockHolder lock(m_protectionMutex);
        std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> streamEncryptionInformation = extractEventsAndSystemsFromMessage(message);
        GST_TRACE("found %" G_GSIZE_FORMAT " protection events", streamEncryptionInformation.first.size());
        Vector<uint8_t> concatenatedInitDataChunks;
        unsigned concatenatedInitDataChunksNumber = 0;
        String eventKeySystemIdString;

        // Concatenate the init data carried by every protection event. Events
        // whose key system the decryptors already support are marked handled.
        for (auto& event : streamEncryptionInformation.first) {
            GST_TRACE("handling protection event %u", GST_EVENT_SEQNUM(event.get()));
            const char* eventKeySystemId = nullptr;
            GstBuffer* data = nullptr;
            gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);

            GstMappedBuffer dataMapped(data, GST_MAP_READ);
            if (!dataMapped) {
                GST_WARNING("cannot map %s protection data", eventKeySystemId);
                break;
            }
            GST_TRACE("appending init data for %s of size %" G_GSIZE_FORMAT, eventKeySystemId, dataMapped.size());
            GST_MEMDUMP("init data", reinterpret_cast<const unsigned char*>(dataMapped.data()), dataMapped.size());
            concatenatedInitDataChunks.append(dataMapped.data(), dataMapped.size());
            ++concatenatedInitDataChunksNumber;
            eventKeySystemIdString = eventKeySystemId;
            if (streamEncryptionInformation.second.contains(eventKeySystemId)) {
                GST_TRACE("considering init data handled for %s", eventKeySystemId);
                m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
            }
        }

        if (!concatenatedInitDataChunksNumber)
            return false;

        // Chunks from several events: no single key system id applies.
        if (concatenatedInitDataChunksNumber > 1)
            eventKeySystemIdString = emptyString();

        // Report the init data to the player (and ultimately the page) on the
        // main thread; |weakThis| guards against the player dying meanwhile.
        RunLoop::main().dispatch([weakThis = makeWeakPtr(*this), eventKeySystemIdString, initData = WTFMove(concatenatedInitDataChunks)] {
            if (!weakThis)
                return;

            GST_DEBUG_OBJECT(weakThis->pipeline(), "scheduling initializationDataEncountered event for %s with concatenated init datas size of %" G_GSIZE_FORMAT, eventKeySystemIdString.utf8().data(), initData.size());
            GST_MEMDUMP("init datas", initData.data(), initData.size());
            weakThis->m_player->initializationDataEncountered("cenc"_s, ArrayBuffer::create(initData.data(), initData.size()));
        });

        // Block (bounded by a 4 s timeout) until the main thread attaches a CDM
        // instance; the destructor's notifyAll() also releases this wait.
        GST_INFO_OBJECT(pipeline(), "waiting for a CDM instance");
        m_protectionCondition.waitFor(m_protectionMutex, Seconds(4), [this] {
            return this->m_cdmInstance;
        });
        if (m_cdmInstance && !m_cdmInstance->keySystem().isEmpty()) {
            const char* preferredKeySystemUuid = GStreamerEMEUtilities::keySystemToUuid(m_cdmInstance->keySystem());
            GST_INFO_OBJECT(pipeline(), "working with %s, continuing with %s on %s", m_cdmInstance->keySystem().utf8().data(), preferredKeySystemUuid, GST_MESSAGE_SRC_NAME(message));

            // Answer the need-context query with the CDM's decryption system id.
            GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
            GstStructure* contextStructure = gst_context_writable_structure(context.get());
            gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, preferredKeySystemUuid, nullptr);
            gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
        } else
            GST_WARNING("no proper CDM instance attached");

        return true;
    }
#endif // ENABLE(ENCRYPTED_MEDIA)

    return false;
}
436
437 #if USE(GSTREAMER_GL)
438 GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const char* contextType)
439 {
440     if (!ensureGstGLContext())
441         return nullptr;
442
443     if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
444         GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
445         gst_context_set_gl_display(displayContext, gstGLDisplay());
446         return displayContext;
447     }
448
449     if (!g_strcmp0(contextType, "gst.gl.app_context")) {
450         GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
451         GstStructure* structure = gst_context_writable_structure(appContext);
452 #if GST_CHECK_VERSION(1, 11, 0)
453         gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, gstGLContext(), nullptr);
454 #else
455         gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, gstGLContext(), nullptr);
456 #endif
457         return appContext;
458     }
459
460     return nullptr;
461 }
462
// Lazily wraps WebKit's shared compositing GL display/context into GStreamer
// GstGLDisplay/GstGLContext objects. Returns false only when the shared
// context has no native handle yet; otherwise true (also when already set up).
bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
{
    if (m_glContext)
        return true;

    auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();

    // The floating ref removal support was added in https://bugzilla.gnome.org/show_bug.cgi?id=743062.
    bool shouldAdoptRef = webkitGstCheckVersion(1, 13, 1);
    if (!m_glDisplay) {
#if PLATFORM(X11)
#if USE(GLX)
        if (is<PlatformDisplayX11>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared GL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
        }
#elif USE(EGL)
        if (is<PlatformDisplayX11>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared EGL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
        }
#endif
#endif

#if PLATFORM(WAYLAND)
        if (is<PlatformDisplayWayland>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating Wayland shared display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
        }
#endif

#if PLATFORM(WPE)
        ASSERT(is<PlatformDisplayWPE>(sharedDisplay));
        GST_DEBUG_OBJECT(pipeline(), "Creating WPE shared EGL display");
        if (shouldAdoptRef)
            m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWPE>(sharedDisplay).eglDisplay())));
        else
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWPE>(sharedDisplay).eglDisplay()));
#endif

        ASSERT(m_glDisplay);
    }

    GLContext* webkitContext = sharedDisplay.sharingGLContext();
    // EGL and GLX are mutually exclusive, no need for ifdefs here.
    GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;

#if USE(OPENGL_ES)
    GstGLAPI glAPI = GST_GL_API_GLES2;
#elif USE(OPENGL)
    GstGLAPI glAPI = GST_GL_API_OPENGL;
#else
    ASSERT_NOT_REACHED();
#endif

    PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
    if (!contextHandle)
        return false;

    // Wrap (not own) WebKit's native context handle for GStreamer's use.
    if (shouldAdoptRef)
        m_glContext = adoptGRef(gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI));
    else
        m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);

    return true;
}
538 #endif // USE(GSTREAMER_GL)
539
// Returns the video's natural size: the caps-reported frame size corrected by
// the pixel aspect ratio (and transposed for 90/270-degree source orientation
// when compositing is accelerated). Cached in m_videoSize once computed.
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
    if (!hasVideo())
        return FloatSize();

    if (!m_videoSize.isEmpty())
        return m_videoSize;

    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return FloatSize();

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return FloatSize();


    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size, if this fails the
    // video-sink has likely not yet negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return FloatSize();

#if USE(TEXTURE_MAPPER_GL)
    // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
    if (m_renderingCanBeAccelerated) {
        if (m_videoSourceOrientation.usesWidthAsHeight())
            originalSize = originalSize.transposedSize();
    }
#endif

    GST_DEBUG_OBJECT(pipeline(), "Original video size: %dx%d", originalSize.width(), originalSize.height());
    GST_DEBUG_OBJECT(pipeline(), "Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate DAR based on PAR and video size.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

    // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        GST_DEBUG_OBJECT(pipeline(), "Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        GST_DEBUG_OBJECT(pipeline(), "Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        // Neither dimension divides evenly; approximate, preferring the height.
        GST_DEBUG_OBJECT(pipeline(), "Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    GST_DEBUG_OBJECT(pipeline(), "Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}
611
612 void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
613 {
614     if (!m_volumeElement)
615         return;
616
617     GST_DEBUG_OBJECT(pipeline(), "Setting volume: %f", volume);
618     gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
619 }
620
621 float MediaPlayerPrivateGStreamerBase::volume() const
622 {
623     if (!m_volumeElement)
624         return 0;
625
626     return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
627 }
628
629
630 void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
631 {
632     if (!m_player || !m_volumeElement)
633         return;
634     double volume;
635     volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
636     // get_volume() can return values superior to 1.0 if the user
637     // applies software user gain via third party application (GNOME
638     // volume control for instance).
639     volume = CLAMP(volume, 0.0, 1.0);
640     m_player->volumeChanged(static_cast<float>(volume));
641 }
642
643 void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
644 {
645     // This is called when m_volumeElement receives the notify::volume signal.
646     GST_DEBUG_OBJECT(player->pipeline(), "Volume changed to: %f", player->volume());
647
648     player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] {
649         player->notifyPlayerOfVolumeChange();
650     });
651 }
652
// Returns the cached network state (updated by subclasses).
MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
    return m_networkState;
}
657
// Returns the cached ready state (updated by subclasses).
MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
    return m_readyState;
}
662
// Intentionally a no-op stub; size handling happens via naturalSize().
void MediaPlayerPrivateGStreamerBase::sizeChanged()
{
    notImplemented();
}
667
668 void MediaPlayerPrivateGStreamerBase::setMuted(bool mute)
669 {
670     if (!m_volumeElement)
671         return;
672
673     bool currentValue = muted();
674     if (currentValue == mute)
675         return;
676
677     GST_INFO_OBJECT(pipeline(), "Set muted to %s", toString(mute).utf8().data());
678     g_object_set(m_volumeElement.get(), "mute", mute, nullptr);
679 }
680
681 bool MediaPlayerPrivateGStreamerBase::muted() const
682 {
683     if (!m_volumeElement)
684         return false;
685
686     gboolean muted;
687     g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
688     GST_INFO_OBJECT(pipeline(), "Player is muted: %s", toString(static_cast<bool>(muted)).utf8().data());
689     return muted;
690 }
691
692 void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
693 {
694     if (!m_player || !m_volumeElement)
695         return;
696
697     gboolean muted;
698     g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
699     m_player->muteChanged(static_cast<bool>(muted));
700 }
701
702 void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
703 {
704     // This is called when m_volumeElement receives the notify::mute signal.
705     player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] {
706         player->notifyPlayerOfMute();
707     });
708 }
709
// Caches whether the client currently allows accelerated compositing; the
// flag is consulted on every frame (e.g. in triggerRepaint and naturalSize).
void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
{
    m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled();
}
714
715 #if USE(TEXTURE_MAPPER_GL)
// Returns the object the compositor should treat as this player's layer:
// the Nicosia content layer when available, otherwise the player itself.
PlatformLayer* MediaPlayerPrivateGStreamerBase::platformLayer() const
{
#if USE(NICOSIA)
    return m_nicosiaLayer.ptr();
#else
    return const_cast<MediaPlayerPrivateGStreamerBase*>(this);
#endif
}
724
#if USE(NICOSIA)
// Buffers are pushed eagerly from pushTextureToCompositor(); nothing to do
// when the compositor asks for a swap.
void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
}
#else
// Hands the compositor the proxy used to exchange video frame buffers.
RefPtr<TextureMapperPlatformLayerProxy> MediaPlayerPrivateGStreamerBase::proxy() const
{
    return m_platformLayerProxy.copyRef();
}

// Buffers are pushed eagerly from pushTextureToCompositor(); nothing to do
// when the compositor asks for a swap.
void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
}
#endif
739
// Wraps the current sample in a platform-layer buffer and pushes it to the
// compositor proxy. GL-mapped frames are passed as-is (zero copy); otherwise
// the pixels are uploaded into a (possibly recycled) bitmap texture.
void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    auto proxyOperation =
        [this](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder holder(proxy.lock());

            if (!proxy.isActive())
                return;

            std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation), !m_usingFallbackVideoSink);

            GLuint textureID = frameHolder->textureID();
            std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer;
            if (textureID) {
                // GL path: the frame already lives in a texture; keep the
                // holder alive (and thus the frame mapped) with the buffer.
                layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(textureID, frameHolder->size(), frameHolder->flags(), GraphicsContext3D::RGBA);
                layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
            } else {
                // System-memory path: reuse a pooled buffer when one of the
                // right size is available, else create a fresh texture.
                layerBuffer = proxy.getAvailableBuffer(frameHolder->size(), GL_DONT_CARE);
                if (UNLIKELY(!layerBuffer)) {
                    auto texture = BitmapTextureGL::create(TextureMapperContextAttributes::get());
                    texture->reset(frameHolder->size(), frameHolder->hasAlphaChannel() ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
                    layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
                }
                frameHolder->updateTexture(layerBuffer->textureGL());
                layerBuffer->setExtraFlags(texMapFlagFromOrientation(m_videoSourceOrientation) | (frameHolder->hasAlphaChannel() ? TextureMapperGL::ShouldBlend : 0));
            }
            proxy.pushNextBuffer(WTFMove(layerBuffer));
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
780 #endif // USE(TEXTURE_MAPPER_GL)
781
// Main-thread side of the non-accelerated rendering path: asks the player to
// repaint, then releases the GStreamer thread blocked on m_drawCondition in
// triggerRepaint().
void MediaPlayerPrivateGStreamerBase::repaint()
{
    ASSERT(m_sample);
    ASSERT(isMainThread());

    m_player->repaint();

    LockHolder lock(m_drawMutex);
    m_drawCondition.notifyOne();
}
792
// Stores the new sample and arranges for it to be rendered. Called from the
// GStreamer streaming thread (see cancelRepaint() for the locking rationale).
void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
    bool triggerResize;
    {
        auto sampleLocker = holdLock(m_sampleMutex);
        // The very first sample is the earliest point where dimensions are known.
        triggerResize = !m_sample;
        m_sample = sample;
    }

    if (triggerResize) {
        GST_DEBUG_OBJECT(pipeline(), "First sample reached the sink, triggering video dimensions update");
        m_notifier->notify(MainThreadNotification::SizeChanged, [this] {
            m_player->sizeChanged();
        });
    }

    if (!m_renderingCanBeAccelerated) {
        // Non-accelerated path: painting happens on the main thread via
        // m_drawTimer; block here until repaint()/cancelRepaint() notifies.
        LockHolder locker(m_drawMutex);
        if (m_destroying)
            return;
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
        return;
    }

#if USE(TEXTURE_MAPPER_GL)
    if (m_usingFallbackVideoSink) {
        // Accelerated compositing with the fallback sink: schedule the texture
        // upload on the compositor thread and wait until it has been processed.
        LockHolder lock(m_drawMutex);
        auto proxyOperation =
            [this](TextureMapperPlatformLayerProxy& proxy)
            {
                return proxy.scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); });
            };
#if USE(NICOSIA)
        if (!proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy()))
            return;
#else
        if (!proxyOperation(*m_platformLayerProxy))
            return;
#endif
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
    } else
        pushTextureToCompositor();
#endif // USE(TEXTURE_MAPPER_GL)
}
839
// "repaint-requested" signal handler of the fallback video sink.
void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
{
    player->triggerRepaint(sample);
}
844
// Wakes up any GStreamer thread blocked on m_drawCondition. When |destroying| is
// true, also flags the player so later triggerRepaint() calls return immediately
// instead of waiting again.
void MediaPlayerPrivateGStreamerBase::cancelRepaint(bool destroying)
{
    // The goal of this function is to release the GStreamer thread from m_drawCondition in triggerRepaint() in non-AC case,
    // to avoid a deadlock if the player gets paused while waiting for drawing (see https://bugs.webkit.org/show_bug.cgi?id=170003):
    // the main thread is waiting for the GStreamer thread to pause, but the GStreamer thread is locked waiting for the
    // main thread to draw. This deadlock doesn't happen when using AC because the sample is processed (not painted) in the compositor
    // thread, so the main thread can request the pause and wait if the GStreamer thread is waiting for the compositor thread.
    //
    // This function is also used when destroying the player (destroying parameter is true), to release the gstreamer thread from
    // m_drawCondition and to ensure that new triggerRepaint calls won't wait on m_drawCondition.
    if (!m_renderingCanBeAccelerated) {
        LockHolder locker(m_drawMutex);
        m_drawTimer.stop();
        m_destroying = destroying;
        m_drawCondition.notifyOne();
    }
}
862
// "repaint-cancelled" signal handler of the fallback video sink.
void MediaPlayerPrivateGStreamerBase::repaintCancelledCallback(MediaPlayerPrivateGStreamerBase* player)
{
    player->cancelRepaint();
}
867
868 #if USE(GSTREAMER_GL)
// appsink "new-sample" signal handler: pulls the sample and hands it to triggerRepaint().
GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}
875
// appsink "new-preroll" signal handler: pulls the preroll sample so the first
// frame is rendered before playback starts.
GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}
882
// Drops the current video buffer (used on DRAIN queries and flush-start events)
// while keeping a caps-only dummy sample so the video dimensions stay available.
void MediaPlayerPrivateGStreamerBase::flushCurrentBuffer()
{
    GST_DEBUG_OBJECT(pipeline(), "Flushing video sample");
    auto sampleLocker = holdLock(m_sampleMutex);

    if (m_sample) {
        // Replace by a new sample having only the caps, so this dummy sample is still useful to get the dimensions.
        // This prevents resizing problems when the video changes its quality and a DRAIN is performed.
        const GstStructure* info = gst_sample_get_info(m_sample.get());
        m_sample = adoptGRef(gst_sample_new(nullptr, gst_sample_get_caps(m_sample.get()),
            gst_sample_get_segment(m_sample.get()), info ? gst_structure_copy(info) : nullptr));
    }

    // Also drop the buffer queued on the compositor side, preserving its texture.
    auto proxyOperation =
        [](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder locker(proxy.lock());

            if (proxy.isActive())
                proxy.dropCurrentBufferWhilePreservingTexture();
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
911 #endif
912
// Caches the size requested by the player; no pipeline side effects.
void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
{
    m_size = size;
}
917
// Software rendering path: draws the current sample into the graphics context.
void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (context.paintingDisabled())
        return;

    if (!m_player->visible())
        return;

    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    // CompositeCopy: replace the destination pixels rather than blending.
    ImagePaintingOptions paintingOptions(CompositeCopy);
    if (m_renderingCanBeAccelerated)
        paintingOptions.m_orientationDescription.setImageOrientationEnum(m_videoSourceOrientation);

    RefPtr<ImageGStreamer> gstImage = ImageGStreamer::createImage(m_sample.get());
    if (!gstImage)
        return;

    context.drawImage(gstImage->image(), rect, gstImage->rect(), paintingOptions);
}
940
941 #if USE(GSTREAMER_GL)
// Copies the current GL video frame into |outputTexture| (WebGL texImage path).
// Returns false when the GL sink isn't in use, premultiplied alpha is requested
// (unsupported here), or the current sample has no GL texture.
bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    UNUSED_PARAM(context);

    if (m_usingFallbackVideoSink)
        return false;

    if (premultiplyAlpha)
        return false;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return false;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation), true);

    auto textureID = frameHolder->textureID();
    if (!textureID)
        return false;

    // Swap width/height when the source orientation rotates by 90/270 degrees.
    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    // The copier is created lazily and reused for subsequent frames.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);
}
972
// Returns a cairo GL surface wrapping a copy of the current video frame, or null
// when unavailable. Only implemented for the Cairo + accelerated-2D-canvas build.
NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
{
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
    if (m_usingFallbackVideoSink)
        return nullptr;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return nullptr;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation), true);

    auto textureID = frameHolder->textureID();
    if (!textureID)
        return nullptr;

    // Swap width/height when the source orientation rotates by 90/270 degrees.
    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    // The copy must happen with the compositing-shared GL context current.
    GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
    context->makeContextCurrent();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    if (!m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, 0, GraphicsContext3D::TEXTURE_2D, 0, GraphicsContext3D::RGBA, GraphicsContext3D::RGBA, GraphicsContext3D::UNSIGNED_BYTE, false, m_videoSourceOrientation))
        return nullptr;

    return adoptRef(cairo_gl_surface_create_for_texture(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, m_videoTextureCopier->resultTexture(), size.width(), size.height()));
#else
    return nullptr;
#endif
}
1008 #endif // USE(GSTREAMER_GL)
1009
1010 void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(const ImageOrientation& orientation)
1011 {
1012     if (m_videoSourceOrientation == orientation)
1013         return;
1014
1015     m_videoSourceOrientation = orientation;
1016 }
1017
// Fullscreen presentation is unconditionally supported by this backend.
bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
    return true;
}
1022
1023 MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
1024 {
1025     if (m_readyState == MediaPlayer::HaveNothing)
1026         return MediaPlayer::Unknown;
1027
1028     if (isLiveStream())
1029         return MediaPlayer::LiveStream;
1030
1031     return MediaPlayer::Download;
1032 }
1033
1034 #if USE(GSTREAMER_GL)
// Creates the appsink used by the GL rendering path. Samples/prerolls are
// delivered via signals to newSampleCallback()/newPrerollCallback(), and a pad
// probe flushes the cached frame on DRAIN queries and flush-start events.
GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
{
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
    if (!appsink)
        return nullptr;

    // Queue at most one buffer and use signal emission instead of blocking pulls.
    g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
    g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
    g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(appsink, "sink"));
    gst_pad_add_probe(pad.get(), static_cast<GstPadProbeType>(GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_FLUSH), [] (GstPad*, GstPadProbeInfo* info,  gpointer userData) -> GstPadProbeReturn {
        // In some platforms (e.g. OpenMAX on the Raspberry Pi) when a resolution change occurs the
        // pipeline has to be drained before a frame with the new resolution can be decoded.
        // In this context, it's important that we don't hold references to any previous frame
        // (e.g. m_sample) so that decoding can continue.
        // We are also not supposed to keep the original frame after a flush.
        if (info->type & GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM) {
            if (GST_QUERY_TYPE(GST_PAD_PROBE_INFO_QUERY(info)) != GST_QUERY_DRAIN)
                return GST_PAD_PROBE_OK;
            GST_DEBUG("Acting upon DRAIN query");
        }
        if (info->type & GST_PAD_PROBE_TYPE_EVENT_FLUSH) {
            if (GST_EVENT_TYPE(GST_PAD_PROBE_INFO_EVENT(info)) != GST_EVENT_FLUSH_START)
                return GST_PAD_PROBE_OK;
            GST_DEBUG("Acting upon flush-start event");
        }

        auto* player = static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
        player->flushCurrentBuffer();
        return GST_PAD_PROBE_OK;
    }, this, nullptr);

    return appsink;
}
1073
// Builds the GL video sink bin: glupload ! glcolorconvert ! appsink, ghost-padded
// so it can be used as a single sink element. Returns nullptr when the GstGL
// elements are unavailable or linking fails.
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
{
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    gboolean result = TRUE;
    GstElement* videoSink = gst_bin_new(nullptr);
    GstElement* upload = gst_element_factory_make("glupload", nullptr);
    GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
    GstElement* appsink = createGLAppSink();

    if (!appsink || !upload || !colorconvert) {
        GST_WARNING("Failed to create GstGL elements");
        gst_object_unref(videoSink);

        // The elements were not added to the bin yet, so unref each one created.
        if (upload)
            gst_object_unref(upload);
        if (colorconvert)
            gst_object_unref(colorconvert);
        if (appsink)
            gst_object_unref(appsink);

        g_warning("WebKit wasn't able to find the GStreamer opengl plugin. Hardware-accelerated zero-copy video rendering can't be enabled without this plugin.");
        return nullptr;
    }

    gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);

    // Restrict the appsink input to GL memory in the format WebKit consumes.
    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) " GST_GL_CAPS_FORMAT));

    result &= gst_element_link_pads(upload, "src", colorconvert, "sink");
    result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get());

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
    gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));

    if (!result) {
        GST_WARNING("Failed to link GstGL elements");
        gst_object_unref(videoSink);
        videoSink = nullptr;
    }
    return videoSink;
}
1117
// Lazily requests the GstGLDisplay and application GL contexts and sets both on
// the video sink so GStreamer GL elements share WebKit's GL resources.
void MediaPlayerPrivateGStreamerBase::ensureGLVideoSinkContext()
{
    if (!m_glDisplayElementContext)
        m_glDisplayElementContext = adoptGRef(requestGLContext(GST_GL_DISPLAY_CONTEXT_TYPE));

    if (m_glDisplayElementContext)
        gst_element_set_context(m_videoSink.get(), m_glDisplayElementContext.get());

    if (!m_glAppElementContext)
        m_glAppElementContext = adoptGRef(requestGLContext("gst.gl.app_context"));

    if (m_glAppElementContext)
        gst_element_set_context(m_videoSink.get(), m_glAppElementContext.get());
}
1132 #endif // USE(GSTREAMER_GL)
1133
// Creates the video sink: the GL sink when accelerated rendering is available,
// otherwise the WebKit fallback sink. With MEDIA_STATISTICS enabled the sink is
// wrapped in an fpsdisplaysink to collect frame statistics.
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    acceleratedRenderingStateChanged();

#if USE(GSTREAMER_GL)
    if (m_renderingCanBeAccelerated)
        m_videoSink = createVideoSinkGL();
#endif

    if (!m_videoSink) {
        // Fall back to the software sink and drive painting through signals.
        m_usingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
        g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
    }

    GstElement* videoSink = nullptr;
#if ENABLE(MEDIA_STATISTICS)
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);

        // Turn off text overlay unless tracing is enabled.
        if (gst_debug_category_get_threshold(webkit_media_player_debug) < GST_LEVEL_TRACE)
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);

        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }
#endif

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);

    return videoSink;
}
1175
// Stores the pipeline's stream-volume element and synchronizes volume/mute state
// between it and the MediaPlayer (both directions, via notify:: signals).
void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
{
    ASSERT(!m_volumeElement);
    m_volumeElement = volume;

    // We don't set the initial volume because we trust the sink to keep it for us. See
    // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
    if (!m_player->platformVolumeConfigurationRequired()) {
        GST_DEBUG_OBJECT(pipeline(), "Setting stream volume to %f", m_player->volume());
        g_object_set(m_volumeElement.get(), "volume", m_player->volume(), nullptr);
    } else
        GST_DEBUG_OBJECT(pipeline(), "Not setting stream volume, trusting system one");

    GST_DEBUG_OBJECT(pipeline(), "Setting stream muted %s", toString(m_player->muted()).utf8().data());
    g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);

    g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
    g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
}
1195
1196 unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
1197 {
1198     guint64 decodedFrames = 0;
1199     if (m_fpsSink)
1200         g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
1201     return static_cast<unsigned>(decodedFrames);
1202 }
1203
1204 unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
1205 {
1206     guint64 framesDropped = 0;
1207     if (m_fpsSink)
1208         g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
1209     return static_cast<unsigned>(framesDropped);
1210 }
1211
1212 unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
1213 {
1214     GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
1215     gint64 position = 0;
1216
1217     if (audioSink() && gst_element_query(audioSink(), query))
1218         gst_query_parse_position(query, 0, &position);
1219
1220     gst_query_unref(query);
1221     return static_cast<unsigned>(position);
1222 }
1223
1224 unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
1225 {
1226     GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
1227     gint64 position = 0;
1228
1229     if (gst_element_query(m_videoSink.get(), query))
1230         gst_query_parse_position(query, 0, &position);
1231
1232     gst_query_unref(query);
1233     return static_cast<unsigned>(position);
1234 }
1235
1236 #if ENABLE(ENCRYPTED_MEDIA)
// Handles a GST_EVENT_PROTECTION event: extracts the init data and forwards it to
// the MediaPlayer on the main thread so an EME session can be created.
void MediaPlayerPrivateGStreamerBase::initializationDataEncountered(GstEvent* event)
{
    const char* eventKeySystemUUID = nullptr;
    GstBuffer* data = nullptr;
    gst_event_parse_protection(event, &eventKeySystemUUID, &data, nullptr);

    // Check if the system key of the protection event is the same of the CDM instance.
    // For example: we can receive a new Widevine protection event but the CDM instance initialized with
    // Playready, so we ignore this event.
    if (m_cdmInstance
#if GST_CHECK_VERSION(1, 15, 0)
        && g_strcmp0(eventKeySystemUUID, GST_PROTECTION_UNSPECIFIED_SYSTEM_ID)
#endif
        && g_strcmp0(GStreamerEMEUtilities::keySystemToUuid(m_cdmInstance->keySystem()), eventKeySystemUUID)) {
        GST_DEBUG("The protection event with UUID %s is ignored because it isn't supported by the CDM %s", eventKeySystemUUID, m_cdmInstance->keySystem().utf8().data());
        return;
    }

    GstMappedBuffer dataMapped(data, GST_MAP_READ);
    if (!dataMapped) {
        GST_WARNING("cannot map %s protection data", eventKeySystemUUID);
        return;
    }

    GST_TRACE("init data encountered for %s of size %" G_GSIZE_FORMAT, eventKeySystemUUID, dataMapped.size());
    GST_MEMDUMP("init data", reinterpret_cast<const uint8_t*>(dataMapped.data()), dataMapped.size());
    // Copy the init data out of the mapped buffer before dispatching to the main thread.
    InitData initData(reinterpret_cast<const uint8_t*>(dataMapped.data()), dataMapped.size());

    String eventKeySystemUUIDString = eventKeySystemUUID;
    RunLoop::main().dispatch([weakThis = makeWeakPtr(*this), eventKeySystemUUID = eventKeySystemUUIDString, initData] {
        if (!weakThis)
            return;

        GST_DEBUG("scheduling initializationDataEncountered event for %s with init data size of %u", eventKeySystemUUID.utf8().data(), initData.sizeInBytes());
        GST_MEMDUMP("init datas", reinterpret_cast<const uint8_t*>(initData.characters8()), initData.sizeInBytes());
        // Default to "cenc"; the unspecified system id marks WebM-style init data.
        auto initDataType = "cenc"_s;
#if GST_CHECK_VERSION(1, 15, 0)
        if (eventKeySystemUUID == GST_PROTECTION_UNSPECIFIED_SYSTEM_ID)
            initDataType = "webm"_s;
#endif
        weakThis->m_player->initializationDataEncountered(initDataType,
            ArrayBuffer::create(reinterpret_cast<const uint8_t*>(initData.characters8()), initData.sizeInBytes()));
    });
}
1281
1282 void MediaPlayerPrivateGStreamerBase::cdmInstanceAttached(CDMInstance& instance)
1283 {
1284     if (m_cdmInstance != &instance) {
1285         m_cdmInstance = &instance;
1286         GST_DEBUG_OBJECT(pipeline(), "CDM instance %p set", m_cdmInstance.get());
1287         m_protectionCondition.notifyAll();
1288     }
1289 }
1290
1291 void MediaPlayerPrivateGStreamerBase::cdmInstanceDetached(CDMInstance& instance)
1292 {
1293 #ifdef NDEBUG
1294     UNUSED_PARAM(instance);
1295 #endif
1296     if (m_cdmInstance == &instance) {
1297         GST_DEBUG_OBJECT(pipeline(), "detaching CDM instance %p", m_cdmInstance.get());
1298         m_cdmInstance = nullptr;
1299         m_protectionCondition.notifyAll();
1300     }
1301 }
1302
// Forwards a decryption attempt for |instance| (expected to be the attached CDM
// instance) to the pipeline-level implementation.
void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithInstance(CDMInstance& instance)
{
    ASSERT(m_cdmInstance.get() == &instance);
    GST_TRACE("instance %p, current stored %p", &instance, m_cdmInstance.get());
    attemptToDecryptWithLocalInstance();
}
1309
// Placeholder: the pipeline-level decryption attempt is not implemented here
// (see FIXME below).
void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithLocalInstance()
{
    // FIXME.
}
1314
1315 void MediaPlayerPrivateGStreamerBase::dispatchDecryptionKey(GstBuffer* buffer)
1316 {
1317     bool eventHandled = gst_element_send_event(pipeline(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB,
1318         gst_structure_new("drm-cipher", "key", GST_TYPE_BUFFER, buffer, nullptr)));
1319     m_needToResendCredentials = m_handledProtectionEvents.size() > 0;
1320     GST_TRACE("emitted decryption cipher key on pipeline, event handled %s, need to resend credentials %s", boolForPrinting(eventHandled), boolForPrinting(m_needToResendCredentials));
1321 }
1322
// Hands the attached CDM instance to the MediaPlayer so it can trigger a
// decryption attempt in the GStreamer playback pipeline.
void MediaPlayerPrivateGStreamerBase::dispatchCDMInstance()
{
    // This function dispatches the CDMInstance in GStreamer playback pipeline.
    if (m_cdmInstance)
        m_player->attemptToDecryptWithInstance(const_cast<CDMInstance&>(*m_cdmInstance.get()));
}
1329
// Entry point for protection events coming from the pipeline: skips events whose
// sequence number was already handled, otherwise extracts their init data.
void MediaPlayerPrivateGStreamerBase::handleProtectionEvent(GstEvent* event)
{
    if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
        GST_DEBUG_OBJECT(pipeline(), "event %u already handled", GST_EVENT_SEQNUM(event));
        return;
    }
    // NOTE(review): nothing in this file inserts seqnums into
    // m_handledProtectionEvents; presumably they are registered elsewhere — confirm.
    initializationDataEncountered(event);
}
1338
// Updates the cached waiting-for-key state and notifies the player on change.
// The transition to "not waiting" is suppressed while any decryptor still
// reports itself as waiting (see waitingForKey()).
void MediaPlayerPrivateGStreamerBase::setWaitingForKey(bool waitingForKey)
{
    // We bail out if values did not change or if we are requested to not wait anymore but there are still waiting decryptors.
    GST_TRACE("waitingForKey %s, m_waitingForKey %s", boolForPrinting(waitingForKey), boolForPrinting(m_waitingForKey));
    if (waitingForKey == m_waitingForKey || (!waitingForKey && this->waitingForKey()))
        return;

    m_waitingForKey = waitingForKey;
    GST_DEBUG("waiting for key changed %s", boolForPrinting(m_waitingForKey));
    m_player->waitingForKeyChanged();
}
1350
// Returns true if any decryptor in the pipeline is currently waiting for a key.
// From PAUSED upwards a pipeline-wide query is used; between READY and PAUSED the
// bin is iterated and each decryptor element is queried directly.
bool MediaPlayerPrivateGStreamerBase::waitingForKey() const
{
    if (!m_pipeline)
        return false;

    GstState state;
    gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);

    bool result = false;
    GRefPtr<GstQuery> query = adoptGRef(gst_query_new_custom(GST_QUERY_CUSTOM, gst_structure_new_empty("any-decryptor-waiting-for-key")));
    if (state >= GST_STATE_PAUSED) {
        result = gst_element_query(m_pipeline.get(), query.get());
        GST_TRACE("query result %s, on %s", boolForPrinting(result), gst_element_state_get_name(state));
    } else if (state >= GST_STATE_READY) {
        // Running a query in the pipeline is easier but it only works when the pipeline is set up and running, otherwise we need to inspect it and ask the decryptors directly.
        GUniquePtr<GstIterator> iterator(gst_bin_iterate_recurse(GST_BIN(m_pipeline.get())));
        GstIteratorResult iteratorResult;
        do {
            // The fold keeps going (returns TRUE) until a decryptor element
            // answers the query, so GST_ITERATOR_OK below means "some decryptor
            // is waiting"; GST_ITERATOR_DONE means none of them is.
            iteratorResult = gst_iterator_fold(iterator.get(), [](const GValue *item, GValue *, gpointer data) -> gboolean {
                GstElement* element = GST_ELEMENT(g_value_get_object(item));
                GstQuery* query = GST_QUERY(data);
                return !WEBKIT_IS_MEDIA_CENC_DECRYPT(element) || !gst_element_query(element, query);
            }, nullptr, query.get());
            if (iteratorResult == GST_ITERATOR_RESYNC)
                gst_iterator_resync(iterator.get());
        } while (iteratorResult == GST_ITERATOR_RESYNC);
        if (iteratorResult == GST_ITERATOR_ERROR)
            GST_WARNING("iterator returned an error");
        result = iteratorResult == GST_ITERATOR_OK;
        GST_TRACE("iterator result %d, waiting %s", iteratorResult, boolForPrinting(result));
    }

    return result;
}
1385 #endif
1386
// Reports key-system support for this backend. Only ClearKey is advertised, and
// only in ENCRYPTED_MEDIA builds; mimeType is used solely for logging here.
bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
{
    bool result = false;

#if ENABLE(ENCRYPTED_MEDIA)
    result = GStreamerEMEUtilities::isClearKeyKeySystem(keySystem);
#endif

    GST_DEBUG("checking for KeySystem support with %s and type %s: %s", keySystem.utf8().data(), mimeType.utf8().data(), boolForPrinting(result));
    return result;
}
1398
// Base implementation of the extended supports-type hook: returns |result|
// unchanged; |parameters| is ignored.
MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
{
    UNUSED_PARAM(parameters);
    return result;
}
1404
1405 }
1406
1407 #endif // USE(GSTREAMER)