95a0bc09f45a2bb59b61551085a8c960ccbe68d7
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamerBase.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
7  * Copyright (C) 2015, 2016 Metrological Group B.V.
8  *
9  * This library is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Library General Public
11  * License as published by the Free Software Foundation; either
12  * version 2 of the License, or (at your option) any later version.
13  *
14  * This library is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
17  * Library General Public License for more details.
18  *
19  * You should have received a copy of the GNU Library General Public License
 21  * along with this library; see the file COPYING.LIB.  If not, write to
21  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
22  * Boston, MA 02110-1301, USA.
23  */
24
25 #include "config.h"
26 #include "MediaPlayerPrivateGStreamerBase.h"
27
28 #if ENABLE(VIDEO) && USE(GSTREAMER)
29
 30 #include "GStreamerCommon.h"
 31 #include "GraphicsContext.h"
 32 #include "GraphicsContext3D.h"
 33 #include "ImageGStreamer.h"
 34 #include "ImageOrientation.h"
 35 #include "IntRect.h"
 36 #include "Logging.h"
 37 #include "MediaPlayer.h"
 38 #include "NotImplemented.h"
 39 #include "VideoSinkGStreamer.h"
 40 #include <cstdlib>
 41 #include <wtf/MathExtras.h>
 42 #include <wtf/StringPrintStream.h>
 43 #include <wtf/glib/GUniquePtr.h>
 44 #include <wtf/text/AtomicString.h>
 45 #include <wtf/text/CString.h>
45
46 #include <gst/audio/streamvolume.h>
47 #include <gst/video/gstvideometa.h>
48
49 #if ENABLE(ENCRYPTED_MEDIA)
50 #include "CDMInstance.h"
51 #include "GStreamerEMEUtilities.h"
52 #include "SharedBuffer.h"
53 #include "WebKitCommonEncryptionDecryptorGStreamer.h"
54 #endif
55
56 #if USE(GSTREAMER_GL)
57 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
58 #define GST_GL_CAPS_FORMAT "{ BGRx, BGRA }"
59 #define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureBGRAToRGBA
60 #define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertBGRAToRGBA
61 #else
62 #define GST_GL_CAPS_FORMAT "{ xRGB, ARGB }"
63 #define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureARGBToRGBA
64 #define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertARGBToRGBA
65 #endif
66
67 #include <gst/app/gstappsink.h>
68
69 #if USE(LIBEPOXY)
70 // Include the <epoxy/gl.h> header before <gst/gl/gl.h>.
71 #include <epoxy/gl.h>
72
73 // Workaround build issue with RPi userland GLESv2 headers and libepoxy <https://webkit.org/b/185639>
74 #if !GST_CHECK_VERSION(1, 14, 0)
75 #include <gst/gl/gstglconfig.h>
76 #if defined(GST_GL_HAVE_WINDOW_DISPMANX) && GST_GL_HAVE_WINDOW_DISPMANX
77 #define __gl2_h_
78 #undef GST_GL_HAVE_GLSYNC
79 #define GST_GL_HAVE_GLSYNC 1
80 #endif
81 #endif // !GST_CHECK_VERSION(1, 14, 0)
82 #endif // USE(LIBEPOXY)
83
84 #define GST_USE_UNSTABLE_API
85 #include <gst/gl/gl.h>
86 #undef GST_USE_UNSTABLE_API
87
88 #include "GLContext.h"
89 #if USE(GLX)
90 #include "GLContextGLX.h"
91 #include <gst/gl/x11/gstgldisplay_x11.h>
92 #endif
93
94 #if USE(EGL)
95 #include "GLContextEGL.h"
96 #include <gst/gl/egl/gstgldisplay_egl.h>
97 #endif
98
99 #if PLATFORM(X11)
100 #include "PlatformDisplayX11.h"
101 #endif
102
103 #if PLATFORM(WAYLAND)
104 #include "PlatformDisplayWayland.h"
105 #elif PLATFORM(WPE)
106 #include "PlatformDisplayLibWPE.h"
107 #endif
108
109 // gstglapi.h may include eglplatform.h and it includes X.h, which
110 // defines None, breaking MediaPlayer::None enum
111 #if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
112 #undef None
113 #endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
114 #include "VideoTextureCopierGStreamer.h"
115 #endif // USE(GSTREAMER_GL)
116
117 #if USE(TEXTURE_MAPPER_GL)
118 #include "BitmapTextureGL.h"
119 #include "BitmapTexturePool.h"
120 #include "TextureMapperContextAttributes.h"
121 #include "TextureMapperGL.h"
122 #include "TextureMapperPlatformLayerBuffer.h"
123 #include "TextureMapperPlatformLayerProxy.h"
124 #if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
125 #include <cairo-gl.h>
126 #endif
127 #endif // USE(TEXTURE_MAPPER_GL)
128
129 GST_DEBUG_CATEGORY(webkit_media_player_debug);
130 #define GST_CAT_DEFAULT webkit_media_player_debug
131
132
133 namespace WebCore {
134 using namespace std;
135
136 #if USE(GSTREAMER_HOLEPUNCH)
137 static const FloatSize s_holePunchDefaultFrameSize(1280, 720);
138 #endif
139
// Computes the greatest common divisor of |a| and |b| with the iterative
// Euclidean algorithm. The result is always non-negative (0 when both inputs
// are 0). Used below to reduce the display aspect ratio before scaling.
// Uses std::abs instead of GLib's ABS macro: same behavior, standard idiom.
static int greatestCommonDivisor(int a, int b)
{
    while (b) {
        int temp = a;
        a = b;
        b = temp % b;
    }

    return std::abs(a);
}
150
151 #if USE(TEXTURE_MAPPER_GL)
152 static inline TextureMapperGL::Flags texMapFlagFromOrientation(const ImageOrientation& orientation)
153 {
154     switch (orientation) {
155     case DefaultImageOrientation:
156         return 0;
157     case OriginRightTop:
158         return TextureMapperGL::ShouldRotateTexture90;
159     case OriginBottomRight:
160         return TextureMapperGL::ShouldRotateTexture180;
161     case OriginLeftBottom:
162         return TextureMapperGL::ShouldRotateTexture270;
163     default:
164         ASSERT_NOT_REACHED();
165     }
166
167     return 0;
168 }
169 #endif
170
171 #if USE(TEXTURE_MAPPER_GL)
// RAII holder that keeps a GstSample's buffer mapped (and therefore its
// pixels or GL texture valid) for as long as the compositor retains the
// TextureMapperPlatformLayerBuffer built from it.
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
    // Maps the sample's buffer for reading. When gstGLEnabled is true the
    // mapping goes through GStreamer GL and m_textureID receives the GL
    // texture name; otherwise the raw pixels remain reachable through
    // m_videoFrame for a later updateTexture() upload.
    explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags, bool gstGLEnabled)
    {
        GstVideoInfo videoInfo;
        if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
            return;

        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
        m_hasAlphaChannel = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo);
        m_buffer = gst_sample_get_buffer(sample);
        if (UNLIKELY(!GST_IS_BUFFER(m_buffer)))
            return;

#if USE(GSTREAMER_GL)
        m_flags = flags | (m_hasAlphaChannel ? TextureMapperGL::ShouldBlend : 0) | TEXTURE_MAPPER_COLOR_CONVERT_FLAG;

        if (gstGLEnabled) {
            // GST_MAP_GL maps the buffer's GL memory; data[0] then holds the texture id.
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL));
            if (m_isMapped)
                m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
        } else
#endif // USE(GSTREAMER_GL)

        {
            // Non-GL fallback: map the system-memory pixels for reading.
            m_textureID = 0;
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer, GST_MAP_READ);
            if (m_isMapped) {
                // Right now the TextureMapper only supports chromas with one plane
                ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
            }
        }
    }

    virtual ~GstVideoFrameHolder()
    {
        if (UNLIKELY(!m_isMapped))
            return;

        gst_video_frame_unmap(&m_videoFrame);
    }

    const IntSize& size() const { return m_size; }
    bool hasAlphaChannel() const { return m_hasAlphaChannel; }
    TextureMapperGL::Flags flags() const { return m_flags; }
    // 0 unless the frame was successfully mapped through GStreamer GL.
    GLuint textureID() const { return m_textureID; }

    // Uploads the mapped (non-GL) frame into |texture|: tries the buffer's
    // GstVideoGLTextureUploadMeta fast path first and falls back to a plain
    // pixel copy. Only valid when no GL texture id was obtained at map time.
    void updateTexture(BitmapTextureGL& texture)
    {
        ASSERT(!m_textureID);
        GstVideoGLTextureUploadMeta* meta;
        if ((meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer))) {
            if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
                guint ids[4] = { texture.id(), 0, 0, 0 };

                if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                    return;
            }
        }

        int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 0);
        const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0);
        texture.updateContents(srcData, WebCore::IntRect(0, 0, m_size.width(), m_size.height()), WebCore::IntPoint(0, 0), stride);
    }

private:
    GstBuffer* m_buffer;
    GstVideoFrame m_videoFrame { };
    IntSize m_size;
    bool m_hasAlphaChannel;
    TextureMapperGL::Flags m_flags { };
    GLuint m_textureID { 0 };
    bool m_isMapped { false };
};
246 #endif
247
// Registers the "webkitmediaplayer" GStreamer debug category that backs the
// GST_* logging macros in this file (see GST_CAT_DEFAULT above).
void MediaPlayerPrivateGStreamerBase::initializeDebugCategory()
{
    GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
}
252
// Shared base-class construction: sets up the main-thread notifier, the
// repaint timer, and — when texture-mapper compositing is in use — the
// platform layer (Nicosia content layer or texture-mapper proxy) that
// decoded frames are pushed to.
MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
    , m_player(player)
    , m_fpsSink(nullptr)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_networkState(MediaPlayer::Empty)
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
#if USE(TEXTURE_MAPPER_GL)
#if USE(NICOSIA)
    , m_nicosiaLayer(Nicosia::ContentLayer::create(Nicosia::ContentLayerTextureMapperImpl::createFactory(*this)))
#else
    , m_platformLayerProxy(adoptRef(new TextureMapperPlatformLayerProxy()))
#endif
#endif
{
}
269
// Tear-down order matters here: detach the compositing layer, wake any
// blocked streaming threads, disconnect signal handlers, and only then take
// the pipeline to NULL so no callback can run against a half-destroyed player.
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
    flushCurrentBuffer();
#if USE(TEXTURE_MAPPER_GL) && USE(NICOSIA)
    downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).invalidateClient();
#endif

#if ENABLE(ENCRYPTED_MEDIA)
    // Unblock any streaming thread waiting in handleSyncMessage() for a CDM instance.
    m_protectionCondition.notifyAll();
#endif
    m_notifier->invalidate();

    if (m_videoSink) {
        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
#if USE(GSTREAMER_GL)
        // In the GL path the sink is a bin wrapping an appsink; disconnect from it too.
        if (GST_IS_BIN(m_videoSink.get())) {
            GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
            g_signal_handlers_disconnect_by_data(appsink.get(), this);
        }
#endif
    }

    if (m_volumeElement)
        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    // This will release the GStreamer thread from m_drawCondition in non AC mode in case there's an ongoing triggerRepaint call
    // waiting there, and ensure that any triggerRepaint call reaching the lock won't wait on m_drawCondition.
    cancelRepaint(true);

    // The change to GST_STATE_NULL state is always synchronous. So after this gets executed we don't need to worry
    // about handlers running in the GStreamer thread.
    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);

    m_player = nullptr;
}
306
307 void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
308 {
309     m_pipeline = pipeline;
310
311     GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
312     gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
313         auto& player = *static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
314
315         if (player.handleSyncMessage(message)) {
316             gst_message_unref(message);
317             return GST_BUS_DROP;
318         }
319
320         return GST_BUS_PASS;
321     }, this, nullptr);
322 }
323
324 bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
325 {
326     UNUSED_PARAM(message);
327     if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
328         return false;
329
330     const gchar* contextType;
331     gst_message_parse_context_type(message, &contextType);
332     GST_DEBUG_OBJECT(pipeline(), "Handling %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message));
333
334 #if USE(GSTREAMER_GL)
335     GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType));
336     if (elementContext) {
337         gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
338         return true;
339     }
340 #endif // USE(GSTREAMER_GL)
341
342 #if ENABLE(ENCRYPTED_MEDIA)
343     if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
344         if (isMainThread()) {
345             GST_ERROR("can't handle drm-preferred-decryption-system-id need context message in the main thread");
346             ASSERT_NOT_REACHED();
347             return false;
348         }
349         GST_DEBUG_OBJECT(pipeline(), "handling drm-preferred-decryption-system-id need context message");
350         LockHolder lock(m_protectionMutex);
351         ProtectionSystemEvents protectionSystemEvents(message);
352         GST_TRACE("found %lu protection events, %lu decryptors available", protectionSystemEvents.events().size(), protectionSystemEvents.availableSystems().size());
353         InitData initData;
354
355         for (auto& event : protectionSystemEvents.events()) {
356             const char* eventKeySystemId = nullptr;
357             GstBuffer* data = nullptr;
358             gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);
359
360             initData.append({eventKeySystemId, data});
361             m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
362         }
363
364         initializationDataEncountered(WTFMove(initData));
365
366         GST_INFO_OBJECT(pipeline(), "waiting for a CDM instance");
367         m_protectionCondition.waitFor(m_protectionMutex, Seconds(4), [this] {
368             return this->m_cdmInstance;
369         });
370
371         if (m_cdmInstance && !m_cdmInstance->keySystem().isEmpty()) {
372             const char* preferredKeySystemUuid = GStreamerEMEUtilities::keySystemToUuid(m_cdmInstance->keySystem());
373             GST_INFO_OBJECT(pipeline(), "working with key system %s, continuing with key system %s on %s", m_cdmInstance->keySystem().utf8().data(), preferredKeySystemUuid, GST_MESSAGE_SRC_NAME(message));
374
375             GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
376             GstStructure* contextStructure = gst_context_writable_structure(context.get());
377             gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, preferredKeySystemUuid, nullptr);
378             gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
379         } else
380             GST_WARNING("CDM instance not initializaed");
381
382         return true;
383     }
384 #endif // ENABLE(ENCRYPTED_MEDIA)
385
386     return false;
387 }
388
389 #if USE(GSTREAMER_GL)
// Builds the GstContext answering a GL-related need-context query. Returns a
// new context (the caller adopts the reference) wrapping either the shared
// GstGLDisplay or the wrapped application GL context, or nullptr when the
// context type is not GL-related or the GL context could not be created.
GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const char* contextType)
{
    if (!ensureGstGLContext())
        return nullptr;

    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(displayContext, gstGLDisplay());
        return displayContext;
    }

    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(appContext);
#if GST_CHECK_VERSION(1, 11, 0)
        // The type macro was renamed GST_GL_TYPE_CONTEXT -> GST_TYPE_GL_CONTEXT in GStreamer 1.11.
        gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, gstGLContext(), nullptr);
#else
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, gstGLContext(), nullptr);
#endif
        return appContext;
    }

    return nullptr;
}
414
// Lazily creates m_glDisplay/m_glContext by wrapping WebKit's shared
// compositing display and GL context in their GStreamer GL counterparts.
// Returns true when a usable m_glContext exists. Safe to call repeatedly;
// the first successful call caches the result.
bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
{
    if (m_glContext)
        return true;

    auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();

    // The floating ref removal support was added in https://bugzilla.gnome.org/show_bug.cgi?id=743062.
    bool shouldAdoptRef = webkitGstCheckVersion(1, 13, 1);
    if (!m_glDisplay) {
        // Pick the GstGLDisplay flavor matching the platform display in use.
#if PLATFORM(X11)
#if USE(GLX)
        if (is<PlatformDisplayX11>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared GL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
        }
#elif USE(EGL)
        if (is<PlatformDisplayX11>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared EGL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
        }
#endif
#endif

#if PLATFORM(WAYLAND)
        if (is<PlatformDisplayWayland>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating Wayland shared display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
        }
#endif

#if PLATFORM(WPE)
        ASSERT(is<PlatformDisplayLibWPE>(sharedDisplay));
        GST_DEBUG_OBJECT(pipeline(), "Creating WPE shared EGL display");
        if (shouldAdoptRef)
            m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayLibWPE>(sharedDisplay).eglDisplay())));
        else
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayLibWPE>(sharedDisplay).eglDisplay()));
#endif

        ASSERT(m_glDisplay);
    }

    GLContext* webkitContext = sharedDisplay.sharingGLContext();
    // EGL and GLX are mutually exclusive, no need for ifdefs here.
    GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;

#if USE(OPENGL_ES)
    GstGLAPI glAPI = GST_GL_API_GLES2;
#elif USE(OPENGL)
    GstGLAPI glAPI = GST_GL_API_OPENGL;
#else
    ASSERT_NOT_REACHED();
#endif

    PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
    if (!contextHandle)
        return false;

    // Wrap (not own) WebKit's native GL context handle for GStreamer's use.
    if (shouldAdoptRef)
        m_glContext = adoptGRef(gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI));
    else
        m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);

    return true;
}
490 #endif // USE(GSTREAMER_GL)
491
// Returns the natural size of the video in pixels: the negotiated frame size
// corrected by the pixel aspect ratio (and by the source orientation when
// rendering is accelerated). Returns an empty size until the sink has
// negotiated caps; the computed value is cached in m_videoSize.
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
#if USE(GSTREAMER_HOLEPUNCH)
    // When using the holepuch we may not be able to get the video frames size, so we can't use
    // it. But we need to report some non empty naturalSize for the player's GraphicsLayer
    // to be properly created.
    return s_holePunchDefaultFrameSize;
#endif

    if (!hasVideo())
        return FloatSize();

    if (!m_videoSize.isEmpty())
        return m_videoSize;

    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return FloatSize();

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return FloatSize();


    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size, if this fails the
    // video-sink has likely not yet negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return FloatSize();

#if USE(TEXTURE_MAPPER_GL)
    // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
    if (m_renderingCanBeAccelerated) {
        if (m_videoSourceOrientation.usesWidthAsHeight())
            originalSize = originalSize.transposedSize();
    }
#endif

    GST_DEBUG_OBJECT(pipeline(), "Original video size: %dx%d", originalSize.width(), originalSize.height());
    GST_DEBUG_OBJECT(pipeline(), "Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate DAR based on PAR and video size.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

    // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
    // Prefer whichever dimension divides evenly so only the other one is scaled.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        GST_DEBUG_OBJECT(pipeline(), "Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        GST_DEBUG_OBJECT(pipeline(), "Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        GST_DEBUG_OBJECT(pipeline(), "Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    GST_DEBUG_OBJECT(pipeline(), "Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}
570
571 void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
572 {
573     if (!m_volumeElement)
574         return;
575
576     GST_DEBUG_OBJECT(pipeline(), "Setting volume: %f", volume);
577     gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
578 }
579
580 float MediaPlayerPrivateGStreamerBase::volume() const
581 {
582     if (!m_volumeElement)
583         return 0;
584
585     return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
586 }
587
588
589 void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
590 {
591     if (!m_player || !m_volumeElement)
592         return;
593     double volume;
594     volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
595     // get_volume() can return values superior to 1.0 if the user
596     // applies software user gain via third party application (GNOME
597     // volume control for instance).
598     volume = CLAMP(volume, 0.0, 1.0);
599     m_player->volumeChanged(static_cast<float>(volume));
600 }
601
// Invoked (possibly off the main thread) when m_volumeElement emits
// notify::volume; bounces the update to the main thread via the notifier.
void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::volume signal.
    GST_DEBUG_OBJECT(player->pipeline(), "Volume changed to: %f", player->volume());

    player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] {
        player->notifyPlayerOfVolumeChange();
    });
}
611
// Returns the cached network state, maintained by the derived class.
MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
    return m_networkState;
}
616
// Returns the cached ready state, maintained by the derived class.
MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
    return m_readyState;
}
621
// Hook for video-dimension changes; intentionally not implemented here.
void MediaPlayerPrivateGStreamerBase::sizeChanged()
{
    notImplemented();
}
626
627 void MediaPlayerPrivateGStreamerBase::setMuted(bool mute)
628 {
629     if (!m_volumeElement)
630         return;
631
632     bool currentValue = muted();
633     if (currentValue == mute)
634         return;
635
636     GST_INFO_OBJECT(pipeline(), "Set muted to %s", toString(mute).utf8().data());
637     g_object_set(m_volumeElement.get(), "mute", mute, nullptr);
638 }
639
640 bool MediaPlayerPrivateGStreamerBase::muted() const
641 {
642     if (!m_volumeElement)
643         return false;
644
645     gboolean muted;
646     g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
647     GST_INFO_OBJECT(pipeline(), "Player is muted: %s", toString(static_cast<bool>(muted)).utf8().data());
648     return muted;
649 }
650
651 void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
652 {
653     if (!m_player || !m_volumeElement)
654         return;
655
656     gboolean muted;
657     g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
658     m_player->muteChanged(static_cast<bool>(muted));
659 }
660
// Invoked (possibly off the main thread) when m_volumeElement emits
// notify::mute; bounces the update to the main thread via the notifier.
void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::mute signal.
    player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] {
        player->notifyPlayerOfMute();
    });
}
668
// Re-queries the client for accelerated-compositing availability and caches
// the answer; this flag selects the rendering path in triggerRepaint().
void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
{
    m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled();
}
673
674 #if USE(TEXTURE_MAPPER_GL)
// Returns the layer handed to the compositor: the Nicosia content layer when
// USE(NICOSIA), otherwise this object itself acts as the platform layer.
PlatformLayer* MediaPlayerPrivateGStreamerBase::platformLayer() const
{
#if USE(NICOSIA)
    return m_nicosiaLayer.ptr();
#else
    return const_cast<MediaPlayerPrivateGStreamerBase*>(this);
#endif
}
683
684 #if USE(NICOSIA)
// Nicosia layer hook; only the hole-punch configuration needs to push a
// (dummy) buffer here, other paths push buffers from triggerRepaint().
void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
#if USE(GSTREAMER_HOLEPUNCH)
    pushNextHolePunchBuffer();
#endif
}
691 #else
// Exposes the proxy the compositor pulls layer buffers from (non-Nicosia path).
RefPtr<TextureMapperPlatformLayerProxy> MediaPlayerPrivateGStreamerBase::proxy() const
{
    return m_platformLayerProxy.copyRef();
}
696
// Platform-layer hook; only the hole-punch configuration needs to push a
// (dummy) buffer here, other paths push buffers from triggerRepaint().
void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
#if USE(GSTREAMER_HOLEPUNCH)
    pushNextHolePunchBuffer();
#endif
}
703 #endif
704
// Wraps the current sample in a GstVideoFrameHolder and queues it on the
// platform layer proxy for the compositor. Runs under the proxy lock; bails
// out when the proxy is inactive or no valid sample is held.
void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    auto proxyOperation =
        [this](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder holder(proxy.lock());

            if (!proxy.isActive())
                return;

            std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation), !m_usingFallbackVideoSink);

            GLuint textureID = frameHolder->textureID();
            std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer;
            if (textureID) {
                // GL path: the frame already lives in a GL texture; the holder keeps
                // it mapped for as long as the compositor uses the buffer.
                layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(textureID, frameHolder->size(), frameHolder->flags(), GraphicsContext3D::RGBA);
                layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
            } else {
                // Fallback path: upload the pixels into a recycled (or new) texture.
                layerBuffer = proxy.getAvailableBuffer(frameHolder->size(), GL_DONT_CARE);
                if (UNLIKELY(!layerBuffer)) {
                    auto texture = BitmapTextureGL::create(TextureMapperContextAttributes::get());
                    texture->reset(frameHolder->size(), frameHolder->hasAlphaChannel() ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
                    layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
                }
                frameHolder->updateTexture(layerBuffer->textureGL());
                layerBuffer->setExtraFlags(texMapFlagFromOrientation(m_videoSourceOrientation) | (frameHolder->hasAlphaChannel() ? TextureMapperGL::ShouldBlend : 0));
            }
            proxy.pushNextBuffer(WTFMove(layerBuffer));
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
745 #endif // USE(TEXTURE_MAPPER_GL)
746
// Main-thread half of the non-accelerated repaint handshake: paints the
// current sample through MediaPlayer, then wakes the GStreamer thread
// blocked on m_drawCondition in triggerRepaint().
void MediaPlayerPrivateGStreamerBase::repaint()
{
    ASSERT(m_sample);
    ASSERT(isMainThread());

    m_player->repaint();

    LockHolder lock(m_drawMutex);
    m_drawCondition.notifyOne();
}
757
// Called from a GStreamer streaming thread whenever the sink produces a new
// sample. Stores the sample and then either (a) non-AC: schedules repaint()
// on the main thread and blocks until it has painted, or (b) AC with the
// fallback sink: schedules pushTextureToCompositor() on the compositor
// thread and blocks until done, or (c) AC with the GL sink: pushes the
// texture directly from this thread.
void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
    bool triggerResize;
    {
        auto sampleLocker = holdLock(m_sampleMutex);
        triggerResize = !m_sample;
        m_sample = sample;
    }

    if (triggerResize) {
        GST_DEBUG_OBJECT(pipeline(), "First sample reached the sink, triggering video dimensions update");
        m_notifier->notify(MainThreadNotification::SizeChanged, [this] {
            m_player->sizeChanged();
        });
    }

    if (!m_renderingCanBeAccelerated) {
        LockHolder locker(m_drawMutex);
        // Bail out if the player is being destroyed (see cancelRepaint(true)).
        if (m_destroying)
            return;
        m_drawTimer.startOneShot(0_s);
        // Wait for repaint() on the main thread to paint the sample.
        m_drawCondition.wait(m_drawMutex);
        return;
    }

#if USE(TEXTURE_MAPPER_GL)
    if (m_usingFallbackVideoSink) {
        LockHolder lock(m_drawMutex);
        auto proxyOperation =
            [this](TextureMapperPlatformLayerProxy& proxy)
            {
                return proxy.scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); });
            };
#if USE(NICOSIA)
        if (!proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy()))
            return;
#else
        if (!proxyOperation(*m_platformLayerProxy))
            return;
#endif
        m_drawTimer.startOneShot(0_s);
        // Wait for the compositor thread to consume the sample.
        m_drawCondition.wait(m_drawMutex);
    } else
        pushTextureToCompositor();
#endif // USE(TEXTURE_MAPPER_GL)
}
804
805 void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
806 {
807     player->triggerRepaint(sample);
808 }
809
810 void MediaPlayerPrivateGStreamerBase::cancelRepaint(bool destroying)
811 {
812     // The goal of this function is to release the GStreamer thread from m_drawCondition in triggerRepaint() in non-AC case,
813     // to avoid a deadlock if the player gets paused while waiting for drawing (see https://bugs.webkit.org/show_bug.cgi?id=170003):
814     // the main thread is waiting for the GStreamer thread to pause, but the GStreamer thread is locked waiting for the
815     // main thread to draw. This deadlock doesn't happen when using AC because the sample is processed (not painted) in the compositor
816     // thread, so the main thread can request the pause and wait if the GStreamer thread is waiting for the compositor thread.
817     //
818     // This function is also used when destroying the player (destroying parameter is true), to release the gstreamer thread from
819     // m_drawCondition and to ensure that new triggerRepaint calls won't wait on m_drawCondition.
820     if (!m_renderingCanBeAccelerated) {
821         LockHolder locker(m_drawMutex);
822         m_drawTimer.stop();
823         m_destroying = destroying;
824         m_drawCondition.notifyOne();
825     }
826 }
827
828 void MediaPlayerPrivateGStreamerBase::repaintCancelledCallback(MediaPlayerPrivateGStreamerBase* player)
829 {
830     player->cancelRepaint();
831 }
832
833 #if USE(GSTREAMER_GL)
834 GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
835 {
836     GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
837     player->triggerRepaint(sample.get());
838     return GST_FLOW_OK;
839 }
840
841 GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
842 {
843     GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
844     player->triggerRepaint(sample.get());
845     return GST_FLOW_OK;
846 }
847
// Drops every reference to the current video frame, keeping only its caps.
// Invoked from the appsink pad probe on DRAIN queries and flush-start events
// (see createGLAppSink()), so decoding can continue after a resolution change.
void MediaPlayerPrivateGStreamerBase::flushCurrentBuffer()
{
    auto sampleLocker = holdLock(m_sampleMutex);

    if (m_sample) {
        // Replace by a new sample having only the caps, so this dummy sample is still useful to get the dimensions.
        // This prevents resizing problems when the video changes its quality and a DRAIN is performed.
        const GstStructure* info = gst_sample_get_info(m_sample.get());
        m_sample = adoptGRef(gst_sample_new(nullptr, gst_sample_get_caps(m_sample.get()),
            gst_sample_get_segment(m_sample.get()), info ? gst_structure_copy(info) : nullptr));
    }

    // Also drop the buffer currently held by the compositor proxy, preserving
    // the texture. The drop is synchronous for Video4Linux decoders
    // (shouldWait); otherwise the proxy lock is taken first — presumably
    // dropCurrentBufferWhilePreservingTexture() consumes/releases it in the
    // async case; verify against TextureMapperPlatformLayerProxy.
    auto proxyOperation =
        [shouldWait = m_isVideoDecoderVideo4Linux, pipeline = pipeline()](TextureMapperPlatformLayerProxy& proxy)
        {
            GST_DEBUG_OBJECT(pipeline, "Flushing video sample %s", shouldWait ? "synchronously" : "");
            if (!shouldWait)
                proxy.lock().lock();

            if (proxy.isActive())
                proxy.dropCurrentBufferWhilePreservingTexture(shouldWait);
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
877 #endif
878
879 void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
880 {
881     m_size = size;
882 }
883
884 void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
885 {
886     if (context.paintingDisabled())
887         return;
888
889     if (!m_player->visible())
890         return;
891
892     auto sampleLocker = holdLock(m_sampleMutex);
893     if (!GST_IS_SAMPLE(m_sample.get()))
894         return;
895
896     ImagePaintingOptions paintingOptions(CompositeCopy);
897     if (m_renderingCanBeAccelerated)
898         paintingOptions.m_orientationDescription.setImageOrientationEnum(m_videoSourceOrientation);
899
900     auto gstImage = ImageGStreamer::createImage(m_sample.get());
901     if (!gstImage)
902         return;
903
904     context.drawImage(gstImage->image(), rect, gstImage->rect(), paintingOptions);
905 }
906
907 #if USE(GSTREAMER_GL)
// Copies the current video frame's GL texture into a caller-provided texture
// (WebGL texImage2D(video) fast path). Returns false when the copy cannot be
// done here, letting the caller fall back to the generic path.
bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    UNUSED_PARAM(context);

    // The fallback (non-GL) sink has no GL texture to copy from.
    if (m_usingFallbackVideoSink)
        return false;

    // Premultiplied alpha is not supported by this copier.
    if (premultiplyAlpha)
        return false;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return false;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation), true);

    auto textureID = frameHolder->textureID();
    if (!textureID)
        return false;

    // Width/height swap when the source orientation rotates by 90/270 degrees.
    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    // Lazily create the copier; reused across frames.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);
}
938
// Produces a cairo GL surface wrapping the current video frame, for use by the
// accelerated 2D canvas. Returns nullptr when unsupported or no frame exists.
NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
{
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
    // Only the GL sink provides textures we can wrap.
    if (m_usingFallbackVideoSink)
        return nullptr;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return nullptr;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation), true);

    auto textureID = frameHolder->textureID();
    if (!textureID)
        return nullptr;

    // Width/height swap when the source orientation rotates by 90/270 degrees.
    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    // The copy must happen with the compositing sharing context current, since
    // the resulting texture is handed to cairo below.
    GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
    context->makeContextCurrent();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    // Copy into the copier's own result texture (output texture 0 here means
    // the copier's internal target, retrieved via resultTexture() below).
    if (!m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, 0, GraphicsContext3D::TEXTURE_2D, 0, GraphicsContext3D::RGBA, GraphicsContext3D::RGBA, GraphicsContext3D::UNSIGNED_BYTE, false, m_videoSourceOrientation))
        return nullptr;

    return adoptRef(cairo_gl_surface_create_for_texture(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, m_videoTextureCopier->resultTexture(), size.width(), size.height()));
#else
    return nullptr;
#endif
}
974 #endif // USE(GSTREAMER_GL)
975
976 void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(const ImageOrientation& orientation)
977 {
978     if (m_videoSourceOrientation == orientation)
979         return;
980
981     m_videoSourceOrientation = orientation;
982 }
983
984 bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
985 {
986     return true;
987 }
988
989 MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
990 {
991     if (m_readyState == MediaPlayer::HaveNothing)
992         return MediaPlayer::Unknown;
993
994     if (isLiveStream())
995         return MediaPlayer::LiveStream;
996
997     return MediaPlayer::Download;
998 }
999
1000 #if USE(GSTREAMER_GL)
// Creates the appsink used by the GL rendering path: signals hooked for
// sample/preroll delivery plus a pad probe that flushes our sample cache on
// DRAIN queries and flush-start events. Returns nullptr on old GStreamer.
GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
{
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
    if (!appsink)
        return nullptr;

    // Keep at most one buffer queued and emit signals instead of requiring
    // explicit pulls; last-sample is disabled since we keep our own m_sample.
    g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
    g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
    g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(appsink, "sink"));
    gst_pad_add_probe(pad.get(), static_cast<GstPadProbeType>(GST_PAD_PROBE_TYPE_PUSH | GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_FLUSH), [] (GstPad*, GstPadProbeInfo* info,  gpointer userData) -> GstPadProbeReturn {
        // In some platforms (e.g. OpenMAX on the Raspberry Pi) when a resolution change occurs the
        // pipeline has to be drained before a frame with the new resolution can be decoded.
        // In this context, it's important that we don't hold references to any previous frame
        // (e.g. m_sample) so that decoding can continue.
        // We are also not supposed to keep the original frame after a flush.
        if (info->type & GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM) {
            if (GST_QUERY_TYPE(GST_PAD_PROBE_INFO_QUERY(info)) != GST_QUERY_DRAIN)
                return GST_PAD_PROBE_OK;
            GST_DEBUG("Acting upon DRAIN query");
        }
        if (info->type & GST_PAD_PROBE_TYPE_EVENT_FLUSH) {
            if (GST_EVENT_TYPE(GST_PAD_PROBE_INFO_EVENT(info)) != GST_EVENT_FLUSH_START)
                return GST_PAD_PROBE_OK;
            GST_DEBUG("Acting upon flush-start event");
        }

        auto* player = static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
        player->flushCurrentBuffer();
        return GST_PAD_PROBE_OK;
    }, this, nullptr);

    return appsink;
}
1039
// Builds the GL video sink bin: glupload ! glcolorconvert ! appsink, with the
// appsink constrained to GL memory caps. Returns nullptr (after cleaning up
// the partially built elements) when the GstGL plugin is missing or linking
// fails.
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
{
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    gboolean result = TRUE;
    GstElement* videoSink = gst_bin_new(nullptr);
    GstElement* upload = gst_element_factory_make("glupload", nullptr);
    GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
    GstElement* appsink = createGLAppSink();

    if (!appsink || !upload || !colorconvert) {
        GST_WARNING("Failed to create GstGL elements");
        gst_object_unref(videoSink);

        // The elements were not added to the bin yet, so each floating ref
        // must be dropped individually.
        if (upload)
            gst_object_unref(upload);
        if (colorconvert)
            gst_object_unref(colorconvert);
        if (appsink)
            gst_object_unref(appsink);

        g_warning("WebKit wasn't able to find the GStreamer opengl plugin. Hardware-accelerated zero-copy video rendering can't be enabled without this plugin.");
        return nullptr;
    }

    // From here the bin owns the elements.
    gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);

    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) " GST_GL_CAPS_FORMAT));

    result &= gst_element_link_pads(upload, "src", colorconvert, "sink");
    result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get());

    // Expose the upload element's sink pad as the bin's sink pad.
    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
    gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));

    if (!result) {
        GST_WARNING("Failed to link GstGL elements");
        gst_object_unref(videoSink);
        videoSink = nullptr;
    }
    return videoSink;
}
1083
1084 void MediaPlayerPrivateGStreamerBase::ensureGLVideoSinkContext()
1085 {
1086     if (!m_glDisplayElementContext)
1087         m_glDisplayElementContext = adoptGRef(requestGLContext(GST_GL_DISPLAY_CONTEXT_TYPE));
1088
1089     if (m_glDisplayElementContext)
1090         gst_element_set_context(m_videoSink.get(), m_glDisplayElementContext.get());
1091
1092     if (!m_glAppElementContext)
1093         m_glAppElementContext = adoptGRef(requestGLContext("gst.gl.app_context"));
1094
1095     if (m_glAppElementContext)
1096         gst_element_set_context(m_videoSink.get(), m_glAppElementContext.get());
1097 }
1098 #endif // USE(GSTREAMER_GL)
1099
1100 #if USE(GSTREAMER_HOLEPUNCH)
1101 static void setRectangleToVideoSink(GstElement* videoSink, const IntRect& rect)
1102 {
1103     // Here goes the platform-dependant code to set to the videoSink the size
1104     // and position of the video rendering window. Mark them unused as default.
1105     UNUSED_PARAM(videoSink);
1106     UNUSED_PARAM(rect);
1107 }
1108
1109 class GStreamerHolePunchClient : public TextureMapperPlatformLayerBuffer::HolePunchClient {
1110 public:
1111     GStreamerHolePunchClient(GRefPtr<GstElement>&& videoSink) : m_videoSink(WTFMove(videoSink)) { };
1112     void setVideoRectangle(const IntRect& rect) final { setRectangleToVideoSink(m_videoSink.get(), rect); }
1113 private:
1114     GRefPtr<GstElement> m_videoSink;
1115 };
1116
1117 GstElement* MediaPlayerPrivateGStreamerBase::createHolePunchVideoSink()
1118 {
1119     // Here goes the platform-dependant code to create the videoSink. As a default
1120     // we use a fakeVideoSink so nothing is drawn to the page.
1121     GstElement* videoSink =  gst_element_factory_make("fakevideosink", nullptr);
1122
1123     return videoSink;
1124 }
1125
// Pushes a zero-texture buffer carrying a GStreamerHolePunchClient to the
// compositor proxy, so the compositor cuts a transparent hole of m_size where
// the platform sink renders the video directly.
void MediaPlayerPrivateGStreamerBase::pushNextHolePunchBuffer()
{
    auto proxyOperation =
        [this](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder holder(proxy.lock());
            // Texture id 0 + ShouldNotBlend marks this as a hole-punch buffer.
            std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(0, m_size, TextureMapperGL::ShouldNotBlend, GL_DONT_CARE);
            std::unique_ptr<GStreamerHolePunchClient> holePunchClient = std::make_unique<GStreamerHolePunchClient>(m_videoSink.get());
            layerBuffer->setHolePunchClient(WTFMove(holePunchClient));
            proxy.pushNextBuffer(WTFMove(layerBuffer));
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
1144 #endif
1145
// Creates the video sink for the pipeline, choosing between the hole-punch
// sink, the GL sink, and the software fallback sink, optionally wrapped in an
// fpsdisplaysink for statistics.
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    acceleratedRenderingStateChanged();

#if USE(GSTREAMER_HOLEPUNCH)
    // Hole-punch rendering bypasses the rest of the sink setup entirely.
    m_videoSink = createHolePunchVideoSink();
    pushNextHolePunchBuffer();
    return m_videoSink.get();
#endif

#if USE(GSTREAMER_GL)
    if (m_renderingCanBeAccelerated)
        m_videoSink = createVideoSinkGL();
#endif

    // Fall back to the software sink when the GL sink is unavailable.
    if (!m_videoSink) {
        m_usingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
        g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
    }

    GstElement* videoSink = nullptr;
#if ENABLE(MEDIA_STATISTICS)
    // Wrap the actual sink in fpsdisplaysink to expose frames-rendered /
    // frames-dropped counters (see decodedFrameCount()/droppedFrameCount()).
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);

        // Turn off text overlay unless tracing is enabled.
        if (gst_debug_category_get_threshold(webkit_media_player_debug) < GST_LEVEL_TRACE)
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);

        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }
#endif

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);

    return videoSink;
}
1193
// Binds the pipeline's stream-volume element to the player: pushes the initial
// volume/mute state (unless the platform owns volume) and subscribes to
// notify::volume / notify::mute to reflect external changes back.
void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
{
    ASSERT(!m_volumeElement);
    m_volumeElement = volume;

    // We don't set the initial volume because we trust the sink to keep it for us. See
    // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
    if (!m_player->platformVolumeConfigurationRequired()) {
        GST_DEBUG_OBJECT(pipeline(), "Setting stream volume to %f", m_player->volume());
        g_object_set(m_volumeElement.get(), "volume", m_player->volume(), nullptr);
    } else
        GST_DEBUG_OBJECT(pipeline(), "Not setting stream volume, trusting system one");

    GST_DEBUG_OBJECT(pipeline(), "Setting stream muted %s", toString(m_player->muted()).utf8().data());
    g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);

    g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
    g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
}
1213
1214 unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
1215 {
1216     guint64 decodedFrames = 0;
1217     if (m_fpsSink)
1218         g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
1219     return static_cast<unsigned>(decodedFrames);
1220 }
1221
1222 unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
1223 {
1224     guint64 framesDropped = 0;
1225     if (m_fpsSink)
1226         g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
1227     return static_cast<unsigned>(framesDropped);
1228 }
1229
1230 unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
1231 {
1232     GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
1233     gint64 position = 0;
1234
1235     if (audioSink() && gst_element_query(audioSink(), query))
1236         gst_query_parse_position(query, 0, &position);
1237
1238     gst_query_unref(query);
1239     return static_cast<unsigned>(position);
1240 }
1241
1242 unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
1243 {
1244     GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
1245     gint64 position = 0;
1246
1247     if (gst_element_query(m_videoSink.get(), query))
1248         gst_query_parse_position(query, 0, &position);
1249
1250     gst_query_unref(query);
1251     return static_cast<unsigned>(position);
1252 }
1253
1254 #if ENABLE(ENCRYPTED_MEDIA)
// Forwards encrypted-media init data from a GStreamer streaming thread to the
// player on the main run loop. The weak pointer guards against the player
// being destroyed before the dispatch runs.
void MediaPlayerPrivateGStreamerBase::initializationDataEncountered(InitData&& initData)
{
    ASSERT(!isMainThread());

    RunLoop::main().dispatch([weakThis = makeWeakPtr(*this), initData = WTFMove(initData)] {
        if (!weakThis)
            return;

        // NOTE(review): %lu expects unsigned long; if payload()->size() is
        // size_t this relies on an LP64 target — consider %zu. Confirm.
        GST_DEBUG("scheduling initializationDataEncountered event of size %lu", initData.payload()->size());
        GST_MEMDUMP("init datas", reinterpret_cast<const uint8_t*>(initData.payload()->data()), initData.payload()->size());
        weakThis->m_player->initializationDataEncountered(initData.payloadContainerType(), initData.payload()->tryCreateArrayBuffer());
    });
}
1268
// Stores the attached CDM instance and broadcasts it to every element in the
// pipeline through a "drm-cdm-instance" GstContext, then wakes up any thread
// blocked on m_protectionCondition waiting for a CDM.
void MediaPlayerPrivateGStreamerBase::cdmInstanceAttached(CDMInstance& instance)
{
    ASSERT(isMainThread());

    // Re-attaching the same instance is a no-op.
    if (m_cdmInstance == &instance)
        return;

    if (!m_pipeline) {
        GST_ERROR("no pipeline yet");
        ASSERT_NOT_REACHED();
        return;
    }

    m_cdmInstance = &instance;

    // Non-persistent context carrying the raw CDMInstance pointer; decryptors
    // fetch it via the usual GstContext machinery.
    GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-cdm-instance", FALSE));
    GstStructure* contextStructure = gst_context_writable_structure(context.get());
    gst_structure_set(contextStructure, "cdm-instance", G_TYPE_POINTER, m_cdmInstance.get(), nullptr);
    gst_element_set_context(GST_ELEMENT(m_pipeline.get()), context.get());

    GST_DEBUG_OBJECT(m_pipeline.get(), "CDM instance %p dispatched as context", m_cdmInstance.get());

    m_protectionCondition.notifyAll();
}
1293
// Detaches the given CDM instance: clears the stored pointer, pushes an empty
// "drm-cdm-instance" context so the pipeline drops its reference, and wakes
// up any waiters on m_protectionCondition.
void MediaPlayerPrivateGStreamerBase::cdmInstanceDetached(CDMInstance& instance)
{
    ASSERT(isMainThread());

    if (m_cdmInstance != &instance) {
        GST_WARNING("passed CDMInstance %p is different from stored one %p", &instance, m_cdmInstance.get());
        ASSERT_NOT_REACHED();
        return;
    }

    ASSERT(m_pipeline);

    GST_DEBUG_OBJECT(m_pipeline.get(), "detaching CDM instance %p, setting empty context", m_cdmInstance.get());
    m_cdmInstance = nullptr;

    // The context deliberately carries no "cdm-instance" field.
    GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-cdm-instance", FALSE));
    gst_element_set_context(GST_ELEMENT(m_pipeline.get()), context.get());

    m_protectionCondition.notifyAll();
}
1314
1315 void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithInstance(CDMInstance& instance)
1316 {
1317     ASSERT(m_cdmInstance.get() == &instance);
1318     GST_TRACE("instance %p, current stored %p", &instance, m_cdmInstance.get());
1319     attemptToDecryptWithLocalInstance();
1320 }
1321
1322 void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithLocalInstance()
1323 {
1324     bool eventHandled = gst_element_send_event(pipeline(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB, gst_structure_new_empty("attempt-to-decrypt")));
1325     GST_DEBUG("attempting to decrypt, event handled %s", boolForPrinting(eventHandled));
1326 }
1327
// Turns a GST_EVENT_PROTECTION event into an initializationDataEncountered()
// notification, deduplicating by event sequence number.
// NOTE(review): m_handledProtectionEvents is only read here, never inserted
// into — presumably the MSE path populates it; verify against callers.
void MediaPlayerPrivateGStreamerBase::handleProtectionEvent(GstEvent* event)
{
    if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
        GST_DEBUG_OBJECT(pipeline(), "event %u already handled", GST_EVENT_SEQNUM(event));
        return;
    }
    GST_DEBUG_OBJECT(pipeline(), "handling event %u from MSE", GST_EVENT_SEQNUM(event));
    const char* eventKeySystemUUID = nullptr;
    GstBuffer* initData = nullptr;
    gst_event_parse_protection(event, &eventKeySystemUUID, &initData, nullptr);
    initializationDataEncountered({eventKeySystemUUID, initData});
}
1340
// Updates the waiting-for-key flag and notifies the player when it changes.
void MediaPlayerPrivateGStreamerBase::setWaitingForKey(bool waitingForKey)
{
    // We bail out if values did not change or if we are requested to not wait anymore but there are still waiting decryptors.
    GST_TRACE("waitingForKey %s, m_waitingForKey %s", boolForPrinting(waitingForKey), boolForPrinting(m_waitingForKey));
    if (waitingForKey == m_waitingForKey || (!waitingForKey && this->waitingForKey()))
        return;

    m_waitingForKey = waitingForKey;
    GST_DEBUG("waiting for key changed %s", boolForPrinting(m_waitingForKey));
    m_player->waitingForKeyChanged();
}
1352
// Asks the pipeline whether any decryptor is currently waiting for a key,
// via a custom "any-decryptor-waiting-for-key" query. Returns false when the
// pipeline does not exist or is below READY.
bool MediaPlayerPrivateGStreamerBase::waitingForKey() const
{
    if (!m_pipeline)
        return false;

    GstState state;
    gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);

    bool result = false;
    GRefPtr<GstQuery> query = adoptGRef(gst_query_new_custom(GST_QUERY_CUSTOM, gst_structure_new_empty("any-decryptor-waiting-for-key")));
    if (state >= GST_STATE_PAUSED) {
        result = gst_element_query(m_pipeline.get(), query.get());
        GST_TRACE("query result %s, on %s", boolForPrinting(result), gst_element_state_get_name(state));
    } else if (state >= GST_STATE_READY) {
        // Running a query in the pipeline is easier but it only works when the pipeline is set up and running, otherwise we need to inspect it and ask the decryptors directly.
        GUniquePtr<GstIterator> iterator(gst_bin_iterate_recurse(GST_BIN(m_pipeline.get())));
        GstIteratorResult iteratorResult;
        do {
            // The fold stops (yielding GST_ITERATOR_OK) as soon as the functor
            // returns FALSE, i.e. when a decryptor element answers the query.
            iteratorResult = gst_iterator_fold(iterator.get(), [](const GValue *item, GValue *, gpointer data) -> gboolean {
                GstElement* element = GST_ELEMENT(g_value_get_object(item));
                GstQuery* query = GST_QUERY(data);
                return !WEBKIT_IS_MEDIA_CENC_DECRYPT(element) || !gst_element_query(element, query);
            }, nullptr, query.get());
            if (iteratorResult == GST_ITERATOR_RESYNC)
                gst_iterator_resync(iterator.get());
        } while (iteratorResult == GST_ITERATOR_RESYNC);
        if (iteratorResult == GST_ITERATOR_ERROR)
            GST_WARNING("iterator returned an error");
        result = iteratorResult == GST_ITERATOR_OK;
        GST_TRACE("iterator result %d, waiting %s", iteratorResult, boolForPrinting(result));
    }

    return result;
}
1387 #endif
1388
1389 bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
1390 {
1391     bool result = false;
1392
1393 #if ENABLE(ENCRYPTED_MEDIA)
1394     result = GStreamerEMEUtilities::isClearKeyKeySystem(keySystem);
1395 #endif
1396
1397     GST_DEBUG("checking for KeySystem support with %s and type %s: %s", keySystem.utf8().data(), mimeType.utf8().data(), boolForPrinting(result));
1398     return result;
1399 }
1400
1401 MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
1402 {
1403     UNUSED_PARAM(parameters);
1404     return result;
1405 }
1406
1407 }
1408
1409 #endif // USE(GSTREAMER)