WeakPtrFactory should populate m_ref lazily.
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamerBase.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
7  * Copyright (C) 2015, 2016 Metrological Group B.V.
8  *
9  * This library is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Library General Public
11  * License as published by the Free Software Foundation; either
12  * version 2 of the License, or (at your option) any later version.
13  *
14  * This library is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
17  * Library General Public License for more details.
18  *
19  * You should have received a copy of the GNU Library General Public License
20  * along with this library; see the file COPYING.LIB.  If not, write to
21  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
22  * Boston, MA 02110-1301, USA.
23  */
24
25 #include "config.h"
26 #include "MediaPlayerPrivateGStreamerBase.h"
27
28 #if ENABLE(VIDEO) && USE(GSTREAMER)
29
30 #include "GStreamerUtilities.h"
31 #include "GraphicsContext.h"
32 #include "GraphicsTypes.h"
33 #include "ImageGStreamer.h"
34 #include "ImageOrientation.h"
35 #include "IntRect.h"
36 #include "Logging.h"
37 #include "MediaPlayer.h"
38 #include "NotImplemented.h"
39 #include "VideoSinkGStreamer.h"
40 #include "WebKitWebSourceGStreamer.h"
41 #include <wtf/glib/GMutexLocker.h>
42 #include <wtf/glib/GUniquePtr.h>
43 #include <wtf/text/AtomicString.h>
44 #include <wtf/text/CString.h>
45 #include <wtf/MathExtras.h>
46
47 #include <gst/audio/streamvolume.h>
48 #include <gst/video/gstvideometa.h>
49
50 #if ENABLE(ENCRYPTED_MEDIA)
51 #include "CDMInstance.h"
52 #include "GStreamerEMEUtilities.h"
53 #include "SharedBuffer.h"
54 #include "WebKitClearKeyDecryptorGStreamer.h"
55 #endif
56
57 #if USE(GSTREAMER_GL)
58 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
59 #define GST_GL_CAPS_FORMAT "{ BGRx, BGRA }"
60 #define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureBGRAToRGBA
61 #define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertBGRAToRGBA
62 #else
63 #define GST_GL_CAPS_FORMAT "{ xRGB, ARGB }"
64 #define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureARGBToRGBA
65 #define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertARGBToRGBA
66 #endif
67
68 #include <gst/app/gstappsink.h>
69
70 #if USE(LIBEPOXY)
71 // Include the <epoxy/gl.h> header before <gst/gl/gl.h>.
72 #include <epoxy/gl.h>
73 #endif
74
75 #define GST_USE_UNSTABLE_API
76 #include <gst/gl/gl.h>
77 #undef GST_USE_UNSTABLE_API
78
79 #include "GLContext.h"
80 #if USE(GLX)
81 #include "GLContextGLX.h"
82 #include <gst/gl/x11/gstgldisplay_x11.h>
83 #endif
84
85 #if USE(EGL)
86 #include "GLContextEGL.h"
87 #include <gst/gl/egl/gstgldisplay_egl.h>
88 #endif
89
90 #if PLATFORM(X11)
91 #include "PlatformDisplayX11.h"
92 #endif
93
94 #if PLATFORM(WAYLAND)
95 #include "PlatformDisplayWayland.h"
96 #elif PLATFORM(WPE)
97 #include "PlatformDisplayWPE.h"
98 #endif
99
100 // gstglapi.h may include eglplatform.h and it includes X.h, which
101 // defines None, breaking MediaPlayer::None enum
102 #if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
103 #undef None
104 #endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
105 #include "VideoTextureCopierGStreamer.h"
106 #endif // USE(GSTREAMER_GL)
107
108 #if USE(TEXTURE_MAPPER_GL)
109 #include "BitmapTextureGL.h"
110 #include "BitmapTexturePool.h"
111 #include "TextureMapperContextAttributes.h"
112 #include "TextureMapperGL.h"
113 #include "TextureMapperPlatformLayerBuffer.h"
114 #include "TextureMapperPlatformLayerProxy.h"
115 #if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
116 #include <cairo-gl.h>
117 #endif
118 #endif // USE(TEXTURE_MAPPER_GL)
119
120 GST_DEBUG_CATEGORY(webkit_media_player_debug);
121 #define GST_CAT_DEFAULT webkit_media_player_debug
122
123 using namespace std;
124
125 namespace WebCore {
126
// Registers the WebKit-provided GStreamer elements that are not tied to a
// particular player instance. Currently only the ClearKey decryptor, and only
// when EME is enabled.
void registerWebKitGStreamerElements()
{
#if ENABLE(ENCRYPTED_MEDIA)
    // The ClearKey decryptor requires GStreamer >= 1.6.1; skip registration otherwise.
    if (!webkitGstCheckVersion(1, 6, 1))
        return;

    // Register only once: a non-null factory means the element is already known.
    GRefPtr<GstElementFactory> clearKeyDecryptorFactory = adoptGRef(gst_element_factory_find("webkitclearkey"));
    if (!clearKeyDecryptorFactory)
        gst_element_register(nullptr, "webkitclearkey", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_CK_DECRYPT);
#endif
}
138
139 bool MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements()
140 {
141     if (!initializeGStreamer())
142         return false;
143
144     registerWebKitGStreamerElements();
145
146     GRefPtr<GstElementFactory> srcFactory = adoptGRef(gst_element_factory_find("webkitwebsrc"));
147     if (!srcFactory) {
148         GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
149         gst_element_register(0, "webkitwebsrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_WEB_SRC);
150     }
151
152     return true;
153 }
154
// Computes the greatest common divisor of two ints with the iterative
// Euclidean algorithm. The result is always non-negative; gcd(0, 0) is 0.
// Uses a plain C++ expression instead of GLib's ABS macro so this generic
// helper has no platform-macro dependency.
static int greatestCommonDivisor(int a, int b)
{
    while (b) {
        int remainder = a % b;
        a = b;
        b = remainder;
    }

    // 'a' can end up negative when the inputs were negative; normalize the sign.
    return a < 0 ? -a : a;
}
165
#if USE(TEXTURE_MAPPER_GL)
// Maps an image orientation to the TextureMapperGL rotation flag needed so
// the compositor presents the video frame upright. Orientations other than
// the four rotations listed are not handled (the default branch asserts).
static inline TextureMapperGL::Flags texMapFlagFromOrientation(const ImageOrientation& orientation)
{
    switch (orientation) {
    case DefaultImageOrientation:
        return 0;
    case OriginRightTop:
        return TextureMapperGL::ShouldRotateTexture90;
    case OriginBottomRight:
        return TextureMapperGL::ShouldRotateTexture180;
    case OriginLeftBottom:
        return TextureMapperGL::ShouldRotateTexture270;
    default:
        ASSERT_NOT_REACHED();
    }

    return 0;
}
#endif
185
#if USE(GSTREAMER_GL)
// Keeps a GL-mapped video frame alive for as long as the compositor holds the
// corresponding TextureMapperPlatformLayerBuffer, and unmaps it on destruction.
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
    explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags)
    {
        // Bail out (leaving m_isValid false) if the sample carries no usable video info.
        GstVideoInfo videoInfo;
        if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
            return;

        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
        m_flags = flags | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0) | TEXTURE_MAPPER_COLOR_CONVERT_FLAG;

        // Map the buffer for GL access; the first plane's data holds the GL texture ID.
        GstBuffer* buffer = gst_sample_get_buffer(sample);
        if (UNLIKELY(!gst_video_frame_map(&m_videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL))))
            return;

        m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
        m_isValid = true;
    }

    virtual ~GstVideoFrameHolder()
    {
        // If construction bailed out early, the frame was never mapped.
        if (UNLIKELY(!m_isValid))
            return;

        gst_video_frame_unmap(&m_videoFrame);
    }

    const IntSize& size() const { return m_size; }
    TextureMapperGL::Flags flags() const { return m_flags; }
    GLuint textureID() const { return m_textureID; }
    // False when the sample lacked video info or GL mapping failed.
    bool isValid() const { return m_isValid; }

private:
    GstVideoFrame m_videoFrame;
    IntSize m_size;
    TextureMapperGL::Flags m_flags;
    GLuint m_textureID;
    bool m_isValid { false };
};
#endif // USE(GSTREAMER_GL)
227
MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
    , m_player(player)
    , m_fpsSink(nullptr)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_networkState(MediaPlayer::Empty)
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
#if USE(TEXTURE_MAPPER_GL)
    , m_platformLayerProxy(adoptRef(new TextureMapperPlatformLayerProxy()))
#endif
{
    // Protects m_sample; paired with g_mutex_clear() in the destructor.
    g_mutex_init(&m_sampleMutex);
}
241
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
#if ENABLE(ENCRYPTED_MEDIA)
    // Unblock a streaming thread that may be waiting for a CDM instance in handleSyncMessage().
    m_protectionCondition.notifyAll();
#endif
    m_notifier->invalidate();

    // Disconnect every signal handler that captured `this` before teardown.
    if (m_videoSink) {
        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
#if USE(GSTREAMER_GL)
        // On the GL path the sink is a bin wrapping an appsink whose signals
        // were also connected to this object.
        if (GST_IS_BIN(m_videoSink.get())) {
            GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
            g_signal_handlers_disconnect_by_data(appsink.get(), this);
        }
#endif
    }

    if (m_volumeElement)
        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    // This will release the GStreamer thread from m_drawCondition if AC is disabled.
    cancelRepaint();

    // The change to GST_STATE_NULL state is always synchronous. So after this gets executed we don't need to worry
    // about handlers running in the GStreamer thread.
    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);

    g_mutex_clear(&m_sampleMutex);

    m_player = nullptr;
}
274
// Adopts the pipeline created by the derived class (takes a GRefPtr reference).
void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
{
    m_pipeline = pipeline;
}
279
280 #if ENABLE(ENCRYPTED_MEDIA)
281 static std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> extractEventsAndSystemsFromMessage(GstMessage* message)
282 {
283     const GstStructure* structure = gst_message_get_structure(message);
284
285     const GValue* streamEncryptionAllowedSystemsValue = gst_structure_get_value(structure, "stream-encryption-systems");
286     ASSERT(streamEncryptionAllowedSystemsValue && G_VALUE_HOLDS(streamEncryptionAllowedSystemsValue, G_TYPE_STRV));
287     const char** streamEncryptionAllowedSystems = reinterpret_cast<const char**>(g_value_get_boxed(streamEncryptionAllowedSystemsValue));
288     ASSERT(streamEncryptionAllowedSystems);
289     Vector<String> streamEncryptionAllowedSystemsVector;
290     unsigned i;
291     for (i = 0; streamEncryptionAllowedSystems[i]; ++i)
292         streamEncryptionAllowedSystemsVector.append(streamEncryptionAllowedSystems[i]);
293
294     const GValue* streamEncryptionEventsList = gst_structure_get_value(structure, "stream-encryption-events");
295     ASSERT(streamEncryptionEventsList && GST_VALUE_HOLDS_LIST(streamEncryptionEventsList));
296     unsigned streamEncryptionEventsListSize = gst_value_list_get_size(streamEncryptionEventsList);
297     Vector<GRefPtr<GstEvent>> streamEncryptionEventsVector;
298     for (i = 0; i < streamEncryptionEventsListSize; ++i)
299         streamEncryptionEventsVector.append(GRefPtr<GstEvent>(static_cast<GstEvent*>(g_value_get_boxed(gst_value_list_get_value(streamEncryptionEventsList, i)))));
300
301     return std::make_pair(streamEncryptionEventsVector, streamEncryptionAllowedSystemsVector);
302 }
303 #endif
304
305 bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
306 {
307     UNUSED_PARAM(message);
308     if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
309         return false;
310
311     const gchar* contextType;
312     gst_message_parse_context_type(message, &contextType);
313
314 #if USE(GSTREAMER_GL)
315     GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType, this));
316     if (elementContext) {
317         gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
318         return true;
319     }
320 #endif // USE(GSTREAMER_GL)
321
322 #if ENABLE(ENCRYPTED_MEDIA)
323     if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
324         if (isMainThread()) {
325             GST_ERROR("can't handle drm-preferred-decryption-system-id need context message in the main thread");
326             ASSERT_NOT_REACHED();
327             return false;
328         }
329         GST_DEBUG("handling drm-preferred-decryption-system-id need context message");
330         LockHolder lock(m_protectionMutex);
331         std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> streamEncryptionInformation = extractEventsAndSystemsFromMessage(message);
332         GST_TRACE("found %" G_GSIZE_FORMAT " protection events", streamEncryptionInformation.first.size());
333         Vector<uint8_t> concatenatedInitDataChunks;
334         unsigned concatenatedInitDataChunksNumber = 0;
335         String eventKeySystemIdString;
336
337         for (auto& event : streamEncryptionInformation.first) {
338             GST_TRACE("handling protection event %u", GST_EVENT_SEQNUM(event.get()));
339             const char* eventKeySystemId = nullptr;
340             GstBuffer* data = nullptr;
341             gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);
342
343             GstMapInfo mapInfo;
344             if (!gst_buffer_map(data, &mapInfo, GST_MAP_READ)) {
345                 GST_WARNING("cannot map %s protection data", eventKeySystemId);
346                 break;
347             }
348             GST_TRACE("appending init data for %s of size %" G_GSIZE_FORMAT, eventKeySystemId, mapInfo.size);
349             GST_MEMDUMP("init data", reinterpret_cast<const unsigned char *>(mapInfo.data), mapInfo.size);
350             concatenatedInitDataChunks.append(mapInfo.data, mapInfo.size);
351             ++concatenatedInitDataChunksNumber;
352             eventKeySystemIdString = eventKeySystemId;
353             if (streamEncryptionInformation.second.contains(eventKeySystemId)) {
354                 GST_TRACE("considering init data handled for %s", eventKeySystemId);
355                 m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
356             }
357             gst_buffer_unmap(data, &mapInfo);
358         }
359
360         if (!concatenatedInitDataChunksNumber)
361             return false;
362
363         if (concatenatedInitDataChunksNumber > 1)
364             eventKeySystemIdString = emptyString();
365
366         RunLoop::main().dispatch([weakThis = m_weakPtrFactory.createWeakPtr(*this), eventKeySystemIdString, initData = WTFMove(concatenatedInitDataChunks)] {
367             if (!weakThis)
368                 return;
369
370             GST_DEBUG("scheduling initializationDataEncountered event for %s with concatenated init datas size of %" G_GSIZE_FORMAT, eventKeySystemIdString.utf8().data(), initData.size());
371             GST_MEMDUMP("init datas", initData.data(), initData.size());
372             weakThis->m_player->initializationDataEncountered(ASCIILiteral("cenc"), ArrayBuffer::create(initData.data(), initData.size()));
373         });
374
375         GST_INFO("waiting for a CDM instance");
376         m_protectionCondition.waitFor(m_protectionMutex, Seconds(4), [this] {
377             return this->m_cdmInstance;
378         });
379         if (m_cdmInstance && !m_cdmInstance->keySystem().isEmpty()) {
380             const char* preferredKeySystemUuid = GStreamerEMEUtilities::keySystemToUuid(m_cdmInstance->keySystem());
381             GST_INFO("working with %s, continuing with %s on %s", m_cdmInstance->keySystem().utf8().data(), preferredKeySystemUuid, GST_MESSAGE_SRC_NAME(message));
382
383             GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
384             GstStructure* contextStructure = gst_context_writable_structure(context.get());
385             gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, preferredKeySystemUuid, nullptr);
386             gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
387         } else
388             GST_WARNING("no proper CDM instance attached");
389
390         return true;
391     }
392 #endif // ENABLE(ENCRYPTED_MEDIA)
393
394     return false;
395 }
396
#if USE(GSTREAMER_GL)
// Answers a GStreamer GL context request (GST_GL_DISPLAY_CONTEXT_TYPE or
// "gst.gl.app_context") with wrappers around WebKit's shared GL display and
// context. Returns nullptr for unknown context types or when no GL context
// could be set up. The returned GstContext reference is owned by the caller.
GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const gchar* contextType, MediaPlayerPrivateGStreamerBase* player)
{
    if (!player->ensureGstGLContext())
        return nullptr;

    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(displayContext, player->gstGLDisplay());
        return displayContext;
    }

    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(appContext);
        // The GL context GType macro was renamed in GStreamer 1.11.
#if GST_CHECK_VERSION(1, 11, 0)
        gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, player->gstGLContext(), nullptr);
#else
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, player->gstGLContext(), nullptr);
#endif
        return appContext;
    }

    return nullptr;
}
422
// Lazily creates a GstGLContext wrapping WebKit's shared compositing GL
// context, plus a matching GstGLDisplay for the current platform. Returns
// true when a usable context exists (idempotent after the first success).
bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
{
    if (m_glContext)
        return true;

    auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();
    if (!m_glDisplay) {
        // Pick the GstGLDisplay implementation matching the windowing system.
#if PLATFORM(X11)
#if USE(GLX)
        if (is<PlatformDisplayX11>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
#elif USE(EGL)
        if (is<PlatformDisplayX11>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
#endif
#endif

#if PLATFORM(WAYLAND)
        if (is<PlatformDisplayWayland>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
#endif

#if PLATFORM(WPE)
        ASSERT(is<PlatformDisplayWPE>(sharedDisplay));
        m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWPE>(sharedDisplay).eglDisplay()));
#endif

        ASSERT(m_glDisplay);
    }

    GLContext* webkitContext = sharedDisplay.sharingGLContext();
    // EGL and GLX are mutually exclusive, no need for ifdefs here.
    GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;

#if USE(OPENGL_ES_2)
    GstGLAPI glAPI = GST_GL_API_GLES2;
#elif USE(OPENGL)
    GstGLAPI glAPI = GST_GL_API_OPENGL;
#else
    ASSERT_NOT_REACHED();
#endif

    PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
    if (!contextHandle)
        return false;

    // Wrap (not own) WebKit's native GL context for GStreamer's use.
    m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);

    return true;
}
#endif // USE(GSTREAMER_GL)
474
// Returns the video's natural size in pixels: the negotiated frame size
// corrected by the pixel aspect ratio (and, under accelerated compositing,
// by the source orientation). Returns an empty size until the sink has
// negotiated caps. The computed value is cached in m_videoSize.
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
    if (!hasVideo())
        return FloatSize();

    if (!m_videoSize.isEmpty())
        return m_videoSize;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return FloatSize();

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return FloatSize();


    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size, if this fails the
    // video-sink has likely not yet negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return FloatSize();

#if USE(TEXTURE_MAPPER_GL)
    // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
    if (m_renderingCanBeAccelerated) {
        if (m_videoSourceOrientation.usesWidthAsHeight())
            originalSize = originalSize.transposedSize();
    }
#endif

    GST_DEBUG("Original video size: %dx%d", originalSize.width(), originalSize.height());
    GST_DEBUG("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate DAR based on PAR and video size.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

    // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        GST_DEBUG("Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        GST_DEBUG("Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        GST_DEBUG("Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    GST_DEBUG("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    // NOTE: m_videoSize is mutable, which is why a const method can cache here.
    // TODO confirm — the member declaration is outside this file.
    m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}
546
547 void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
548 {
549     if (!m_volumeElement)
550         return;
551
552     GST_DEBUG("Setting volume: %f", volume);
553     gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
554 }
555
556 float MediaPlayerPrivateGStreamerBase::volume() const
557 {
558     if (!m_volumeElement)
559         return 0;
560
561     return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
562 }
563
564
565 void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
566 {
567     if (!m_player || !m_volumeElement)
568         return;
569     double volume;
570     volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
571     // get_volume() can return values superior to 1.0 if the user
572     // applies software user gain via third party application (GNOME
573     // volume control for instance).
574     volume = CLAMP(volume, 0.0, 1.0);
575     m_player->volumeChanged(static_cast<float>(volume));
576 }
577
// Signal handler for notify::volume on m_volumeElement; bounces the
// notification to the main thread via the notifier (the signal may be
// emitted from a non-main thread — TODO confirm against sink behavior).
void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::volume signal.
    GST_DEBUG("Volume changed to: %f", player->volume());

    player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] { player->notifyPlayerOfVolumeChange(); });
}
585
// Returns the cached network state maintained by the derived implementation.
MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
    return m_networkState;
}
590
// Returns the cached ready state maintained by the derived implementation.
MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
    return m_readyState;
}
595
// Size-change hook; intentionally a stub in this base class.
void MediaPlayerPrivateGStreamerBase::sizeChanged()
{
    notImplemented();
}
600
601 void MediaPlayerPrivateGStreamerBase::setMuted(bool muted)
602 {
603     if (!m_volumeElement)
604         return;
605
606     g_object_set(m_volumeElement.get(), "mute", muted, nullptr);
607 }
608
609 bool MediaPlayerPrivateGStreamerBase::muted() const
610 {
611     if (!m_volumeElement)
612         return false;
613
614     gboolean muted;
615     g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
616     return muted;
617 }
618
619 void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
620 {
621     if (!m_player || !m_volumeElement)
622         return;
623
624     gboolean muted;
625     g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
626     m_player->muteChanged(static_cast<bool>(muted));
627 }
628
// Signal handler for notify::mute on m_volumeElement; bounces the
// notification to the main thread via the notifier.
void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::mute signal.
    player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] { player->notifyPlayerOfMute(); });
}
634
// Re-reads whether the client allows accelerated compositing; drives the
// choice between the compositor path and the blocking main-thread paint path.
void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
{
    m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled();
}
639
#if USE(TEXTURE_MAPPER_GL)
// Uploads the current sample's pixels into the given texture. Tries the
// GstVideoGLTextureUploadMeta fast path first, then falls back to mapping the
// frame and copying plane 0.
void MediaPlayerPrivateGStreamerBase::updateTexture(BitmapTextureGL& texture, GstVideoInfo& videoInfo)
{
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());

    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
        if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
            guint ids[4] = { texture.id(), 0, 0, 0 };

            if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                return;
        }
    }

    // Right now the TextureMapper only supports chromas with one plane
    ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);

    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
        return;

    int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
    const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
    texture.updateContents(srcData, WebCore::IntRect(0, 0, GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo)), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
    gst_video_frame_unmap(&videoFrame);
}
667
// Hands the compositor a reference to the platform layer proxy used to push
// video frames.
RefPtr<TextureMapperPlatformLayerProxy> MediaPlayerPrivateGStreamerBase::proxy() const
{
    return m_platformLayerProxy.copyRef();
}
672
// Intentionally empty in this class; frames are pushed through the proxy in
// pushTextureToCompositor() instead.
void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
}
676
// Pushes the current sample to the compositor through the platform layer
// proxy. On the GSTREAMER_GL path the frame's GL texture is wrapped directly
// (zero copy); otherwise the pixels are uploaded into a pooled or freshly
// created BitmapTextureGL.
void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
#if !USE(GSTREAMER_GL)
    // RAII helper that signals m_drawCondition on scope exit, so the thread
    // blocked in triggerRepaint() is released on every return path below.
    class ConditionNotifier {
    public:
        ConditionNotifier(Lock& lock, Condition& condition)
            : m_locker(lock), m_condition(condition)
        {
        }
        ~ConditionNotifier()
        {
            m_condition.notifyOne();
        }
    private:
        LockHolder m_locker;
        Condition& m_condition;
    };
    ConditionNotifier notifier(m_drawMutex, m_drawCondition);
#endif

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    LockHolder holder(m_platformLayerProxy->lock());

    if (!m_platformLayerProxy->isActive())
        return;

#if USE(GSTREAMER_GL)
    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation));
    if (UNLIKELY(!frameHolder->isValid()))
        return;

    // The holder keeps the frame mapped until the compositor drops the buffer.
    std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(frameHolder->textureID(), frameHolder->size(), frameHolder->flags(), GraphicsContext3D::RGBA);
    layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
    m_platformLayerProxy->pushNextBuffer(WTFMove(layerBuffer));
#else
    GstVideoInfo videoInfo;
    if (UNLIKELY(!getSampleVideoInfo(m_sample.get(), videoInfo)))
        return;

    // Reuse a pooled buffer of the right size when available; otherwise build
    // a fresh texture (creating the GL context lazily on first use).
    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    std::unique_ptr<TextureMapperPlatformLayerBuffer> buffer = m_platformLayerProxy->getAvailableBuffer(size, GraphicsContext3D::DONT_CARE);
    if (UNLIKELY(!buffer)) {
        if (UNLIKELY(!m_context3D))
            m_context3D = GraphicsContext3D::create(GraphicsContext3DAttributes(), nullptr, GraphicsContext3D::RenderToCurrentGLContext);

        TextureMapperContextAttributes contextAttributes;
        contextAttributes.initialize();

        auto texture = BitmapTextureGL::create(contextAttributes);
        texture->reset(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
        buffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
    }
    updateTexture(buffer->textureGL(), videoInfo);
    buffer->setExtraFlags(texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0));
    m_platformLayerProxy->pushNextBuffer(WTFMove(buffer));
#endif // USE(GSTREAMER_GL)
}
#endif // USE(TEXTURE_MAPPER_GL)
738
// Main-thread repaint (fired by m_drawTimer on the non-accelerated path):
// asks the MediaPlayer to repaint, then wakes the streaming thread blocked
// in triggerRepaint().
void MediaPlayerPrivateGStreamerBase::repaint()
{
    ASSERT(m_sample);
    ASSERT(isMainThread());

    m_player->repaint();

    LockHolder lock(m_drawMutex);
    m_drawCondition.notifyOne();
}
749
// Receives each new sample from the video sink (on a streaming thread).
// Stores the sample, announces dimensions on the first one, and then either
// blocks while the main thread paints (non-AC path) or pushes the frame to
// the compositor.
void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
    bool triggerResize;
    {
        WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
        // The very first sample means the video dimensions are now known.
        triggerResize = !m_sample;
        m_sample = sample;
    }

    if (triggerResize) {
        GST_DEBUG("First sample reached the sink, triggering video dimensions update");
        m_notifier->notify(MainThreadNotification::SizeChanged, [this] { m_player->sizeChanged(); });
    }

    if (!m_renderingCanBeAccelerated) {
        // Non-accelerated path: schedule a main-thread repaint and block here
        // until repaint() (or cancelRepaint()) signals m_drawCondition.
        LockHolder locker(m_drawMutex);
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
        return;
    }

#if USE(TEXTURE_MAPPER_GL)
#if USE(GSTREAMER_GL)
    pushTextureToCompositor();
#else
    {
        // Upload happens on the compositor thread; wait until it notifies us.
        LockHolder lock(m_drawMutex);
        if (!m_platformLayerProxy->scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); }))
            return;
        m_drawCondition.wait(m_drawMutex);
    }
#endif
#endif // USE(TEXTURE_MAPPER_GL)
}
784
// Video sink "repaint-requested" trampoline into triggerRepaint().
void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
{
    player->triggerRepaint(sample);
}
789
// Cancels a pending non-accelerated repaint and wakes the streaming thread
// waiting on m_drawCondition (also invoked from the destructor).
void MediaPlayerPrivateGStreamerBase::cancelRepaint()
{
    if (!m_renderingCanBeAccelerated) {
        // NOTE(review): the timer is stopped before m_drawMutex is taken —
        // presumably safe because the timer fires on the main thread; confirm.
        m_drawTimer.stop();
        LockHolder locker(m_drawMutex);
        m_drawCondition.notifyOne();
    }
}
798
// Thunk for the fallback video sink's "repaint-cancelled" signal (connected
// with g_signal_connect_swapped in createVideoSink()).
void MediaPlayerPrivateGStreamerBase::repaintCancelledCallback(MediaPlayerPrivateGStreamerBase* player)
{
    player->cancelRepaint();
}
803
804 #if USE(GSTREAMER_GL)
805 GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
806 {
807     GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
808     player->triggerRepaint(sample.get());
809     return GST_FLOW_OK;
810 }
811
812 GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
813 {
814     GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
815     player->triggerRepaint(sample.get());
816     return GST_FLOW_OK;
817 }
818
void MediaPlayerPrivateGStreamerBase::flushCurrentBuffer()
{
    // Invoked on FLUSH_START (via the pad probe installed in
    // createGLAppSink()): the held sample's buffer is about to be reclaimed
    // upstream, so stop referencing it.
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    m_sample.clear();

    {
        LockHolder locker(m_platformLayerProxy->lock());

        // Drop the proxy's buffer but keep its texture, so the compositor
        // retains something to display across the flush.
        if (m_platformLayerProxy->isActive())
            m_platformLayerProxy->dropCurrentBufferWhilePreservingTexture();
    }
}
831 #endif
832
void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
{
    // Cache the size requested by the player.
    m_size = size;
}
837
void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
{
    // Software painting path: draws the current sample into the graphics
    // context. No-op when painting is disabled, the player is hidden, or no
    // valid sample has arrived yet.
    if (context.paintingDisabled())
        return;

    if (!m_player->visible())
        return;

    // m_sample is written from the sink callbacks; access it under the lock.
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    ImagePaintingOptions paintingOptions(CompositeCopy);
    // Ask the painter to apply the source orientation when the accelerated
    // path is active.
    if (m_renderingCanBeAccelerated)
        paintingOptions.m_orientationDescription.setImageOrientationEnum(m_videoSourceOrientation);

    RefPtr<ImageGStreamer> gstImage = ImageGStreamer::createImage(m_sample.get());
    if (!gstImage)
        return;

    // NOTE(review): reinterpret_cast assumes ImageGStreamer::image() returns
    // something layout-compatible with Image; if the types are actually
    // related, a static_cast (or no cast) would be safer — confirm against
    // ImageGStreamer's declaration.
    if (Image* image = reinterpret_cast<Image*>(gstImage->image()))
        context.drawImage(*image, rect, gstImage->rect(), paintingOptions);
}
861
862 #if USE(GSTREAMER_GL)
bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // Copies the current GL video frame into the caller-provided texture.
    // Returns false whenever the copy cannot be performed.
    UNUSED_PARAM(context);

    // The fallback video sink does not deliver GL memory, so there is no
    // source texture to copy from.
    if (m_usingFallbackVideoSink)
        return false;

    // Premultiplied-alpha output is not supported by this path.
    if (premultiplyAlpha)
        return false;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);

    GstVideoInfo videoInfo;
    if (!getSampleVideoInfo(m_sample.get(), videoInfo))
        return false;

    // Map the sample's buffer as a GL frame; on success videoFrame.data[0]
    // holds the source GL texture ID (read out below).
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return false;

    IntSize size(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    // Orientations that use width as height swap the output dimensions.
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();
    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);

    // The copier is created lazily and reused across frames.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    bool copied = m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);

    // Always unmap, whether or not the copy succeeded.
    gst_video_frame_unmap(&videoFrame);

    return copied;
}
898
NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
{
    // Returns a cairo surface wrapping a copy of the current GL video frame,
    // or nullptr when no GL frame is available. Only implemented for the
    // cairo + accelerated-2D-canvas configuration.
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
    // The fallback sink does not provide GL memory; nothing to wrap.
    if (m_usingFallbackVideoSink)
        return nullptr;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);

    GstVideoInfo videoInfo;
    if (!getSampleVideoInfo(m_sample.get(), videoInfo))
        return nullptr;

    // Map the sample's buffer as a GL frame; videoFrame.data[0] then holds
    // the source GL texture ID.
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return nullptr;

    IntSize size(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    // Orientations that use width as height swap the output dimensions.
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    // The copy is performed in the shared compositing GL context.
    GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
    context->makeContextCurrent();

    // The copier is created lazily and reused across frames.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
    bool copied = m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, 0, GraphicsContext3D::TEXTURE_2D, 0, GraphicsContext3D::RGBA, GraphicsContext3D::RGBA, GraphicsContext3D::UNSIGNED_BYTE, false, m_videoSourceOrientation);
    gst_video_frame_unmap(&videoFrame);

    if (!copied)
        return nullptr;

    // Hand the copier's result texture to cairo as a GL surface.
    return adoptRef(cairo_gl_surface_create_for_texture(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, m_videoTextureCopier->resultTexture(), size.width(), size.height()));
#else
    return nullptr;
#endif
}
938 #endif // USE(GSTREAMER_GL)
939
940 void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(const ImageOrientation& orientation)
941 {
942     if (m_videoSourceOrientation == orientation)
943         return;
944
945     m_videoSourceOrientation = orientation;
946 }
947
bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
    // Fullscreen presentation is unconditionally supported by this backend.
    return true;
}
952
PlatformMedia MediaPlayerPrivateGStreamerBase::platformMedia() const
{
    // No platform-specific media object is exposed for GStreamer playback.
    return NoPlatformMedia;
}
957
958 MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
959 {
960     if (m_readyState == MediaPlayer::HaveNothing)
961         return MediaPlayer::Unknown;
962
963     if (isLiveStream())
964         return MediaPlayer::LiveStream;
965
966     return MediaPlayer::Download;
967 }
968
969 #if USE(GSTREAMER_GL)
GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
{
    // Builds the appsink used by the GL rendering path. Returns nullptr when
    // GStreamer is older than 1.8 or the element cannot be created.
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
    if (!appsink)
        return nullptr;

    // Keep at most one queued buffer and have the sink emit new-sample /
    // new-preroll signals rather than being polled.
    g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
    g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
    g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);

    // On FLUSH_START drop our reference to the current sample, since its
    // buffer is about to be reclaimed upstream.
    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(appsink, "sink"));
    gst_pad_add_probe (pad.get(), GST_PAD_PROBE_TYPE_EVENT_FLUSH, [] (GstPad*, GstPadProbeInfo* info,  gpointer userData) -> GstPadProbeReturn {
        if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) != GST_EVENT_FLUSH_START)
            return GST_PAD_PROBE_OK;

        auto* player = static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
        player->flushCurrentBuffer();
        return GST_PAD_PROBE_OK;
    }, this, nullptr);

    return appsink;
}
995
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
{
    // Builds the "glupload ! glcolorconvert ! appsink" bin used for
    // accelerated rendering. Returns nullptr if any element cannot be
    // created or linked.
    // FIXME: Currently it's not possible to get the video frames and caps using this approach until
    // the pipeline gets into playing state. Due to this, trying to grab a frame and painting it by some
    // other mean (canvas or webgl) before playing state can result in a crash.
    // This is being handled in https://bugs.webkit.org/show_bug.cgi?id=159460.
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    gboolean result = TRUE;
    GstElement* videoSink = gst_bin_new(nullptr);
    GstElement* upload = gst_element_factory_make("glupload", nullptr);
    GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
    GstElement* appsink = createGLAppSink();

    if (!appsink || !upload || !colorconvert) {
        GST_WARNING("Failed to create GstGL elements");
        // The elements are not in the bin yet, so each successfully created
        // one has to be unreffed individually.
        gst_object_unref(videoSink);

        if (upload)
            gst_object_unref(upload);
        if (colorconvert)
            gst_object_unref(colorconvert);
        if (appsink)
            gst_object_unref(appsink);

        return nullptr;
    }

    // From here on the bin owns the elements.
    gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);

    // Restrict the appsink's input to GL memory in the expected format.
    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) " GST_GL_CAPS_FORMAT));

    result &= gst_element_link_pads(upload, "src", colorconvert, "sink");
    result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get());

    // Expose the upload element's sink pad as the bin's own sink pad.
    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
    gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));

    if (!result) {
        GST_WARNING("Failed to link GstGL elements");
        gst_object_unref(videoSink);
        videoSink = nullptr;
    }
    return videoSink;
}
1042 #endif // USE(GSTREAMER_GL)
1043
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    // Builds the video sink branch of the pipeline: a GL sink when
    // accelerated rendering is possible, otherwise the WebKit fallback sink,
    // optionally wrapped in an fpsdisplaysink for frame statistics.
    acceleratedRenderingStateChanged();

#if USE(GSTREAMER_GL)
    if (m_renderingCanBeAccelerated)
        m_videoSink = createVideoSinkGL();
#endif

    if (!m_videoSink) {
        // No GL sink: use the software sink and repaint through its
        // repaint-requested / repaint-cancelled signals.
        m_usingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
        g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
    }

    GstElement* videoSink = nullptr;
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);

        // Turn off text overlay unless logging is enabled.
#if LOG_DISABLED
        g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#else
        if (!isLogChannelEnabled("Media"))
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#endif // LOG_DISABLED

        // Only keep the fpsdisplaysink wrapper if it can host our real sink
        // through its "video-sink" property; otherwise discard it.
        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);

    return videoSink;
}
1087
void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
{
    // Adopts the pipeline element implementing GstStreamVolume, pushes the
    // player's mute state to it, and wires change notifications back to us.
    // Must only be called once per player.
    ASSERT(!m_volumeElement);
    m_volumeElement = volume;

    // We don't set the initial volume because we trust the sink to keep it for us. See
    // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
    if (!m_player->platformVolumeConfigurationRequired()) {
        GST_DEBUG("Setting stream volume to %f", m_player->volume());
        g_object_set(m_volumeElement.get(), "volume", m_player->volume(), nullptr);
    } else
        GST_DEBUG("Not setting stream volume, trusting system one");

    GST_DEBUG("Setting stream muted %d",  m_player->muted());
    g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);

    // Track volume/mute changes made directly on the element.
    g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
    g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
}
1107
1108 unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
1109 {
1110     guint64 decodedFrames = 0;
1111     if (m_fpsSink)
1112         g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
1113     return static_cast<unsigned>(decodedFrames);
1114 }
1115
1116 unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
1117 {
1118     guint64 framesDropped = 0;
1119     if (m_fpsSink)
1120         g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
1121     return static_cast<unsigned>(framesDropped);
1122 }
1123
1124 unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
1125 {
1126     GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
1127     gint64 position = 0;
1128
1129     if (audioSink() && gst_element_query(audioSink(), query))
1130         gst_query_parse_position(query, 0, &position);
1131
1132     gst_query_unref(query);
1133     return static_cast<unsigned>(position);
1134 }
1135
1136 unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
1137 {
1138     GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
1139     gint64 position = 0;
1140
1141     if (gst_element_query(m_videoSink.get(), query))
1142         gst_query_parse_position(query, 0, &position);
1143
1144     gst_query_unref(query);
1145     return static_cast<unsigned>(position);
1146 }
1147
1148 #if ENABLE(ENCRYPTED_MEDIA)
void MediaPlayerPrivateGStreamerBase::cdmInstanceAttached(const CDMInstance& instance)
{
    // Associates the CDM instance with this player; only one instance may be
    // attached at a time.
    ASSERT(!m_cdmInstance);
    m_cdmInstance = &instance;
    GST_DEBUG("CDM instance %p set", m_cdmInstance.get());
    // Wake any thread waiting on m_protectionCondition for a CDM instance.
    m_protectionCondition.notifyAll();
}
1156
void MediaPlayerPrivateGStreamerBase::cdmInstanceDetached(const CDMInstance& instance)
{
    // Drops the association with the given CDM instance, which must be the
    // one currently attached.
#ifdef NDEBUG
    // In release builds `instance` is only referenced by the ASSERT below.
    UNUSED_PARAM(instance);
#endif
    ASSERT(m_cdmInstance.get() == &instance);
    GST_DEBUG("detaching CDM instance %p", m_cdmInstance.get());
    m_cdmInstance = nullptr;
    // Wake any thread waiting on m_protectionCondition.
    m_protectionCondition.notifyAll();
}
1167
void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithInstance(const CDMInstance& instance)
{
    // The given instance must already be attached; the actual work is
    // delegated to the local-instance variant.
    ASSERT(m_cdmInstance.get() == &instance);
    GST_TRACE("instance %p, current stored %p", &instance, m_cdmInstance.get());
    attemptToDecryptWithLocalInstance();
}
1174
void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithLocalInstance()
{
    // Intentionally empty: decryption with the attached CDM instance is not
    // implemented here yet.
    // FIXME.
}
1179
1180 void MediaPlayerPrivateGStreamerBase::dispatchDecryptionKey(GstBuffer* buffer)
1181 {
1182     bool eventHandled = gst_element_send_event(m_pipeline.get(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB,
1183         gst_structure_new("drm-cipher", "key", GST_TYPE_BUFFER, buffer, nullptr)));
1184     m_needToResendCredentials = m_handledProtectionEvents.size() > 0;
1185     GST_TRACE("emitted decryption cipher key on pipeline, event handled %s, need to resend credentials %s", boolForPrinting(eventHandled), boolForPrinting(m_needToResendCredentials));
1186 }
1187
1188 void MediaPlayerPrivateGStreamerBase::handleProtectionEvent(GstEvent* event)
1189 {
1190     if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
1191         GST_DEBUG("event %u already handled", GST_EVENT_SEQNUM(event));
1192         m_handledProtectionEvents.remove(GST_EVENT_SEQNUM(event));
1193         if (m_needToResendCredentials) {
1194             GST_DEBUG("resending credentials");
1195             attemptToDecryptWithLocalInstance();
1196         }
1197         return;
1198     }
1199
1200     const gchar* eventKeySystemId = nullptr;
1201     gst_event_parse_protection(event, &eventKeySystemId, nullptr, nullptr);
1202     GST_WARNING("FIXME: unhandled protection event for %s", eventKeySystemId);
1203     ASSERT_NOT_REACHED();
1204 }
1205 #endif
1206
1207 bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
1208 {
1209     bool result = false;
1210
1211 #if ENABLE(ENCRYPTED_MEDIA)
1212     result = GStreamerEMEUtilities::isClearKeyKeySystem(keySystem);
1213 #endif
1214
1215     GST_DEBUG("checking for KeySystem support with %s and type %s: %s", keySystem.utf8().data(), mimeType.utf8().data(), boolForPrinting(result));
1216     return result;
1217 }
1218
MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
{
    // Default implementation: the caller-computed support decision is
    // returned unchanged and the parameters are ignored.
    UNUSED_PARAM(parameters);
    return result;
}
1224
1225 }
1226
1227 #endif // USE(GSTREAMER)