[GStreamer][MiniBrowser] Honor GStreamer command line parameters in MiniBrowser
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamerBase.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
7  * Copyright (C) 2015, 2016 Metrological Group B.V.
8  *
9  * This library is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Library General Public
11  * License as published by the Free Software Foundation; either
12  * version 2 of the License, or (at your option) any later version.
13  *
14  * This library is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
17  * Library General Public License for more details.
18  *
19  * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
21  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
22  * Boston, MA 02110-1301, USA.
23  */
24
25 #include "config.h"
26 #include "MediaPlayerPrivateGStreamerBase.h"
27
28 #if ENABLE(VIDEO) && USE(GSTREAMER)
29
30 #include "GStreamerUtilities.h"
31 #include "GraphicsContext.h"
32 #include "ImageGStreamer.h"
33 #include "ImageOrientation.h"
34 #include "IntRect.h"
35 #include "Logging.h"
36 #include "MediaPlayer.h"
37 #include "NotImplemented.h"
38 #include "VideoSinkGStreamer.h"
39 #include "WebKitWebSourceGStreamer.h"
40 #include <wtf/glib/GMutexLocker.h>
41 #include <wtf/glib/GUniquePtr.h>
42 #include <wtf/text/AtomicString.h>
43 #include <wtf/text/CString.h>
44 #include <wtf/MathExtras.h>
45 #include <wtf/StringPrintStream.h>
46
47 #include <gst/audio/streamvolume.h>
48 #include <gst/video/gstvideometa.h>
49
50 #if ENABLE(ENCRYPTED_MEDIA)
51 #include "CDMInstance.h"
52 #include "GStreamerEMEUtilities.h"
53 #include "SharedBuffer.h"
54 #include "WebKitClearKeyDecryptorGStreamer.h"
55 #endif
56
57 #if USE(GSTREAMER_GL)
58 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
59 #define GST_GL_CAPS_FORMAT "{ BGRx, BGRA }"
60 #define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureBGRAToRGBA
61 #define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertBGRAToRGBA
62 #else
63 #define GST_GL_CAPS_FORMAT "{ xRGB, ARGB }"
64 #define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureARGBToRGBA
65 #define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertARGBToRGBA
66 #endif
67
68 #include <gst/app/gstappsink.h>
69
70 #if USE(LIBEPOXY)
71 // Include the <epoxy/gl.h> header before <gst/gl/gl.h>.
72 #include <epoxy/gl.h>
73 #endif
74
75 #define GST_USE_UNSTABLE_API
76 #include <gst/gl/gl.h>
77 #undef GST_USE_UNSTABLE_API
78
79 #include "GLContext.h"
80 #if USE(GLX)
81 #include "GLContextGLX.h"
82 #include <gst/gl/x11/gstgldisplay_x11.h>
83 #endif
84
85 #if USE(EGL)
86 #include "GLContextEGL.h"
87 #include <gst/gl/egl/gstgldisplay_egl.h>
88 #endif
89
90 #if PLATFORM(X11)
91 #include "PlatformDisplayX11.h"
92 #endif
93
94 #if PLATFORM(WAYLAND)
95 #include "PlatformDisplayWayland.h"
96 #elif PLATFORM(WPE)
97 #include "PlatformDisplayWPE.h"
98 #endif
99
100 // gstglapi.h may include eglplatform.h and it includes X.h, which
101 // defines None, breaking MediaPlayer::None enum
102 #if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
103 #undef None
104 #endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
105 #include "VideoTextureCopierGStreamer.h"
106 #endif // USE(GSTREAMER_GL)
107
108 #if USE(TEXTURE_MAPPER_GL)
109 #include "BitmapTextureGL.h"
110 #include "BitmapTexturePool.h"
111 #include "TextureMapperContextAttributes.h"
112 #include "TextureMapperGL.h"
113 #include "TextureMapperPlatformLayerBuffer.h"
114 #include "TextureMapperPlatformLayerProxy.h"
115 #if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
116 #include <cairo-gl.h>
117 #endif
118 #endif // USE(TEXTURE_MAPPER_GL)
119
120 GST_DEBUG_CATEGORY(webkit_media_player_debug);
121 #define GST_CAT_DEFAULT webkit_media_player_debug
122
123
124 namespace WebCore {
125 using namespace std;
126
127 void registerWebKitGStreamerElements()
128 {
129 #if ENABLE(ENCRYPTED_MEDIA)
130     if (!webkitGstCheckVersion(1, 6, 1))
131         return;
132
133     GRefPtr<GstElementFactory> clearKeyDecryptorFactory = adoptGRef(gst_element_factory_find("webkitclearkey"));
134     if (!clearKeyDecryptorFactory)
135         gst_element_register(nullptr, "webkitclearkey", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_CK_DECRYPT);
136 #endif
137 }
138
139 bool MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements()
140 {
141     registerWebKitGStreamerElements();
142
143     GRefPtr<GstElementFactory> srcFactory = adoptGRef(gst_element_factory_find("webkitwebsrc"));
144     if (!srcFactory) {
145         GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
146         gst_element_register(0, "webkitwebsrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_WEB_SRC);
147     }
148
149     return true;
150 }
151
// Computes the greatest common divisor of |a| and |b| with Euclid's algorithm.
// The result is always non-negative; greatestCommonDivisor(0, 0) is 0.
static int greatestCommonDivisor(int a, int b)
{
    while (b) {
        int remainder = a % b;
        a = b;
        b = remainder;
    }

    // Euclid's algorithm can leave a negative value when the inputs are
    // negative; normalize without relying on GLib's ABS() macro.
    return a < 0 ? -a : a;
}
162
#if USE(TEXTURE_MAPPER_GL)
// Maps an image orientation tag to the TextureMapperGL rotation flag needed to
// display the frame upright. Mirrored orientations are not expected here and
// trigger the assertion (returning 0 — no rotation — in release builds).
static inline TextureMapperGL::Flags texMapFlagFromOrientation(const ImageOrientation& orientation)
{
    switch (orientation) {
    case DefaultImageOrientation:
        return 0;
    case OriginRightTop:
        return TextureMapperGL::ShouldRotateTexture90;
    case OriginBottomRight:
        return TextureMapperGL::ShouldRotateTexture180;
    case OriginLeftBottom:
        return TextureMapperGL::ShouldRotateTexture270;
    default:
        ASSERT_NOT_REACHED();
    }

    return 0;
}
#endif
182
183 #if USE(GSTREAMER_GL)
184 class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
185 public:
186     explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags)
187     {
188         GstVideoInfo videoInfo;
189         if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
190             return;
191
192         m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
193         m_flags = flags | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0) | TEXTURE_MAPPER_COLOR_CONVERT_FLAG;
194
195         GstBuffer* buffer = gst_sample_get_buffer(sample);
196         if (UNLIKELY(!gst_video_frame_map(&m_videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL))))
197             return;
198
199         m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
200         m_isValid = true;
201     }
202
203     virtual ~GstVideoFrameHolder()
204     {
205         if (UNLIKELY(!m_isValid))
206             return;
207
208         gst_video_frame_unmap(&m_videoFrame);
209     }
210
211     const IntSize& size() const { return m_size; }
212     TextureMapperGL::Flags flags() const { return m_flags; }
213     GLuint textureID() const { return m_textureID; }
214     bool isValid() const { return m_isValid; }
215
216 private:
217     GstVideoFrame m_videoFrame;
218     IntSize m_size;
219     TextureMapperGL::Flags m_flags;
220     GLuint m_textureID;
221     bool m_isValid { false };
222 };
223 #endif // USE(GSTREAMER_GL)
224
// Sets up the main-thread notifier, the repaint timer and (with
// TEXTURE_MAPPER_GL) the compositor layer proxy. Pipeline/sink setup happens
// later, in subclasses and setPipeline().
MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
    , m_player(player)
    , m_fpsSink(nullptr)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_networkState(MediaPlayer::Empty)
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
#if USE(TEXTURE_MAPPER_GL)
    , m_platformLayerProxy(adoptRef(new TextureMapperPlatformLayerProxy()))
#endif
{
    // m_sampleMutex guards m_sample, which is written from the GStreamer
    // streaming threads (triggerRepaint) and read on other threads.
    g_mutex_init(&m_sampleMutex);
}
238
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
#if ENABLE(ENCRYPTED_MEDIA)
    // Wake any streaming thread blocked in handleSyncMessage() waiting for a
    // CDM instance.
    m_protectionCondition.notifyAll();
#endif
    m_notifier->invalidate();

    // Disconnect every signal handler targeting this object before teardown.
    if (m_videoSink) {
        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
#if USE(GSTREAMER_GL)
        if (GST_IS_BIN(m_videoSink.get())) {
            GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
            g_signal_handlers_disconnect_by_data(appsink.get(), this);
        }
#endif
    }

    if (m_volumeElement)
        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    // This will release the GStreamer thread from m_drawCondition if AC is disabled.
    cancelRepaint();

    // The change to GST_STATE_NULL state is always synchronous. So after this gets executed we don't need to worry
    // about handlers running in the GStreamer thread.
    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);

    g_mutex_clear(&m_sampleMutex);

    m_player = nullptr;
}
271
272 void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
273 {
274     m_pipeline = pipeline;
275
276     GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
277     gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
278         auto& player = *static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
279
280         if (player.handleSyncMessage(message)) {
281             gst_message_unref(message);
282             return GST_BUS_DROP;
283         }
284
285         return GST_BUS_PASS;
286     }, this, nullptr);
287 }
288
289 #if ENABLE(ENCRYPTED_MEDIA)
290 static std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> extractEventsAndSystemsFromMessage(GstMessage* message)
291 {
292     const GstStructure* structure = gst_message_get_structure(message);
293
294     const GValue* streamEncryptionAllowedSystemsValue = gst_structure_get_value(structure, "stream-encryption-systems");
295     ASSERT(streamEncryptionAllowedSystemsValue && G_VALUE_HOLDS(streamEncryptionAllowedSystemsValue, G_TYPE_STRV));
296     const char** streamEncryptionAllowedSystems = reinterpret_cast<const char**>(g_value_get_boxed(streamEncryptionAllowedSystemsValue));
297     ASSERT(streamEncryptionAllowedSystems);
298     Vector<String> streamEncryptionAllowedSystemsVector;
299     unsigned i;
300     for (i = 0; streamEncryptionAllowedSystems[i]; ++i)
301         streamEncryptionAllowedSystemsVector.append(streamEncryptionAllowedSystems[i]);
302
303     const GValue* streamEncryptionEventsList = gst_structure_get_value(structure, "stream-encryption-events");
304     ASSERT(streamEncryptionEventsList && GST_VALUE_HOLDS_LIST(streamEncryptionEventsList));
305     unsigned streamEncryptionEventsListSize = gst_value_list_get_size(streamEncryptionEventsList);
306     Vector<GRefPtr<GstEvent>> streamEncryptionEventsVector;
307     for (i = 0; i < streamEncryptionEventsListSize; ++i)
308         streamEncryptionEventsVector.append(GRefPtr<GstEvent>(static_cast<GstEvent*>(g_value_get_boxed(gst_value_list_get_value(streamEncryptionEventsList, i)))));
309
310     return std::make_pair(streamEncryptionEventsVector, streamEncryptionAllowedSystemsVector);
311 }
312 #endif
313
314 bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
315 {
316     UNUSED_PARAM(message);
317     if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
318         return false;
319
320     const gchar* contextType;
321     gst_message_parse_context_type(message, &contextType);
322     GST_DEBUG("Handling %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message));
323
324 #if USE(GSTREAMER_GL)
325     GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType));
326     if (elementContext) {
327         gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
328         return true;
329     }
330 #endif // USE(GSTREAMER_GL)
331
332 #if ENABLE(ENCRYPTED_MEDIA)
333     if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
334         if (isMainThread()) {
335             GST_ERROR("can't handle drm-preferred-decryption-system-id need context message in the main thread");
336             ASSERT_NOT_REACHED();
337             return false;
338         }
339         GST_DEBUG("handling drm-preferred-decryption-system-id need context message");
340         LockHolder lock(m_protectionMutex);
341         std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> streamEncryptionInformation = extractEventsAndSystemsFromMessage(message);
342         GST_TRACE("found %" G_GSIZE_FORMAT " protection events", streamEncryptionInformation.first.size());
343         Vector<uint8_t> concatenatedInitDataChunks;
344         unsigned concatenatedInitDataChunksNumber = 0;
345         String eventKeySystemIdString;
346
347         for (auto& event : streamEncryptionInformation.first) {
348             GST_TRACE("handling protection event %u", GST_EVENT_SEQNUM(event.get()));
349             const char* eventKeySystemId = nullptr;
350             GstBuffer* data = nullptr;
351             gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);
352
353             GstMapInfo mapInfo;
354             if (!gst_buffer_map(data, &mapInfo, GST_MAP_READ)) {
355                 GST_WARNING("cannot map %s protection data", eventKeySystemId);
356                 break;
357             }
358             GST_TRACE("appending init data for %s of size %" G_GSIZE_FORMAT, eventKeySystemId, mapInfo.size);
359             GST_MEMDUMP("init data", reinterpret_cast<const unsigned char *>(mapInfo.data), mapInfo.size);
360             concatenatedInitDataChunks.append(mapInfo.data, mapInfo.size);
361             ++concatenatedInitDataChunksNumber;
362             eventKeySystemIdString = eventKeySystemId;
363             if (streamEncryptionInformation.second.contains(eventKeySystemId)) {
364                 GST_TRACE("considering init data handled for %s", eventKeySystemId);
365                 m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
366             }
367             gst_buffer_unmap(data, &mapInfo);
368         }
369
370         if (!concatenatedInitDataChunksNumber)
371             return false;
372
373         if (concatenatedInitDataChunksNumber > 1)
374             eventKeySystemIdString = emptyString();
375
376         RunLoop::main().dispatch([weakThis = m_weakPtrFactory.createWeakPtr(*this), eventKeySystemIdString, initData = WTFMove(concatenatedInitDataChunks)] {
377             if (!weakThis)
378                 return;
379
380             GST_DEBUG("scheduling initializationDataEncountered event for %s with concatenated init datas size of %" G_GSIZE_FORMAT, eventKeySystemIdString.utf8().data(), initData.size());
381             GST_MEMDUMP("init datas", initData.data(), initData.size());
382             weakThis->m_player->initializationDataEncountered(ASCIILiteral("cenc"), ArrayBuffer::create(initData.data(), initData.size()));
383         });
384
385         GST_INFO("waiting for a CDM instance");
386         m_protectionCondition.waitFor(m_protectionMutex, Seconds(4), [this] {
387             return this->m_cdmInstance;
388         });
389         if (m_cdmInstance && !m_cdmInstance->keySystem().isEmpty()) {
390             const char* preferredKeySystemUuid = GStreamerEMEUtilities::keySystemToUuid(m_cdmInstance->keySystem());
391             GST_INFO("working with %s, continuing with %s on %s", m_cdmInstance->keySystem().utf8().data(), preferredKeySystemUuid, GST_MESSAGE_SRC_NAME(message));
392
393             GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
394             GstStructure* contextStructure = gst_context_writable_structure(context.get());
395             gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, preferredKeySystemUuid, nullptr);
396             gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
397         } else
398             GST_WARNING("no proper CDM instance attached");
399
400         return true;
401     }
402 #endif // ENABLE(ENCRYPTED_MEDIA)
403
404     return false;
405 }
406
#if USE(GSTREAMER_GL)
// Builds the GstContext answering a GL need-context query. Handles the GL
// display context type and "gst.gl.app_context" (which wraps WebKit's own GL
// context). Returns a new reference, or nullptr for unknown context types or
// when the GL context could not be set up.
GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const char* contextType)
{
    if (!ensureGstGLContext())
        return nullptr;

    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(displayContext, gstGLDisplay());
        return displayContext;
    }

    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(appContext);
        // The GType macro for GstGLContext was renamed in GStreamer 1.11.
#if GST_CHECK_VERSION(1, 11, 0)
        gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, gstGLContext(), nullptr);
#else
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, gstGLContext(), nullptr);
#endif
        return appContext;
    }

    return nullptr;
}
432
// Lazily creates the GstGLDisplay matching the current platform display
// (X11/Wayland/WPE) and a GstGLContext wrapping WebKit's sharing GL context.
// Returns false when the underlying native context handle is unavailable.
bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
{
    if (m_glContext)
        return true;

    auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();
    if (!m_glDisplay) {
#if PLATFORM(X11)
#if USE(GLX)
        if (is<PlatformDisplayX11>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
#elif USE(EGL)
        if (is<PlatformDisplayX11>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
#endif
#endif

#if PLATFORM(WAYLAND)
        if (is<PlatformDisplayWayland>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
#endif

#if PLATFORM(WPE)
        ASSERT(is<PlatformDisplayWPE>(sharedDisplay));
        m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWPE>(sharedDisplay).eglDisplay()));
#endif

        ASSERT(m_glDisplay);
    }

    GLContext* webkitContext = sharedDisplay.sharingGLContext();
    // EGL and GLX are mutually exclusive, no need for ifdefs here.
    GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;

#if USE(OPENGL_ES)
    GstGLAPI glAPI = GST_GL_API_GLES2;
#elif USE(OPENGL)
    GstGLAPI glAPI = GST_GL_API_OPENGL;
#else
    ASSERT_NOT_REACHED();
#endif

    PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
    if (!contextHandle)
        return false;

    // Wrap (not own) the existing WebKit context so GStreamer can use it.
    m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);

    return true;
}
#endif // USE(GSTREAMER_GL)
484
// Returns the size of the video, corrected by the pixel aspect ratio from the
// negotiated caps (and by the source orientation when compositing is
// accelerated). Returns an empty size until a sample with caps is available.
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
    if (!hasVideo())
        return FloatSize();

    // Return the cached value when it has already been computed.
    if (!m_videoSize.isEmpty())
        return m_videoSize;

    // m_sample is also written from the streaming threads (triggerRepaint).
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return FloatSize();

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return FloatSize();


    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size, if this fails the
    // video-sink has likely not yet negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return FloatSize();

#if USE(TEXTURE_MAPPER_GL)
    // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
    if (m_renderingCanBeAccelerated) {
        if (m_videoSourceOrientation.usesWidthAsHeight())
            originalSize = originalSize.transposedSize();
    }
#endif

    GST_DEBUG("Original video size: %dx%d", originalSize.width(), originalSize.height());
    GST_DEBUG("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate DAR based on PAR and video size.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

    // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        GST_DEBUG("Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        GST_DEBUG("Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        GST_DEBUG("Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    GST_DEBUG("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    // m_videoSize is a mutable member: cache the result for subsequent calls.
    m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}
556
// Pushes the requested volume (0..1) to the pipeline's volume element, using
// the cubic scale — the same scale volume() reads back.
void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
{
    // Nothing to control until a volume element is attached.
    if (!m_volumeElement)
        return;

    GST_DEBUG("Setting volume: %f", volume);
    gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
}
565
566 float MediaPlayerPrivateGStreamerBase::volume() const
567 {
568     if (!m_volumeElement)
569         return 0;
570
571     return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
572 }
573
574
575 void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
576 {
577     if (!m_player || !m_volumeElement)
578         return;
579     double volume;
580     volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
581     // get_volume() can return values superior to 1.0 if the user
582     // applies software user gain via third party application (GNOME
583     // volume control for instance).
584     volume = CLAMP(volume, 0.0, 1.0);
585     m_player->volumeChanged(static_cast<float>(volume));
586 }
587
// C-style signal callback; bounces the work to the main thread via the
// notifier before touching MediaPlayer state.
void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::volume signal.
    GST_DEBUG("Volume changed to: %f", player->volume());

    player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] { player->notifyPlayerOfVolumeChange(); });
}
595
// Simple accessor for the cached network state; m_networkState is updated
// elsewhere (e.g. by subclasses handling pipeline messages).
MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
    return m_networkState;
}
600
// Simple accessor for the cached ready state; m_readyState is updated
// elsewhere (e.g. by subclasses handling pipeline messages).
MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
    return m_readyState;
}
605
// Intentionally unimplemented in the base class; notImplemented() logs the
// call in debug builds.
void MediaPlayerPrivateGStreamerBase::sizeChanged()
{
    notImplemented();
}
610
611 void MediaPlayerPrivateGStreamerBase::setMuted(bool mute)
612 {
613     if (!m_volumeElement)
614         return;
615
616     bool currentValue = muted();
617     if (currentValue == mute)
618         return;
619
620     GST_INFO("Set muted to %s", toString(mute).utf8().data());
621     g_object_set(m_volumeElement.get(), "mute", mute, nullptr);
622 }
623
624 bool MediaPlayerPrivateGStreamerBase::muted() const
625 {
626     if (!m_volumeElement)
627         return false;
628
629     gboolean muted;
630     g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
631     GST_INFO("Player is muted: %s", toString(static_cast<bool>(muted)).utf8().data());
632     return muted;
633 }
634
635 void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
636 {
637     if (!m_player || !m_volumeElement)
638         return;
639
640     gboolean muted;
641     g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
642     m_player->muteChanged(static_cast<bool>(muted));
643 }
644
// C-style signal callback; bounces the work to the main thread via the
// notifier before touching MediaPlayer state.
void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::mute signal.
    player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] { player->notifyPlayerOfMute(); });
}
650
651 void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
652 {
653     m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled();
654 }
655
#if USE(TEXTURE_MAPPER_GL)
// Uploads the current sample's pixels into |texture|. Tries the
// GstVideoGLTextureUploadMeta fast path first (single-texture formats only),
// then falls back to mapping the frame and copying the plane data.
void MediaPlayerPrivateGStreamerBase::updateTexture(BitmapTextureGL& texture, GstVideoInfo& videoInfo)
{
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());

    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
        if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
            guint ids[4] = { texture.id(), 0, 0, 0 };

            if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                return;
        }
    }

    // Right now the TextureMapper only supports chromas with one plane
    ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);

    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
        return;

    int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
    const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
    texture.updateContents(srcData, WebCore::IntRect(0, 0, GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo)), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
    gst_video_frame_unmap(&videoFrame);
}
683
// Returns a new strong reference to the layer proxy shared with the compositor.
RefPtr<TextureMapperPlatformLayerProxy> MediaPlayerPrivateGStreamerBase::proxy() const
{
    return m_platformLayerProxy.copyRef();
}
688
// Intentional no-op: this class pushes buffers from pushTextureToCompositor()
// instead. Kept to satisfy the platform-layer interface.
void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
}
692
// Hands the current sample to the compositor, either as a wrapped GL texture
// (GSTREAMER_GL) or by copying the pixels into a pooled texture.
void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
#if !USE(GSTREAMER_GL)
    // In the non-GL path this runs on the compositor thread while
    // triggerRepaint() blocks on m_drawCondition; this RAII helper guarantees
    // the waiter is released on every exit path, including early returns.
    class ConditionNotifier {
    public:
        ConditionNotifier(Lock& lock, Condition& condition)
            : m_locker(lock), m_condition(condition)
        {
        }
        ~ConditionNotifier()
        {
            m_condition.notifyOne();
        }
    private:
        LockHolder m_locker;
        Condition& m_condition;
    };
    ConditionNotifier notifier(m_drawMutex, m_drawCondition);
#endif

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    LockHolder holder(m_platformLayerProxy->lock());

    if (!m_platformLayerProxy->isActive())
        return;

#if USE(GSTREAMER_GL)
    // Zero-copy path: wrap the GL texture carried by the sample; the frame
    // stays mapped for as long as the holder lives.
    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation));
    if (UNLIKELY(!frameHolder->isValid()))
        return;

    std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(frameHolder->textureID(), frameHolder->size(), frameHolder->flags(), GraphicsContext3D::RGBA);
    layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
    m_platformLayerProxy->pushNextBuffer(WTFMove(layerBuffer));
#else
    // Copy path: upload the sample's pixels into a recycled (or new) texture.
    GstVideoInfo videoInfo;
    if (UNLIKELY(!getSampleVideoInfo(m_sample.get(), videoInfo)))
        return;

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    std::unique_ptr<TextureMapperPlatformLayerBuffer> buffer = m_platformLayerProxy->getAvailableBuffer(size, GL_DONT_CARE);
    if (UNLIKELY(!buffer)) {
        TextureMapperContextAttributes contextAttributes;
        contextAttributes.initialize();

        auto texture = BitmapTextureGL::create(contextAttributes);
        texture->reset(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
        buffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
    }
    updateTexture(buffer->textureGL(), videoInfo);
    buffer->setExtraFlags(texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0));
    m_platformLayerProxy->pushNextBuffer(WTFMove(buffer));
#endif // USE(GSTREAMER_GL)
}
#endif // USE(TEXTURE_MAPPER_GL)
751
// Runs on the main thread (scheduled via m_drawTimer); paints the current
// sample and then releases the streaming thread blocked in triggerRepaint().
void MediaPlayerPrivateGStreamerBase::repaint()
{
    ASSERT(m_sample);
    ASSERT(isMainThread());

    m_player->repaint();

    LockHolder lock(m_drawMutex);
    m_drawCondition.notifyOne();
}
762
// Called from GStreamer streaming threads with each new sample. Stores the
// sample and either blocks until the main thread repaints (non-accelerated
// path) or forwards the frame to the compositor.
void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
    bool triggerResize;
    {
        WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
        triggerResize = !m_sample;
        m_sample = sample;
    }

    if (triggerResize) {
        GST_DEBUG("First sample reached the sink, triggering video dimensions update");
        m_notifier->notify(MainThreadNotification::SizeChanged, [this] { m_player->sizeChanged(); });
    }

    if (!m_renderingCanBeAccelerated) {
        // Schedule repaint() on the main thread and block until it has run
        // (or until cancelRepaint() releases us).
        LockHolder locker(m_drawMutex);
        if (m_drawCancelled)
            return;
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
        return;
    }

#if USE(TEXTURE_MAPPER_GL)
#if USE(GSTREAMER_GL)
    pushTextureToCompositor();
#else
    {
        // Hand the upload over to the compositor thread and wait for it to
        // finish (pushTextureToCompositor's ConditionNotifier wakes us).
        LockHolder lock(m_drawMutex);
        if (m_drawCancelled)
            return;
        if (!m_platformLayerProxy->scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); }))
            return;
        m_drawCondition.wait(m_drawMutex);
    }
#endif
#endif // USE(TEXTURE_MAPPER_GL)
}
801
// Trampoline connected (swapped) to the fallback sink's "repaint-requested"
// signal; see createVideoSink().
void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
{
    player->triggerRepaint(sample);
}
806
807 void MediaPlayerPrivateGStreamerBase::cancelRepaint()
808 {
809     LockHolder locker(m_drawMutex);
810
811     if (!m_renderingCanBeAccelerated) {
812         m_drawTimer.stop();
813     }
814
815     m_drawCancelled = true;
816     m_drawCondition.notifyOne();
817 }
818
// Trampoline connected (swapped) to the fallback sink's "repaint-cancelled"
// signal; see createVideoSink().
void MediaPlayerPrivateGStreamerBase::repaintCancelledCallback(MediaPlayerPrivateGStreamerBase* player)
{
    player->cancelRepaint();
}
823
824 #if USE(GSTREAMER_GL)
// "new-sample" handler for the GL appsink: pull the sample and repaint with
// it. The GRefPtr adopts the reference returned by gst_app_sink_pull_sample()
// and keeps it alive across triggerRepaint().
GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}
831
// "new-preroll" handler for the GL appsink: same as newSampleCallback() but
// for preroll samples (paused-state frames).
GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}
838
// Invoked on FLUSH_START via the pad probe installed in createGLAppSink():
// drop the sample we are holding, since a flush may invalidate its memory.
void MediaPlayerPrivateGStreamerBase::flushCurrentBuffer()
{
    GST_DEBUG("Flushing video sample");
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    m_sample.clear();

    {
        LockHolder locker(m_platformLayerProxy->lock());

        // Drop the compositor's buffer but keep its texture, so the last frame
        // stays on screen instead of flashing blank.
        if (m_platformLayerProxy->isActive())
            m_platformLayerProxy->dropCurrentBufferWhilePreservingTexture();
    }
}
852 #endif
853
// Records the size requested by the player.
void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
{
    m_size = size;
}
858
// Software rendering path: draws the current sample into the graphics
// context. No-op when painting is disabled, the element is not visible, or no
// valid sample is held.
void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (context.paintingDisabled())
        return;

    if (!m_player->visible())
        return;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    ImagePaintingOptions paintingOptions(CompositeCopy);
    // Orientation is only applied on the accelerated path; the sink handles it
    // otherwise.
    if (m_renderingCanBeAccelerated)
        paintingOptions.m_orientationDescription.setImageOrientationEnum(m_videoSourceOrientation);

    RefPtr<ImageGStreamer> gstImage = ImageGStreamer::createImage(m_sample.get());
    if (!gstImage)
        return;

    // NOTE(review): the reinterpret_cast assumes whatever gstImage->image()
    // returns is layout-compatible with Image — confirm; if the types are
    // related by inheritance a static_cast (or no cast) would be safer.
    if (Image* image = reinterpret_cast<Image*>(gstImage->image()))
        context.drawImage(*image, rect, gstImage->rect(), paintingOptions);
}
882
883 #if USE(GSTREAMER_GL)
// Copies the current GL video frame into the caller-provided texture.
// Returns false when the copy cannot be performed (fallback sink in use,
// premultiplied alpha requested, no valid sample, or the frame cannot be
// GL-mapped).
bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    UNUSED_PARAM(context);

    if (m_usingFallbackVideoSink)
        return false;

    // Premultiplied alpha is not supported by the copier.
    if (premultiplyAlpha)
        return false;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);

    GstVideoInfo videoInfo;
    if (!getSampleVideoInfo(m_sample.get(), videoInfo))
        return false;

    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return false;

    IntSize size(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();
    // With GST_MAP_GL, plane 0 holds the source GL texture id rather than
    // pixel data.
    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);

    // The copier is created lazily and reused across calls.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    bool copied = m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);

    // Unmap must happen regardless of the copy result.
    gst_video_frame_unmap(&videoFrame);

    return copied;
}
919
// Produces a cairo GL surface wrapping a copy of the current video frame, or
// nullptr when unavailable. Only implemented for Cairo + accelerated 2D
// canvas builds.
NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
{
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
    if (m_usingFallbackVideoSink)
        return nullptr;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);

    GstVideoInfo videoInfo;
    if (!getSampleVideoInfo(m_sample.get(), videoInfo))
        return nullptr;

    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return nullptr;

    IntSize size(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    // NOTE(review): the result of makeContextCurrent() is ignored — if it can
    // fail, the copy below would run without a current context; confirm.
    GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
    context->makeContextCurrent();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    // With GST_MAP_GL, plane 0 holds the source GL texture id.
    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
    bool copied = m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, 0, GraphicsContext3D::TEXTURE_2D, 0, GraphicsContext3D::RGBA, GraphicsContext3D::RGBA, GraphicsContext3D::UNSIGNED_BYTE, false, m_videoSourceOrientation);
    gst_video_frame_unmap(&videoFrame);

    if (!copied)
        return nullptr;

    // Wrap the copier's result texture in a cairo surface owned by the caller.
    return adoptRef(cairo_gl_surface_create_for_texture(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, m_videoTextureCopier->resultTexture(), size.width(), size.height()));
#else
    return nullptr;
#endif
}
959 #endif // USE(GSTREAMER_GL)
960
961 void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(const ImageOrientation& orientation)
962 {
963     if (m_videoSourceOrientation == orientation)
964         return;
965
966     m_videoSourceOrientation = orientation;
967 }
968
// Fullscreen presentation is unconditionally supported by this backend.
bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
    return true;
}
973
// This backend exposes no platform-specific media object.
PlatformMedia MediaPlayerPrivateGStreamerBase::platformMedia() const
{
    return NoPlatformMedia;
}
978
979 MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
980 {
981     if (m_readyState == MediaPlayer::HaveNothing)
982         return MediaPlayer::Unknown;
983
984     if (isLiveStream())
985         return MediaPlayer::LiveStream;
986
987     return MediaPlayer::Download;
988 }
989
990 #if USE(GSTREAMER_GL)
// Builds the appsink used on the GL rendering path, wired to deliver samples
// and prerolls to this player. Returns nullptr if GStreamer is too old or the
// element cannot be created.
GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
{
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
    if (!appsink)
        return nullptr;

    // max-buffers=1: we only ever paint the most recent frame; emit-signals so
    // the new-sample/new-preroll callbacks below fire.
    g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
    g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
    g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);

    // Probe the sink pad for FLUSH_START so the held sample can be dropped
    // before its memory becomes invalid (see flushCurrentBuffer()).
    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(appsink, "sink"));
    gst_pad_add_probe (pad.get(), GST_PAD_PROBE_TYPE_EVENT_FLUSH, [] (GstPad*, GstPadProbeInfo* info,  gpointer userData) -> GstPadProbeReturn {
        if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) != GST_EVENT_FLUSH_START)
            return GST_PAD_PROBE_OK;

        auto* player = static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
        player->flushCurrentBuffer();
        return GST_PAD_PROBE_OK;
    }, this, nullptr);

    return appsink;
}
1016
// Assembles the GL video sink bin: glupload ! glcolorconvert ! appsink, with a
// ghost pad exposing the upload element's sink pad. Returns nullptr on any
// element-creation or linking failure.
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
{
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    gboolean result = TRUE;
    GstElement* videoSink = gst_bin_new(nullptr);
    GstElement* upload = gst_element_factory_make("glupload", nullptr);
    GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
    GstElement* appsink = createGLAppSink();

    if (!appsink || !upload || !colorconvert) {
        GST_WARNING("Failed to create GstGL elements");
        gst_object_unref(videoSink);

        // Elements not yet added to the bin are floating refs we still own.
        if (upload)
            gst_object_unref(upload);
        if (colorconvert)
            gst_object_unref(colorconvert);
        if (appsink)
            gst_object_unref(appsink);

        return nullptr;
    }

    // The bin takes ownership of the three elements from here on.
    gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);

    // Restrict the appsink to GL memory in the format the texture copier expects.
    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) " GST_GL_CAPS_FORMAT));

    result &= gst_element_link_pads(upload, "src", colorconvert, "sink");
    result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get());

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
    gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));

    if (!result) {
        GST_WARNING("Failed to link GstGL elements");
        // Unreffing the bin also releases the elements it now owns.
        gst_object_unref(videoSink);
        videoSink = nullptr;
    }
    return videoSink;
}
1059
// Lazily requests the GStreamer GL display context and the application GL
// context and hands both to the video sink. Each context is requested at most
// once and re-applied on every call if available.
void MediaPlayerPrivateGStreamerBase::ensureGLVideoSinkContext()
{
    if (!m_glDisplayElementContext)
        m_glDisplayElementContext = adoptGRef(requestGLContext(GST_GL_DISPLAY_CONTEXT_TYPE));

    if (m_glDisplayElementContext)
        gst_element_set_context(m_videoSink.get(), m_glDisplayElementContext.get());

    if (!m_glAppElementContext)
        m_glAppElementContext = adoptGRef(requestGLContext("gst.gl.app_context"));

    if (m_glAppElementContext)
        gst_element_set_context(m_videoSink.get(), m_glAppElementContext.get());
}
1074 #endif // USE(GSTREAMER_GL)
1075
// Creates the video sink to install in the pipeline: the GL sink bin when
// accelerated rendering is available, otherwise the WebKit fallback sink;
// either one is optionally wrapped in an fpsdisplaysink for frame statistics.
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    acceleratedRenderingStateChanged();

#if USE(GSTREAMER_GL)
    if (m_renderingCanBeAccelerated)
        m_videoSink = createVideoSinkGL();
#endif

    if (!m_videoSink) {
        // Software path: the fallback sink reports frames via the
        // repaint-requested / repaint-cancelled signals.
        m_usingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
        g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
    }

    GstElement* videoSink = nullptr;
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);

        // Turn off text overlay unless logging is enabled.
#if LOG_DISABLED
        g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#else
        if (!isLogChannelEnabled("Media"))
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#endif // LOG_DISABLED

        // Only usable if this fpsdisplaysink build lets us inject our sink.
        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);

    return videoSink;
}
1119
// Adopts the pipeline's stream-volume element, pushes the player's current
// volume/mute state to it (unless the platform owns the volume), and starts
// mirroring element-side changes back via notify signals.
void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
{
    ASSERT(!m_volumeElement);
    m_volumeElement = volume;

    // We don't set the initial volume because we trust the sink to keep it for us. See
    // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
    if (!m_player->platformVolumeConfigurationRequired()) {
        GST_DEBUG("Setting stream volume to %f", m_player->volume());
        g_object_set(m_volumeElement.get(), "volume", m_player->volume(), nullptr);
    } else
        GST_DEBUG("Not setting stream volume, trusting system one");

    GST_DEBUG("Setting stream muted %s",  toString(m_player->muted()).utf8().data());
    g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);

    // Swapped connection: callbacks receive `this` as their first argument.
    g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
    g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
}
1139
1140 unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
1141 {
1142     guint64 decodedFrames = 0;
1143     if (m_fpsSink)
1144         g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
1145     return static_cast<unsigned>(decodedFrames);
1146 }
1147
1148 unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
1149 {
1150     guint64 framesDropped = 0;
1151     if (m_fpsSink)
1152         g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
1153     return static_cast<unsigned>(framesDropped);
1154 }
1155
1156 unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
1157 {
1158     GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
1159     gint64 position = 0;
1160
1161     if (audioSink() && gst_element_query(audioSink(), query))
1162         gst_query_parse_position(query, 0, &position);
1163
1164     gst_query_unref(query);
1165     return static_cast<unsigned>(position);
1166 }
1167
1168 unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
1169 {
1170     GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
1171     gint64 position = 0;
1172
1173     if (gst_element_query(m_videoSink.get(), query))
1174         gst_query_parse_position(query, 0, &position);
1175
1176     gst_query_unref(query);
1177     return static_cast<unsigned>(position);
1178 }
1179
1180 #if ENABLE(ENCRYPTED_MEDIA)
// Stores the CDM instance for this player and wakes any thread waiting on
// m_protectionCondition for a CDM to become available.
void MediaPlayerPrivateGStreamerBase::cdmInstanceAttached(CDMInstance& instance)
{
    ASSERT(!m_cdmInstance);
    m_cdmInstance = &instance;
    GST_DEBUG("CDM instance %p set", m_cdmInstance.get());
    m_protectionCondition.notifyAll();
}
1188
// Clears the stored CDM instance (which must match the one being detached)
// and wakes any waiters on m_protectionCondition.
void MediaPlayerPrivateGStreamerBase::cdmInstanceDetached(CDMInstance& instance)
{
#ifdef NDEBUG
    // `instance` is only read by the ASSERT below.
    UNUSED_PARAM(instance);
#endif
    ASSERT(m_cdmInstance.get() == &instance);
    GST_DEBUG("detaching CDM instance %p", m_cdmInstance.get());
    m_cdmInstance = nullptr;
    m_protectionCondition.notifyAll();
}
1199
// Entry point from the EME machinery; delegates to the local-instance path
// after checking the instance matches the one attached to this player.
void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithInstance(CDMInstance& instance)
{
    ASSERT(m_cdmInstance.get() == &instance);
    GST_TRACE("instance %p, current stored %p", &instance, m_cdmInstance.get());
    attemptToDecryptWithLocalInstance();
}
1206
// Intentionally unimplemented in the base class; see the FIXME.
void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithLocalInstance()
{
    // FIXME.
}
1211
1212 void MediaPlayerPrivateGStreamerBase::dispatchDecryptionKey(GstBuffer* buffer)
1213 {
1214     bool eventHandled = gst_element_send_event(m_pipeline.get(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB,
1215         gst_structure_new("drm-cipher", "key", GST_TYPE_BUFFER, buffer, nullptr)));
1216     m_needToResendCredentials = m_handledProtectionEvents.size() > 0;
1217     GST_TRACE("emitted decryption cipher key on pipeline, event handled %s, need to resend credentials %s", boolForPrinting(eventHandled), boolForPrinting(m_needToResendCredentials));
1218 }
1219
// Processes a protection (encryption) event coming from the pipeline. Events
// already marked as handled are consumed here, optionally re-triggering
// decryption; anything else is currently unsupported.
void MediaPlayerPrivateGStreamerBase::handleProtectionEvent(GstEvent* event)
{
    if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
        GST_DEBUG("event %u already handled", GST_EVENT_SEQNUM(event));
        m_handledProtectionEvents.remove(GST_EVENT_SEQNUM(event));
        if (m_needToResendCredentials) {
            GST_DEBUG("resending credentials");
            attemptToDecryptWithLocalInstance();
        }
        return;
    }

    // Unhandled protection events should not reach this base implementation.
    const gchar* eventKeySystemId = nullptr;
    gst_event_parse_protection(event, &eventKeySystemId, nullptr, nullptr);
    GST_WARNING("FIXME: unhandled protection event for %s", eventKeySystemId);
    ASSERT_NOT_REACHED();
}
1237 #endif
1238
// Reports whether the given key system is supported: only ClearKey, and only
// in ENCRYPTED_MEDIA builds. The MIME type is currently only used for logging.
bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
{
    bool result = false;

#if ENABLE(ENCRYPTED_MEDIA)
    result = GStreamerEMEUtilities::isClearKeyKeySystem(keySystem);
#endif

    GST_DEBUG("checking for KeySystem support with %s and type %s: %s", keySystem.utf8().data(), mimeType.utf8().data(), boolForPrinting(result));
    return result;
}
1250
// Base implementation returns the caller-computed result unchanged, ignoring
// the parameters; presumably a port-specific refinement hook — confirm with
// overriders.
MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
{
    UNUSED_PARAM(parameters);
    return result;
}
1256
1257 }
1258
1259 #endif // USE(GSTREAMER)