1134406953cbd73a4ec78f147e80c556a13179ae
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamerBase.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
7  * Copyright (C) 2015, 2016 Metrological Group B.V.
8  *
9  * This library is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Library General Public
11  * License as published by the Free Software Foundation; either
12  * version 2 of the License, or (at your option) any later version.
13  *
14  * This library is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
17  * Library General Public License for more details.
18  *
19  * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
21  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
22  * Boston, MA 02110-1301, USA.
23  */
24
25 #include "config.h"
26 #include "MediaPlayerPrivateGStreamerBase.h"
27
28 #if ENABLE(VIDEO) && USE(GSTREAMER)
29
30 #include "GStreamerUtilities.h"
31 #include "GraphicsContext.h"
32 #include "GraphicsTypes.h"
33 #include "ImageGStreamer.h"
34 #include "ImageOrientation.h"
35 #include "IntRect.h"
36 #include "MediaPlayer.h"
37 #include "NotImplemented.h"
38 #include "VideoSinkGStreamer.h"
39 #include "WebKitWebSourceGStreamer.h"
40 #include <wtf/glib/GMutexLocker.h>
41 #include <wtf/glib/GUniquePtr.h>
42 #include <wtf/text/AtomicString.h>
43 #include <wtf/text/CString.h>
44 #include <wtf/MathExtras.h>
45
46 #include <gst/audio/streamvolume.h>
47 #include <gst/video/gstvideometa.h>
48
49 #if USE(GSTREAMER_GL)
50 #include <gst/app/gstappsink.h>
51 #define GST_USE_UNSTABLE_API
52 #include <gst/gl/gl.h>
53 #undef GST_USE_UNSTABLE_API
54
55 #include "GLContext.h"
56 #if USE(GLX)
57 #include "GLContextGLX.h"
58 #include <gst/gl/x11/gstgldisplay_x11.h>
59 #endif
60
61 #if USE(EGL)
62 #include "GLContextEGL.h"
63 #include <gst/gl/egl/gstgldisplay_egl.h>
64 #endif
65
66 #if PLATFORM(X11)
67 #include "PlatformDisplayX11.h"
68 #endif
69
70 #if PLATFORM(WAYLAND)
71 #include "PlatformDisplayWayland.h"
72 #endif
73
74 // gstglapi.h may include eglplatform.h and it includes X.h, which
75 // defines None, breaking MediaPlayer::None enum
76 #if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
77 #undef None
78 #endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
79 #include "VideoTextureCopierGStreamer.h"
80 #endif // USE(GSTREAMER_GL)
81
82 #if USE(TEXTURE_MAPPER_GL)
83 #include "BitmapTextureGL.h"
84 #include "BitmapTexturePool.h"
85 #include "TextureMapperGL.h"
86 #endif
87 #if USE(COORDINATED_GRAPHICS_THREADED)
88 #include "TextureMapperPlatformLayerBuffer.h"
89 #endif
90
91 #if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
92 #include <cairo-gl.h>
93 #endif
94
95 #if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
96 #include "SharedBuffer.h"
97 #include "WebKitClearKeyDecryptorGStreamer.h"
98 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
99 #include "UUID.h"
100 #include <runtime/JSCInlines.h>
101 #include <runtime/TypedArrayInlines.h>
102 #include <runtime/Uint8Array.h>
103 #endif
104 #endif
105
106 GST_DEBUG_CATEGORY(webkit_media_player_debug);
107 #define GST_CAT_DEFAULT webkit_media_player_debug
108
109 using namespace std;
110
111 namespace WebCore {
112
113 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
114 static AtomicString keySystemIdToUuid(const AtomicString&);
115 #endif
116
117 void registerWebKitGStreamerElements()
118 {
119 #if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
120     if (!webkitGstCheckVersion(1, 6, 1))
121         return;
122
123     GRefPtr<GstElementFactory> clearKeyDecryptorFactory = gst_element_factory_find("webkitclearkey");
124     if (!clearKeyDecryptorFactory)
125         gst_element_register(nullptr, "webkitclearkey", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_CK_DECRYPT);
126 #endif
127 }
128
// Computes the greatest common divisor of two integers via the iterative
// Euclidean algorithm. The result is always non-negative; gcd(0, 0) is 0.
static int greatestCommonDivisor(int a, int b)
{
    while (b) {
        int remainder = a % b;
        a = b;
        b = remainder;
    }

    return a < 0 ? -a : a;
}
139
140 #if USE(TEXTURE_MAPPER_GL)
141 static inline TextureMapperGL::Flags texMapFlagFromOrientation(const ImageOrientation& orientation)
142 {
143     switch (orientation) {
144     case DefaultImageOrientation:
145         return 0;
146     case OriginRightTop:
147         return TextureMapperGL::ShouldRotateTexture90;
148     case OriginBottomRight:
149         return TextureMapperGL::ShouldRotateTexture180;
150     case OriginLeftBottom:
151         return TextureMapperGL::ShouldRotateTexture270;
152     default:
153         ASSERT_NOT_REACHED();
154     }
155
156     return 0;
157 }
158 #endif
159
#if USE(COORDINATED_GRAPHICS_THREADED) && USE(GSTREAMER_GL)
// Keeps a GL-mapped GstVideoFrame alive for as long as the compositor holds
// the TextureMapperPlatformLayerBuffer wrapping its texture. The frame is
// mapped in the constructor and unmapped in the destructor, so the texture ID
// remains valid while the buffer is in flight.
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
    explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags)
    {
        GstVideoInfo videoInfo;
        if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
            return;

        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
        // Frames carrying an alpha channel additionally need blending when composited.
        m_flags = flags | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0);

        GstBuffer* buffer = gst_sample_get_buffer(sample);
        // Map the buffer as a GL texture (GST_MAP_GL): on success, data[0]
        // holds the GL texture name rather than raw pixel data.
        if (UNLIKELY(!gst_video_frame_map(&m_videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL))))
            return;

        m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
        m_isValid = true;
    }

    virtual ~GstVideoFrameHolder()
    {
        // Only unmap if the constructor successfully mapped the frame.
        if (UNLIKELY(!m_isValid))
            return;

        gst_video_frame_unmap(&m_videoFrame);
    }

    const IntSize& size() const { return m_size; }
    TextureMapperGL::Flags flags() const { return m_flags; }
    GLuint textureID() const { return m_textureID; }
    // False when the sample had no usable video info or the GL map failed.
    bool isValid() const { return m_isValid; }

private:
    GstVideoFrame m_videoFrame;
    IntSize m_size;
    TextureMapperGL::Flags m_flags;
    GLuint m_textureID;
    bool m_isValid { false };
};
#endif // USE(COORDINATED_GRAPHICS_THREADED) && USE(GSTREAMER_GL)
201
// Initializes base player state: nothing loaded, no network activity, and
// (until a sink is chosen) not using the fallback video sink.
MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
    : m_player(player)
    , m_fpsSink(0)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_networkState(MediaPlayer::Empty)
#if USE(GSTREAMER_GL) || USE(COORDINATED_GRAPHICS_THREADED)
    // Repaints requested from streaming threads are funneled to the main
    // run loop through this timer (see triggerRepaint()).
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
#endif
    , m_usingFallbackVideoSink(false)
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    , m_cdmSession(0)
#endif
{
    // m_sampleMutex guards m_sample, which is written from streaming threads
    // and read from the main/compositor threads.
    g_mutex_init(&m_sampleMutex);
#if USE(COORDINATED_GRAPHICS_THREADED)
    m_platformLayerProxy = adoptRef(new TextureMapperPlatformLayerProxy());
#endif
}
220
// Tears the player down: wakes any thread blocked on the key-request or draw
// conditions, disconnects signal handlers, and finally drives the pipeline to
// the NULL state so its resources are released.
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // Unblock handleSyncMessage() if it is waiting for a key request.
    m_protectionCondition.notifyOne();
#endif
    m_notifier.cancelPendingNotifications();

#if USE(GSTREAMER_GL) || USE(COORDINATED_GRAPHICS_THREADED)
    m_drawTimer.stop();
    {
        // Unblock any streaming thread waiting in triggerRepaint().
        LockHolder locker(m_drawMutex);
        m_drawCondition.notifyOne();
    }
#endif

    if (m_videoSink) {
        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
#if USE(GSTREAMER_GL)
        // The GL sink is a bin wrapping an appsink whose callbacks were
        // connected separately; disconnect those too.
        if (GST_IS_BIN(m_videoSink.get())) {
            GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
            g_signal_handlers_disconnect_by_data(appsink.get(), this);
        }
#endif
    }

    g_mutex_clear(&m_sampleMutex);

    m_player = nullptr;

    if (m_volumeElement)
        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    if (client())
        client()->platformLayerWillBeDestroyed();
#endif

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    m_cdmSession = nullptr;
#endif

    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
}
265
// Stores the playback pipeline this player wraps. The GRefPtr assignment
// takes a new reference; any previously held pipeline reference is dropped.
void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
{
    m_pipeline = pipeline;
}
270
271 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
272 static std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> extractEventsAndSystemsFromMessage(GstMessage* message)
273 {
274     const GstStructure* structure = gst_message_get_structure(message);
275
276     const GValue* streamEncryptionAllowedSystemsValue = gst_structure_get_value(structure, "stream-encryption-systems");
277     ASSERT(streamEncryptionAllowedSystemsValue && G_VALUE_HOLDS(streamEncryptionAllowedSystemsValue, G_TYPE_STRV));
278     const char** streamEncryptionAllowedSystems = reinterpret_cast<const char**>(g_value_get_boxed(streamEncryptionAllowedSystemsValue));
279     ASSERT(streamEncryptionAllowedSystems);
280     Vector<String> streamEncryptionAllowedSystemsVector;
281     unsigned i;
282     for (i = 0; streamEncryptionAllowedSystems[i]; ++i)
283         streamEncryptionAllowedSystemsVector.append(streamEncryptionAllowedSystems[i]);
284
285     const GValue* streamEncryptionEventsList = gst_structure_get_value(structure, "stream-encryption-events");
286     ASSERT(streamEncryptionEventsList && GST_VALUE_HOLDS_LIST(streamEncryptionEventsList));
287     unsigned streamEncryptionEventsListSize = gst_value_list_get_size(streamEncryptionEventsList);
288     Vector<GRefPtr<GstEvent>> streamEncryptionEventsVector;
289     for (i = 0; i < streamEncryptionEventsListSize; ++i)
290         streamEncryptionEventsVector.append(GRefPtr<GstEvent>(static_cast<GstEvent*>(g_value_get_boxed(gst_value_list_get_value(streamEncryptionEventsList, i)))));
291
292     return std::make_pair(streamEncryptionEventsVector, streamEncryptionAllowedSystemsVector);
293 }
294 #endif
295
// Handles synchronous bus messages. Only GST_MESSAGE_NEED_CONTEXT is acted
// upon: GL context requests are answered immediately; with legacy EME, a
// decryption-system request blocks the streaming thread (up to 4 seconds)
// until the page issues a key request. Returns true when the message was
// fully handled here.
bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
{
    UNUSED_PARAM(message);
    if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
        return false;

    const gchar* contextType;
    gst_message_parse_context_type(message, &contextType);

#if USE(GSTREAMER_GL)
    GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType, this));
    if (elementContext) {
        gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
        return true;
    }
#endif // USE(GSTREAMER_GL)

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
        // This handler blocks waiting on the main thread, so it must never
        // itself run on the main thread (deadlock).
        if (isMainThread()) {
            GST_ERROR("can't handle drm-preferred-decryption-system-id need context message in the main thread");
            ASSERT_NOT_REACHED();
            return false;
        }
        GST_DEBUG("handling drm-preferred-decryption-system-id need context message");
        std::pair<Vector<GRefPtr<GstEvent>>, Vector<String>> streamEncryptionInformation = extractEventsAndSystemsFromMessage(message);
        GST_TRACE("found %" G_GSIZE_FORMAT " protection events", streamEncryptionInformation.first.size());
        Vector<uint8_t> concatenatedInitDataChunks;
        unsigned concatenatedInitDataChunksNumber = 0;
        String eventKeySystemIdString;
        for (auto& event : streamEncryptionInformation.first) {
            GST_TRACE("handling protection event %u", GST_EVENT_SEQNUM(event.get()));
            const char* eventKeySystemId = nullptr;
            GstBuffer* data = nullptr;
            gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);

            // Here we receive the DRM init data from the pipeline: we will emit
            // the needkey event with that data and the browser might create a
            // CDMSession from this event handler. If such a session was created
            // We will emit the message event from the session to provide the
            // DRM challenge to the browser and wait for an update. If on the
            // contrary no session was created we won't wait and let the pipeline
            // error out by itself.
            GstMapInfo mapInfo;
            if (!gst_buffer_map(data, &mapInfo, GST_MAP_READ)) {
                GST_WARNING("cannot map %s protection data", eventKeySystemId);
                break;
            }

            GST_TRACE("appending init data for %s of size %" G_GSIZE_FORMAT, eventKeySystemId, mapInfo.size);
            GST_MEMDUMP("init data", reinterpret_cast<const unsigned char *>(mapInfo.data), mapInfo.size);
            concatenatedInitDataChunks.append(mapInfo.data, mapInfo.size);
            ++concatenatedInitDataChunksNumber;
            eventKeySystemIdString = eventKeySystemId;
            // Events whose key system the stream already declares are marked
            // handled so they are not re-processed later.
            if (streamEncryptionInformation.second.contains(eventKeySystemId)) {
                GST_TRACE("considering init data handled for %s", eventKeySystemId);
                m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
            }
            gst_buffer_unmap(data, &mapInfo);
        }

        if (!concatenatedInitDataChunksNumber)
            return false;

        // With chunks from several events there is no single key system to
        // report, so an empty identifier is sent instead.
        if (concatenatedInitDataChunksNumber > 1)
            eventKeySystemIdString = emptyString();

        RunLoop::main().dispatch([this, eventKeySystemIdString, initData = WTFMove(concatenatedInitDataChunks)] {
            GST_DEBUG("scheduling keyNeeded event for %s with concatenated init datas size of %" G_GSIZE_FORMAT, eventKeySystemIdString.utf8().data(), initData.size());
            GST_MEMDUMP("init datas", initData.data(), initData.size());

            // FIXME: Provide a somehow valid sessionId.
            RefPtr<Uint8Array> initDataArray = Uint8Array::create(initData.data(), initData.size());
            needKey(initDataArray);
        });

        GST_INFO("waiting for a key request to arrive");
        LockHolder lock(m_protectionMutex);
        // Give the page up to 4 seconds to issue a key request. The
        // destructor also signals this condition to unblock teardown.
        m_protectionCondition.waitFor(m_protectionMutex, Seconds(4), [this] {
            return !this->m_lastGenerateKeyRequestKeySystemUuid.isEmpty();
        });
        if (!m_lastGenerateKeyRequestKeySystemUuid.isEmpty()) {
            GST_INFO("got a key request, continuing with %s on %s", m_lastGenerateKeyRequestKeySystemUuid.utf8().data(), GST_MESSAGE_SRC_NAME(message));

            // Answer the requesting element with the chosen decryption system.
            GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
            GstStructure* contextStructure = gst_context_writable_structure(context.get());
            gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, m_lastGenerateKeyRequestKeySystemUuid.utf8().data(), nullptr);
            gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
        } else
            GST_WARNING("did not get a proper key request");

        return true;
    }
#endif // ENABLE(LEGACY_ENCRYPTED_MEDIA)

    return false;
}
393
#if USE(GSTREAMER_GL)
// Answers GStreamer's need-context queries for GL contexts. Returns a new
// GstContext wrapping either the shared GstGLDisplay or the application GL
// context, or nullptr when the type is not GL-related or the wrapped GL
// context could not be created.
GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const gchar* contextType, MediaPlayerPrivateGStreamerBase* player)
{
    if (!player->ensureGstGLContext())
        return nullptr;

    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(displayContext, player->gstGLDisplay());
        return displayContext;
    }

    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(appContext);
        // The GType macro for GL contexts was renamed in GStreamer 1.11.
#if GST_CHECK_VERSION(1, 11, 0)
        gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, player->gstGLContext(), nullptr);
#else
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, player->gstGLContext(), nullptr);
#endif
        return appContext;
    }

    return nullptr;
}
419
// Lazily wraps WebKit's shared compositing GL context (and the platform's
// X11/Wayland display) in GStreamer GL objects. Returns true once
// m_glContext is available, false if the native context handle is missing.
bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
{
    if (m_glContext)
        return true;

    auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();
    if (!m_glDisplay) {
#if PLATFORM(X11)
#if USE(GLX)
        if (is<PlatformDisplayX11>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
#elif USE(EGL)
        if (is<PlatformDisplayX11>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
#endif
#endif

#if PLATFORM(WAYLAND)
        if (is<PlatformDisplayWayland>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
#endif

        ASSERT(m_glDisplay);
    }

    GLContext* webkitContext = sharedDisplay.sharingGLContext();
    // EGL and GLX are mutually exclusive, no need for ifdefs here.
    GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;

#if USE(OPENGL_ES_2)
    GstGLAPI glAPI = GST_GL_API_GLES2;
#elif USE(OPENGL)
    GstGLAPI glAPI = GST_GL_API_OPENGL;
#else
    // A build without OPENGL or OPENGL_ES_2 cannot reach here.
    ASSERT_NOT_REACHED();
#endif

    PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
    if (!contextHandle)
        return false;

    // Wrap (do not take ownership of) the existing WebKit GL context handle.
    m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);

    return true;
}
#endif // USE(GSTREAMER_GL)
466
// Returns the size of the video: the original coded size corrected by the
// pixel aspect ratio (and, when accelerated compositing rotates the frame
// 90/270 degrees, transposed). The computed value is cached in m_videoSize.
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
    if (!hasVideo())
        return FloatSize();

    // Use the cached value if it was already computed for this stream.
    if (!m_videoSize.isEmpty())
        return m_videoSize;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return FloatSize();

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return FloatSize();


    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size, if this fails the
    // video-sink has likely not yet negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return FloatSize();

#if USE(TEXTURE_MAPPER_GL)
    // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
    if (m_renderingCanBeAccelerated) {
        if (m_videoSourceOrientation.usesWidthAsHeight())
            originalSize = originalSize.transposedSize();
    }
#endif

    GST_DEBUG("Original video size: %dx%d", originalSize.width(), originalSize.height());
    GST_DEBUG("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate DAR based on PAR and video size.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

    // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        GST_DEBUG("Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        GST_DEBUG("Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        GST_DEBUG("Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    GST_DEBUG("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}
538
539 void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
540 {
541     if (!m_volumeElement)
542         return;
543
544     GST_DEBUG("Setting volume: %f", volume);
545     gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
546 }
547
548 float MediaPlayerPrivateGStreamerBase::volume() const
549 {
550     if (!m_volumeElement)
551         return 0;
552
553     return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
554 }
555
556
557 void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
558 {
559     if (!m_player || !m_volumeElement)
560         return;
561     double volume;
562     volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
563     // get_volume() can return values superior to 1.0 if the user
564     // applies software user gain via third party application (GNOME
565     // volume control for instance).
566     volume = CLAMP(volume, 0.0, 1.0);
567     m_player->volumeChanged(static_cast<float>(volume));
568 }
569
// Signal handler (may run off the main thread) for notify::volume on
// m_volumeElement; bounces the update onto the main thread via m_notifier.
void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::volume signal.
    GST_DEBUG("Volume changed to: %f", player->volume());

    player->m_notifier.notify(MainThreadNotification::VolumeChanged, [player] { player->notifyPlayerOfVolumeChange(); });
}
577
// Returns the network state most recently cached by this player.
MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
    return m_networkState;
}
582
// Returns the ready state most recently cached by this player.
MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
    return m_readyState;
}
587
// Not implemented for the GStreamer backend.
void MediaPlayerPrivateGStreamerBase::sizeChanged()
{
    notImplemented();
}
592
593 void MediaPlayerPrivateGStreamerBase::setMuted(bool muted)
594 {
595     if (!m_volumeElement)
596         return;
597
598     g_object_set(m_volumeElement.get(), "mute", muted, NULL);
599 }
600
601 bool MediaPlayerPrivateGStreamerBase::muted() const
602 {
603     if (!m_volumeElement)
604         return false;
605
606     bool muted;
607     g_object_get(m_volumeElement.get(), "mute", &muted, NULL);
608     return muted;
609 }
610
// Main-thread handler for the notify::mute signal: reads the pipeline's mute
// flag (a gboolean) and forwards it to the MediaPlayer.
void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
{
    if (!m_player || !m_volumeElement)
        return;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, NULL);
    m_player->muteChanged(static_cast<bool>(muted));
}
620
// Signal handler (may run off the main thread) for notify::mute on
// m_volumeElement; bounces the update onto the main thread via m_notifier.
void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::mute signal.
    player->m_notifier.notify(MainThreadNotification::MuteChanged, [player] { player->notifyPlayerOfMute(); });
}
626
627 void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
628 {
629     m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled() && m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player);
630 }
631
#if USE(TEXTURE_MAPPER_GL)
// Uploads the current sample into |texture|. Tries the
// GstVideoGLTextureUploadMeta fast path first (direct GL upload); otherwise
// maps the frame and copies its pixels through BitmapTextureGL.
void MediaPlayerPrivateGStreamerBase::updateTexture(BitmapTextureGL& texture, GstVideoInfo& videoInfo)
{
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());

    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
        if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
            guint ids[4] = { texture.id(), 0, 0, 0 };

            // On success the buffer was uploaded directly; nothing to copy.
            if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                return;
        }
    }

    // Right now the TextureMapper only supports chromas with one plane
    ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);

    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
        return;

    int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
    const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
    texture.updateContents(srcData, WebCore::IntRect(0, 0, GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo)), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
    gst_video_frame_unmap(&videoFrame);
}
#endif
660
#if USE(COORDINATED_GRAPHICS_THREADED)
// Hands the current sample to the compositor via the platform layer proxy.
// On the GSTREAMER_GL path the frame's GL texture is wrapped (zero-copy) in
// a GstVideoFrameHolder; otherwise pixels are copied into a pooled texture.
void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
#if !USE(GSTREAMER_GL)
    // Without GSTREAMER_GL, triggerRepaint() blocks on m_drawCondition until
    // this function finishes; this RAII helper notifies it on every exit path.
    class ConditionNotifier {
    public:
        ConditionNotifier(Lock& lock, Condition& condition)
            : m_locker(lock), m_condition(condition)
        {
        }
        ~ConditionNotifier()
        {
            m_condition.notifyOne();
        }
    private:
        LockHolder m_locker;
        Condition& m_condition;
    };
    ConditionNotifier notifier(m_drawMutex, m_drawCondition);
#endif

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    LockHolder holder(m_platformLayerProxy->lock());

    if (!m_platformLayerProxy->isActive()) {
        // Consume the buffer (so it gets eventually unreffed) but keep the rest of the info.
        const GstStructure* info = gst_sample_get_info(m_sample.get());
        GstStructure* infoCopy = nullptr;
        if (info)
            infoCopy = gst_structure_copy(info);
        m_sample = adoptGRef(gst_sample_new(nullptr, gst_sample_get_caps(m_sample.get()),
            gst_sample_get_segment(m_sample.get()), infoCopy));
        return;
    }

#if USE(GSTREAMER_GL)
    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation));
    if (UNLIKELY(!frameHolder->isValid()))
        return;

    std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(frameHolder->textureID(), frameHolder->size(), frameHolder->flags());
    // The holder keeps the GL frame mapped until the compositor releases the buffer.
    layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
    m_platformLayerProxy->pushNextBuffer(WTFMove(layerBuffer));
#else
    GstVideoInfo videoInfo;
    if (UNLIKELY(!getSampleVideoInfo(m_sample.get(), videoInfo)))
        return;

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    // Reuse a pooled buffer if one of the right size is free; otherwise
    // create a fresh texture (lazily creating the GL context first).
    std::unique_ptr<TextureMapperPlatformLayerBuffer> buffer = m_platformLayerProxy->getAvailableBuffer(size, GraphicsContext3D::DONT_CARE);
    if (UNLIKELY(!buffer)) {
        if (UNLIKELY(!m_context3D))
            m_context3D = GraphicsContext3D::create(GraphicsContext3DAttributes(), nullptr, GraphicsContext3D::RenderToCurrentGLContext);

        auto texture = BitmapTextureGL::create(*m_context3D);
        texture->reset(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
        buffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
    }
    updateTexture(buffer->textureGL(), videoInfo);
    buffer->setExtraFlags(texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0));
    m_platformLayerProxy->pushNextBuffer(WTFMove(buffer));
#endif
}
#endif
728
// Main-thread repaint entry point (also the m_drawTimer callback). Either
// asks the accelerated-compositing client to redisplay the platform layer or
// falls back to a regular MediaPlayer repaint; in both cases it then wakes
// the streaming thread blocked in triggerRepaint().
void MediaPlayerPrivateGStreamerBase::repaint()
{
    ASSERT(m_sample);
    ASSERT(isMainThread());

#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    if (m_renderingCanBeAccelerated && client()) {
        client()->setPlatformLayerNeedsDisplay();
#if USE(GSTREAMER_GL)
        // Unblock the streaming thread waiting in triggerRepaint().
        LockHolder lock(m_drawMutex);
        m_drawCondition.notifyOne();
#endif
        return;
    }
#endif

    m_player->repaint();

#if USE(GSTREAMER_GL) || USE(COORDINATED_GRAPHICS_THREADED)
    // Unblock the streaming thread waiting in triggerRepaint().
    LockHolder lock(m_drawMutex);
    m_drawCondition.notifyOne();
#endif
}
752
// Called from a streaming thread with each new sample. Stores the sample
// under m_sampleMutex, schedules the actual repaint on the appropriate
// thread, and — on the paths that hand the sample to another thread — blocks
// on m_drawCondition until that repaint has been processed.
void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
    bool triggerResize;
    {
        WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
        // The first sample is the earliest point where video dimensions are known.
        triggerResize = !m_sample;
        m_sample = sample;
    }

    if (triggerResize) {
        GST_DEBUG("First sample reached the sink, triggering video dimensions update");
        m_notifier.notify(MainThreadNotification::SizeChanged, [this] { m_player->sizeChanged(); });
    }

#if USE(COORDINATED_GRAPHICS_THREADED)
    if (!m_renderingCanBeAccelerated) {
        // Non-accelerated: repaint on the main thread and wait for it.
        LockHolder locker(m_drawMutex);
        m_drawTimer.startOneShot(0);
        m_drawCondition.wait(m_drawMutex);
        return;
    }

#if USE(GSTREAMER_GL)
    pushTextureToCompositor();
#else
    {
        // Copy the texture on the compositor thread and wait for completion.
        LockHolder lock(m_drawMutex);
        if (!m_platformLayerProxy->scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); }))
            return;
        m_drawCondition.wait(m_drawMutex);
    }
#endif
    return;
#else
#if USE(GSTREAMER_GL)
    {
        ASSERT(!isMainThread());

        // Schedule the repaint on the main thread and wait for it.
        LockHolder locker(m_drawMutex);
        m_drawTimer.startOneShot(0);
        m_drawCondition.wait(m_drawMutex);
    }
#else
    repaint();
#endif
#endif
}
800
// Fallback video sink callback, invoked on a streaming thread with each new
// sample to render.
void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
{
    player->triggerRepaint(sample);
}
805
806 #if USE(GSTREAMER_GL)
807 GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
808 {
809     GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
810     player->triggerRepaint(sample.get());
811     return GST_FLOW_OK;
812 }
813
814 GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
815 {
816     GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
817     player->triggerRepaint(sample.get());
818     return GST_FLOW_OK;
819 }
820 #endif
821
void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
{
    // Record the size requested by the media player client.
    m_size = size;
}
826
void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
{
    // Software paint path: draws the current sample into the graphics context.
    if (context.paintingDisabled())
        return;

    if (!m_player->visible())
        return;

    // Hold the sample mutex: the streaming thread may replace m_sample at any time.
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    ImagePaintingOptions paintingOptions(CompositeCopy);
    if (m_renderingCanBeAccelerated)
        paintingOptions.m_orientationDescription.setImageOrientationEnum(m_videoSourceOrientation);

    RefPtr<ImageGStreamer> gstImage = ImageGStreamer::createImage(m_sample.get());
    if (!gstImage)
        return;

    // NOTE(review): the reinterpret_cast to Image* looks fragile; if
    // ImageGStreamer::image() returns an Image subclass, an implicit upcast
    // or static_cast would be safer — confirm the return type.
    if (Image* image = reinterpret_cast<Image*>(gstImage->image().get()))
        context.drawImage(*image, rect, gstImage->rect(), paintingOptions);
}
850
851 #if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
void MediaPlayerPrivateGStreamerBase::paintToTextureMapper(TextureMapper& textureMapper, const FloatRect& targetRect, const TransformationMatrix& matrix, float opacity)
{
    // Accelerated paint path: draws the current sample through the texture mapper.
    if (!m_player->visible())
        return;

    if (m_usingFallbackVideoSink) {
        // Fallback-sink path: upload the sample into a pooled bitmap texture
        // while holding the sample mutex, then draw it outside the lock.
        RefPtr<BitmapTexture> texture;
        IntSize size;
        TextureMapperGL::Flags flags;
        {
            WTF::GMutexLocker<GMutex> lock(m_sampleMutex);

            GstVideoInfo videoInfo;
            if (UNLIKELY(!getSampleVideoInfo(m_sample.get(), videoInfo)))
                return;

            size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
            flags = texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0);
            texture = textureMapper.acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
            updateTexture(static_cast<BitmapTextureGL&>(*texture), videoInfo);
        }
        TextureMapperGL& texmapGL = reinterpret_cast<TextureMapperGL&>(textureMapper);
        BitmapTextureGL* textureGL = static_cast<BitmapTextureGL*>(texture.get());
        texmapGL.drawTexture(textureGL->id(), flags, textureGL->size(), targetRect, matrix, opacity);
        return;
    }

#if USE(GSTREAMER_GL)
    // GL-sink path: the sample's buffer already lives in a GL texture; map it
    // with GST_MAP_GL to obtain the texture id and draw it directly.
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);

    GstVideoInfo videoInfo;
    if (!getSampleVideoInfo(m_sample.get(), videoInfo))
        return;

    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return;

    // With GST_MAP_GL the first plane's data pointer holds the GL texture id.
    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
    TextureMapperGL::Flags flags = texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0);

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    TextureMapperGL& textureMapperGL = reinterpret_cast<TextureMapperGL&>(textureMapper);
    textureMapperGL.drawTexture(textureID, flags, size, targetRect, matrix, opacity);
    gst_video_frame_unmap(&videoFrame);
#endif
}
900 #endif
901
902 #if USE(GSTREAMER_GL)
903 #if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
904 // This should be called with the sample mutex locked.
905 GLContext* MediaPlayerPrivateGStreamerBase::prepareContextForCairoPaint(GstVideoInfo& videoInfo, IntSize& size, IntSize& rotatedSize)
906 {
907     if (!getSampleVideoInfo(m_sample.get(), videoInfo))
908         return nullptr;
909
910     GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
911     context->makeContextCurrent();
912
913     // Thread-awareness is a huge performance hit on non-Intel drivers.
914     cairo_gl_device_set_thread_aware(context->cairoDevice(), FALSE);
915
916     size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
917     rotatedSize = m_videoSourceOrientation.usesWidthAsHeight() ? size.transposedSize() : size;
918
919     return context;
920 }
921
// This should be called with the sample mutex locked.
bool MediaPlayerPrivateGStreamerBase::paintToCairoSurface(cairo_surface_t* outputSurface, cairo_device_t* device, GstVideoInfo& videoInfo, const IntSize& size, const IntSize& rotatedSize, bool flipY)
{
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return false;

    // With GST_MAP_GL the first plane's data pointer holds the GL texture id;
    // wrap it in a Cairo-GL surface so it can be used as a paint source.
    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
    RefPtr<cairo_surface_t> surface = adoptRef(cairo_gl_surface_create_for_texture(device, CAIRO_CONTENT_COLOR_ALPHA, textureID, size.width(), size.height()));
    RefPtr<cairo_t> cr = adoptRef(cairo_create(outputSurface));

    // Rotate the destination context so the source frame lands upright. Each
    // case translates to the output center, rotates, and translates back with
    // the dimensions appropriate for that rotation.
    switch (m_videoSourceOrientation) {
    case DefaultImageOrientation:
        break;
    case OriginRightTop:
        // 90 degrees.
        cairo_translate(cr.get(), rotatedSize.width() * 0.5, rotatedSize.height() * 0.5);
        cairo_rotate(cr.get(), piOverTwoDouble);
        cairo_translate(cr.get(), -rotatedSize.height() * 0.5, -rotatedSize.width() * 0.5);
        break;
    case OriginBottomRight:
        // 180 degrees.
        cairo_translate(cr.get(), rotatedSize.width() * 0.5, rotatedSize.height() * 0.5);
        cairo_rotate(cr.get(), piDouble);
        cairo_translate(cr.get(), -rotatedSize.width() * 0.5, -rotatedSize.height() * 0.5);
        break;
    case OriginLeftBottom:
        // 270 degrees.
        cairo_translate(cr.get(), rotatedSize.width() * 0.5, rotatedSize.height() * 0.5);
        cairo_rotate(cr.get(), 3 * piOverTwoDouble);
        cairo_translate(cr.get(), -rotatedSize.height() * 0.5, -rotatedSize.width() * 0.5);
        break;
    default:
        ASSERT_NOT_REACHED();
        break;
    }

    if (flipY) {
        // Mirror vertically for targets that expect a bottom-up image.
        cairo_scale(cr.get(), 1.0f, -1.0f);
        cairo_translate(cr.get(), 0.0f, -size.height());
    }

    cairo_set_source_surface(cr.get(), surface.get(), 0, 0);
    cairo_set_operator(cr.get(), CAIRO_OPERATOR_SOURCE);
    cairo_paint(cr.get());

    gst_video_frame_unmap(&videoFrame);

    return true;
}
970 #endif // USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
971
bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
#if USE(GSTREAMER_GL)
    UNUSED_PARAM(context);

    // The fallback sink keeps frames in system memory; there is no source GL
    // texture to copy from.
    if (m_usingFallbackVideoSink)
        return false;

    // Alpha premultiplication is not supported by this copy path.
    if (premultiplyAlpha)
        return false;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);

    GstVideoInfo videoInfo;
    if (!getSampleVideoInfo(m_sample.get(), videoInfo))
        return false;

    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return false;

    IntSize size(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();
    // With GST_MAP_GL the first plane's data pointer holds the GL texture id.
    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);

    // Lazily create the copier used to blit between GL textures.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>();

    bool copied = m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);

    gst_video_frame_unmap(&videoFrame);

    return copied;
#else
    // NOTE(review): this function is already inside a USE(GSTREAMER_GL)
    // region, so this branch appears unreachable — confirm and consider
    // dropping the inner guard.
    return false;
#endif
}
1011
1012 NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
1013 {
1014 #if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
1015     if (m_usingFallbackVideoSink)
1016         return nullptr;
1017
1018     GstVideoInfo videoInfo;
1019     IntSize size, rotatedSize;
1020     WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
1021     GLContext* context = prepareContextForCairoPaint(videoInfo, size, rotatedSize);
1022     if (!context)
1023         return nullptr;
1024
1025     RefPtr<cairo_surface_t> rotatedSurface = adoptRef(cairo_gl_surface_create(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, rotatedSize.width(), rotatedSize.height()));
1026     if (!paintToCairoSurface(rotatedSurface.get(), context->cairoDevice(), videoInfo, size, rotatedSize, false))
1027         return nullptr;
1028
1029     return rotatedSurface;
1030 #else
1031     return nullptr;
1032 #endif
1033 }
1034 #endif
1035
1036 void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(const ImageOrientation& orientation)
1037 {
1038     if (m_videoSourceOrientation == orientation)
1039         return;
1040
1041     m_videoSourceOrientation = orientation;
1042 }
1043
bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
    // Fullscreen presentation is always reported as available by this backend.
    return true;
}
1048
PlatformMedia MediaPlayerPrivateGStreamerBase::platformMedia() const
{
    // No platform media object is exposed for GStreamer-backed players.
    return NoPlatformMedia;
}
1053
1054 MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
1055 {
1056     if (m_readyState == MediaPlayer::HaveNothing)
1057         return MediaPlayer::Unknown;
1058
1059     if (isLiveStream())
1060         return MediaPlayer::LiveStream;
1061
1062     return MediaPlayer::Download;
1063 }
1064
1065 #if USE(GSTREAMER_GL)
1066 GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
1067 {
1068     if (!webkitGstCheckVersion(1, 8, 0))
1069         return nullptr;
1070
1071     GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
1072     if (!appsink)
1073         return nullptr;
1074
1075     g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
1076     g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
1077     g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);
1078
1079     return appsink;
1080 }
1081
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
{
    // Builds the GL video sink bin: glupload ! glcolorconvert ! appsink,
    // restricted to RGBA frames living in GL memory.
    // FIXME: Currently it's not possible to get the video frames and caps using this approach until
    // the pipeline gets into playing state. Due to this, trying to grab a frame and painting it by some
    // other mean (canvas or webgl) before playing state can result in a crash.
    // This is being handled in https://bugs.webkit.org/show_bug.cgi?id=159460.
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    gboolean result = TRUE;
    GstElement* videoSink = gst_bin_new(nullptr);
    GstElement* upload = gst_element_factory_make("glupload", nullptr);
    GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
    GstElement* appsink = createGLAppSink();

    if (!appsink || !upload || !colorconvert) {
        GST_WARNING("Failed to create GstGL elements");
        gst_object_unref(videoSink);

        // The elements were not yet added to the bin, so each one that was
        // successfully created must be released individually.
        if (upload)
            gst_object_unref(upload);
        if (colorconvert)
            gst_object_unref(colorconvert);
        if (appsink)
            gst_object_unref(appsink);

        return nullptr;
    }

    gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);

    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) { RGBA }"));

    result &= gst_element_link_pads(upload, "src", colorconvert, "sink");
    result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get());

    // Expose the upload element's sink pad as the bin's own sink pad.
    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
    gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));

    if (!result) {
        GST_WARNING("Failed to link GstGL elements");
        // Unreffing the bin also releases the elements it now owns.
        gst_object_unref(videoSink);
        videoSink = nullptr;
    }
    return videoSink;
}
1128 #endif
1129
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    // Creates the sink used by the pipeline: a GL sink when acceleration is
    // available, otherwise the WebKit software sink, optionally wrapped in
    // fpsdisplaysink for frame statistics.
    acceleratedRenderingStateChanged();

#if USE(GSTREAMER_GL)
    if (m_renderingCanBeAccelerated)
        m_videoSink = createVideoSinkGL();
#endif

    // Fall back to the software sink when no GL sink could be created.
    if (!m_videoSink) {
        m_usingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
    }

    // fpsdisplaysink provides the counters read by decodedFrameCount() and
    // droppedFrameCount().
    GstElement* videoSink = nullptr;
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);

        // Turn off text overlay unless logging is enabled.
#if LOG_DISABLED
        g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#else
        if (!isLogChannelEnabled("Media"))
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#endif // LOG_DISABLED

        // Only usable if the element exposes a "video-sink" property to chain
        // to the real sink; otherwise discard it.
        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);

    return videoSink;
}
1172
1173 void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
1174 {
1175     ASSERT(!m_volumeElement);
1176     m_volumeElement = volume;
1177
1178     // We don't set the initial volume because we trust the sink to keep it for us. See
1179     // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
1180     if (!m_player->platformVolumeConfigurationRequired()) {
1181         GST_DEBUG("Setting stream volume to %f", m_player->volume());
1182         g_object_set(m_volumeElement.get(), "volume", m_player->volume(), NULL);
1183     } else
1184         GST_DEBUG("Not setting stream volume, trusting system one");
1185
1186     GST_DEBUG("Setting stream muted %d",  m_player->muted());
1187     g_object_set(m_volumeElement.get(), "mute", m_player->muted(), NULL);
1188
1189     g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
1190     g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
1191 }
1192
1193 unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
1194 {
1195     guint64 decodedFrames = 0;
1196     if (m_fpsSink)
1197         g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, NULL);
1198     return static_cast<unsigned>(decodedFrames);
1199 }
1200
1201 unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
1202 {
1203     guint64 framesDropped = 0;
1204     if (m_fpsSink)
1205         g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, NULL);
1206     return static_cast<unsigned>(framesDropped);
1207 }
1208
1209 unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
1210 {
1211     GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
1212     gint64 position = 0;
1213
1214     if (audioSink() && gst_element_query(audioSink(), query))
1215         gst_query_parse_position(query, 0, &position);
1216
1217     gst_query_unref(query);
1218     return static_cast<unsigned>(position);
1219 }
1220
1221 unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
1222 {
1223     GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
1224     gint64 position = 0;
1225
1226     if (gst_element_query(m_videoSink.get(), query))
1227         gst_query_parse_position(query, 0, &position);
1228
1229     gst_query_unref(query);
1230     return static_cast<unsigned>(position);
1231 }
1232
1233 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
void MediaPlayerPrivateGStreamerBase::needKey(RefPtr<Uint8Array> initData)
{
    // Forward the initialization data to the player; keyNeeded() returns
    // false when nothing is listening for encrypted-media events.
    if (!m_player->keyNeeded(initData.get()))
        GST_INFO("no event handler for key needed");
}
1239
void MediaPlayerPrivateGStreamerBase::setCDMSession(CDMSession* session)
{
    // Store a non-owning pointer to the active CDM session (may be null).
    GST_DEBUG("setting CDM session to %p", session);
    m_cdmSession = session;
}
1245
void MediaPlayerPrivateGStreamerBase::keyAdded()
{
    // No-op: the base player takes no action when a key is added.
}
1249
void MediaPlayerPrivateGStreamerBase::handleProtectionEvent(GstEvent* event)
{
    // Events handled elsewhere are tracked by sequence number; consume the
    // record and skip re-processing them.
    if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
        GST_DEBUG("event %u already handled", GST_EVENT_SEQNUM(event));
        m_handledProtectionEvents.remove(GST_EVENT_SEQNUM(event));
        return;
    }

    const gchar* eventKeySystemId = nullptr;
    GstBuffer* data = nullptr;
    gst_event_parse_protection(event, &eventKeySystemId, &data, nullptr);

    GstMapInfo mapInfo;
    if (!gst_buffer_map(data, &mapInfo, GST_MAP_READ)) {
        GST_WARNING("cannot map %s protection data", eventKeySystemId);
        return;
    }

    GST_DEBUG("scheduling keyNeeded event for %s with init data size of %" G_GSIZE_FORMAT, eventKeySystemId, mapInfo.size);
    GST_MEMDUMP("init datas", mapInfo.data, mapInfo.size);
    // Hand the raw init data to the key-request machinery as a Uint8Array.
    RefPtr<Uint8Array> initDataArray = Uint8Array::create(mapInfo.data, mapInfo.size);
    needKey(initDataArray);
    gst_buffer_unmap(data, &mapInfo);
}
1274
void MediaPlayerPrivateGStreamerBase::receivedGenerateKeyRequest(const String& keySystem)
{
    // Record the UUID of the last requested key system and wake any thread
    // waiting on the protection condition.
    // NOTE(review): notifyOne() is called without an associated lock here —
    // confirm the waiter re-checks its predicate under its own mutex.
    GST_DEBUG("received generate key request for %s", keySystem.utf8().data());
    m_lastGenerateKeyRequestKeySystemUuid = keySystemIdToUuid(keySystem);
    m_protectionCondition.notifyOne();
}
1281
1282 static AtomicString keySystemIdToUuid(const AtomicString& id)
1283 {
1284     if (equalIgnoringASCIICase(id, CLEAR_KEY_PROTECTION_SYSTEM_ID))
1285         return AtomicString(CLEAR_KEY_PROTECTION_SYSTEM_UUID);
1286
1287     return { };
1288 }
1289
std::unique_ptr<CDMSession> MediaPlayerPrivateGStreamerBase::createSession(const String& keySystem, CDMSessionClient*)
{
    // The base implementation provides no CDM session for any key system.
    GST_INFO("Requested CDMSession for KeySystem %s: Returning null.", keySystem.utf8().data());
    return nullptr;
}
1295
void MediaPlayerPrivateGStreamerBase::dispatchDecryptionKey(GstBuffer* buffer)
{
    // Send the decryption key downstream as an out-of-band custom event
    // carrying a "drm-cipher" structure with the key buffer.
    gst_element_send_event(m_pipeline.get(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB,
        gst_structure_new("drm-cipher", "key", GST_TYPE_BUFFER, buffer, nullptr)));
}
1301 #endif
1302
bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
{
    // Base implementation: no key system is supported.
    GST_INFO("Checking for KeySystem support with %s and type %s: false.", keySystem.utf8().data(), mimeType.utf8().data());
    return false;
}
1308
MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
{
    // Base implementation: pass the previously computed answer through unchanged.
    UNUSED_PARAM(parameters);
    return result;
}
1314
1315 }
1316
1317 #endif // USE(GSTREAMER)