be52152183fed6d4d1e776acebf0534fef6f881e
[WebKit.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamerBase.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
7  * Copyright (C) 2015, 2016 Metrological Group B.V.
8  *
9  * This library is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Library General Public
11  * License as published by the Free Software Foundation; either
12  * version 2 of the License, or (at your option) any later version.
13  *
14  * This library is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
17  * Library General Public License for more details.
18  *
19  * You should have received a copy of the GNU Library General Public License
20  * along with this library; see the file COPYING.LIB.  If not, write to
21  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
22  * Boston, MA 02110-1301, USA.
23  */
24
25 #include "config.h"
26 #include "MediaPlayerPrivateGStreamerBase.h"
27
28 #if ENABLE(VIDEO) && USE(GSTREAMER)
29
30 #include "GStreamerUtilities.h"
31 #include "GraphicsContext.h"
32 #include "GraphicsTypes.h"
33 #include "ImageGStreamer.h"
34 #include "ImageOrientation.h"
35 #include "IntRect.h"
36 #include "Logging.h"
37 #include "MediaPlayer.h"
38 #include "NotImplemented.h"
39 #include "VideoSinkGStreamer.h"
40 #include "WebKitWebSourceGStreamer.h"
41 #include <wtf/glib/GMutexLocker.h>
42 #include <wtf/glib/GUniquePtr.h>
43 #include <wtf/text/AtomicString.h>
44 #include <wtf/text/CString.h>
45 #include <wtf/MathExtras.h>
46
47 #include <gst/audio/streamvolume.h>
48 #include <gst/video/gstvideometa.h>
49
50 #if USE(GSTREAMER_GL)
51 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
52 #define GST_GL_CAPS_FORMAT "{ BGRx, BGRA }"
53 #define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureBGRAToRGBA
54 #define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertBGRAToRGBA
55 #else
56 #define GST_GL_CAPS_FORMAT "{ xRGB, ARGB }"
57 #define TEXTURE_MAPPER_COLOR_CONVERT_FLAG TextureMapperGL::ShouldConvertTextureARGBToRGBA
58 #define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::ConvertARGBToRGBA
59 #endif
60
61 #include <gst/app/gstappsink.h>
62 #define GST_USE_UNSTABLE_API
63 #include <gst/gl/gl.h>
64 #undef GST_USE_UNSTABLE_API
65
66 #include "GLContext.h"
67 #if USE(GLX)
68 #include "GLContextGLX.h"
69 #include <gst/gl/x11/gstgldisplay_x11.h>
70 #endif
71
72 #if USE(EGL)
73 #if !PLATFORM(WPE)
74 #include "GLContextEGL.h"
75 #endif
76 #include <gst/gl/egl/gstgldisplay_egl.h>
77 #endif
78
79 #if PLATFORM(X11)
80 #include "PlatformDisplayX11.h"
81 #endif
82
83 #if PLATFORM(WAYLAND)
84 #include "PlatformDisplayWayland.h"
85 #elif PLATFORM(WPE)
86 #include "PlatformDisplayWPE.h"
87 #endif
88
89 // gstglapi.h may include eglplatform.h and it includes X.h, which
90 // defines None, breaking MediaPlayer::None enum
91 #if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
92 #undef None
93 #endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
94 #include "VideoTextureCopierGStreamer.h"
95 #endif // USE(GSTREAMER_GL)
96
97 #if USE(TEXTURE_MAPPER_GL)
98 #include "BitmapTextureGL.h"
99 #include "BitmapTexturePool.h"
100 #include "TextureMapperGL.h"
101 #include "TextureMapperPlatformLayerBuffer.h"
102 #if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
103 #include <cairo-gl.h>
104 #endif
105 #endif // USE(TEXTURE_MAPPER_GL)
106
107 #if ENABLE(ENCRYPTED_MEDIA)
108 #include "SharedBuffer.h"
109 #include "WebKitClearKeyDecryptorGStreamer.h"
110 #endif
111
112 GST_DEBUG_CATEGORY(webkit_media_player_debug);
113 #define GST_CAT_DEFAULT webkit_media_player_debug
114
115 using namespace std;
116
117 namespace WebCore {
118
119 void registerWebKitGStreamerElements()
120 {
121 #if ENABLE(ENCRYPTED_MEDIA)
122     if (!webkitGstCheckVersion(1, 6, 1))
123         return;
124
125     GRefPtr<GstElementFactory> clearKeyDecryptorFactory = gst_element_factory_find("webkitclearkey");
126     if (!clearKeyDecryptorFactory)
127         gst_element_register(nullptr, "webkitclearkey", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_CK_DECRYPT);
128 #endif
129 }
130
131 bool MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements()
132 {
133     if (!initializeGStreamer())
134         return false;
135
136     registerWebKitGStreamerElements();
137
138     GRefPtr<GstElementFactory> srcFactory = adoptGRef(gst_element_factory_find("webkitwebsrc"));
139     if (!srcFactory) {
140         GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
141         gst_element_register(0, "webkitwebsrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_WEB_SRC);
142     }
143
144     return true;
145 }
146
// Euclid's algorithm. The result is returned non-negative so it can be used
// directly as a divisor when reducing the display aspect ratio.
static int greatestCommonDivisor(int a, int b)
{
    while (b) {
        int remainder = a % b;
        a = b;
        b = remainder;
    }

    return a < 0 ? -a : a;
}
157
158 #if USE(TEXTURE_MAPPER_GL)
// Maps an image orientation tag to the TextureMapperGL rotation flag needed to
// render the frame upright. Returns 0 (no flag) for the default orientation,
// and for unexpected values after asserting in debug builds.
static inline TextureMapperGL::Flags texMapFlagFromOrientation(const ImageOrientation& orientation)
{
    switch (orientation) {
    case DefaultImageOrientation:
        return 0;
    case OriginRightTop:
        return TextureMapperGL::ShouldRotateTexture90;
    case OriginBottomRight:
        return TextureMapperGL::ShouldRotateTexture180;
    case OriginLeftBottom:
        return TextureMapperGL::ShouldRotateTexture270;
    default:
        ASSERT_NOT_REACHED();
    }

    return 0;
}
176 #endif
177
178 #if USE(GSTREAMER_GL)
// Keeps a GL-uploaded GstSample mapped for as long as the compositor holds the
// corresponding TextureMapperPlatformLayerBuffer, and unmaps it on destruction.
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
    explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags)
    {
        GstVideoInfo videoInfo;
        if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
            return;

        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
        // Blend only when the format carries alpha; always request the
        // byte-order-dependent color conversion selected at compile time.
        m_flags = flags | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0) | TEXTURE_MAPPER_COLOR_CONVERT_FLAG;

        GstBuffer* buffer = gst_sample_get_buffer(sample);
        // GST_MAP_GL maps the frame as a GL texture instead of system memory.
        if (UNLIKELY(!gst_video_frame_map(&m_videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL))))
            return;

        // With a GL mapping, plane 0 holds the texture ID rather than pixels.
        m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
        m_isValid = true;
    }

    virtual ~GstVideoFrameHolder()
    {
        // Nothing was mapped if construction bailed out early.
        if (UNLIKELY(!m_isValid))
            return;

        gst_video_frame_unmap(&m_videoFrame);
    }

    const IntSize& size() const { return m_size; }
    TextureMapperGL::Flags flags() const { return m_flags; }
    GLuint textureID() const { return m_textureID; }
    bool isValid() const { return m_isValid; }

private:
    GstVideoFrame m_videoFrame;
    IntSize m_size;
    TextureMapperGL::Flags m_flags;
    GLuint m_textureID;
    bool m_isValid { false };
};
218 #endif // USE(GSTREAMER_GL)
219
// m_sampleMutex guards m_sample, which is written from GStreamer streaming
// threads and read from the main/compositor threads.
MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
    , m_player(player)
    , m_fpsSink(nullptr)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_networkState(MediaPlayer::Empty)
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
#if USE(TEXTURE_MAPPER_GL)
    , m_platformLayerProxy(adoptRef(new TextureMapperPlatformLayerProxy()))
#endif
{
    g_mutex_init(&m_sampleMutex);
}
233
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
    // Stop main-thread notifications first so no callback runs on a
    // half-destroyed player.
    m_notifier->invalidate();

    // Wake any streaming thread blocked waiting on a repaint.
    cancelRepaint();

    if (m_videoSink) {
        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
#if USE(GSTREAMER_GL)
        // The GL sink is a bin wrapping an appsink whose new-sample/new-preroll
        // handlers were connected with `this` as user data; disconnect them too.
        if (GST_IS_BIN(m_videoSink.get())) {
            GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
            g_signal_handlers_disconnect_by_data(appsink.get(), this);
        }
#endif
    }

    g_mutex_clear(&m_sampleMutex);

    m_player = nullptr;

    if (m_volumeElement)
        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    // Shut the pipeline down before our reference to it is released.
    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);
}
260
// Stores a reference to the playback pipeline built by a subclass.
void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
{
    m_pipeline = pipeline;
}
265
266 bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
267 {
268     UNUSED_PARAM(message);
269     if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
270         return false;
271
272     const gchar* contextType;
273     gst_message_parse_context_type(message, &contextType);
274
275 #if USE(GSTREAMER_GL)
276     GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType, this));
277     if (elementContext) {
278         gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
279         return true;
280     }
281 #endif // USE(GSTREAMER_GL)
282
283     return false;
284 }
285
286 #if USE(GSTREAMER_GL)
// Answers a NEED_CONTEXT request from a GStreamer element with either the GL
// display context or WebKit's wrapped application GL context. Returns a
// transferred reference, or nullptr if GL setup failed or the requested
// context type is unknown.
GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const gchar* contextType, MediaPlayerPrivateGStreamerBase* player)
{
    if (!player->ensureGstGLContext())
        return nullptr;

    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(displayContext, player->gstGLDisplay());
        return displayContext;
    }

    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(appContext);
#if GST_CHECK_VERSION(1, 11, 0)
        gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, player->gstGLContext(), nullptr);
#else
        // The GType macro was renamed in GStreamer 1.11.
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, player->gstGLContext(), nullptr);
#endif
        return appContext;
    }

    return nullptr;
}
311
// Lazily wraps WebKit's shared compositing GL display and context into
// GStreamer GL objects. Returns false when no native context handle is
// available yet; safe to call repeatedly.
bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
{
    if (m_glContext)
        return true;

    auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();
    if (!m_glDisplay) {
        // Pick the GstGLDisplay flavor matching the platform/backend this
        // build targets; exactly one branch is expected to set m_glDisplay.
#if PLATFORM(X11)
#if USE(GLX)
        if (is<PlatformDisplayX11>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
#elif USE(EGL)
        if (is<PlatformDisplayX11>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
#endif
#endif

#if PLATFORM(WAYLAND)
        if (is<PlatformDisplayWayland>(sharedDisplay))
            m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
#endif

#if PLATFORM(WPE)
        ASSERT(is<PlatformDisplayWPE>(sharedDisplay));
        m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWPE>(sharedDisplay).eglDisplay()));
#endif

        ASSERT(m_glDisplay);
    }

    GLContext* webkitContext = sharedDisplay.sharingGLContext();
    // EGL and GLX are mutually exclusive, no need for ifdefs here.
    GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;

#if USE(OPENGL_ES_2)
    GstGLAPI glAPI = GST_GL_API_GLES2;
#elif USE(OPENGL)
    GstGLAPI glAPI = GST_GL_API_OPENGL;
#else
    ASSERT_NOT_REACHED();
#endif

    PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
    if (!contextHandle)
        return false;

    // Wrap (not own) WebKit's existing native context so GStreamer can share
    // textures with the compositor.
    m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);

    return true;
}
362 #endif // USE(GSTREAMER_GL)
363
// Returns the size of the video: the frame dimensions corrected by the pixel
// aspect ratio advertised in the caps of the current sample. The computed
// value is cached in m_videoSize.
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
    if (!hasVideo())
        return FloatSize();

    if (!m_videoSize.isEmpty())
        return m_videoSize;

    // m_sample is shared with streaming threads; guard the read.
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return FloatSize();

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return FloatSize();


    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size, if this fails the
    // video-sink has likely not yet negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return FloatSize();

#if USE(TEXTURE_MAPPER_GL)
    // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
    if (m_renderingCanBeAccelerated) {
        if (m_videoSourceOrientation.usesWidthAsHeight())
            originalSize = originalSize.transposedSize();
    }
#endif

    GST_DEBUG("Original video size: %dx%d", originalSize.width(), originalSize.height());
    GST_DEBUG("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate DAR based on PAR and video size.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

    // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        GST_DEBUG("Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        GST_DEBUG("Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        GST_DEBUG("Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    GST_DEBUG("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}
435
// Propagates the requested volume to the pipeline on GStreamer's cubic scale;
// a no-op until the volume element exists.
void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
{
    if (!m_volumeElement)
        return;

    GST_DEBUG("Setting volume: %f", volume);
    gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
}
444
// Returns the pipeline volume on the cubic scale, or 0 before the volume
// element exists.
float MediaPlayerPrivateGStreamerBase::volume() const
{
    if (!m_volumeElement)
        return 0;

    return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
}
452
453
454 void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
455 {
456     if (!m_player || !m_volumeElement)
457         return;
458     double volume;
459     volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
460     // get_volume() can return values superior to 1.0 if the user
461     // applies software user gain via third party application (GNOME
462     // volume control for instance).
463     volume = CLAMP(volume, 0.0, 1.0);
464     m_player->volumeChanged(static_cast<float>(volume));
465 }
466
void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::volume signal.
    GST_DEBUG("Volume changed to: %f", player->volume());

    // Hop to the main thread before touching the MediaPlayer client.
    player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] { player->notifyPlayerOfVolumeChange(); });
}
474
// Current network state, maintained elsewhere as loading progresses.
MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
    return m_networkState;
}
479
// Current ready state, maintained elsewhere as playback becomes possible.
MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
    return m_readyState;
}
484
// Intentionally not implemented in this base class.
void MediaPlayerPrivateGStreamerBase::sizeChanged()
{
    notImplemented();
}
489
// Mutes or unmutes the pipeline; a no-op until the volume element exists.
void MediaPlayerPrivateGStreamerBase::setMuted(bool muted)
{
    if (!m_volumeElement)
        return;

    g_object_set(m_volumeElement.get(), "mute", muted, nullptr);
}
497
// Queries the pipeline's mute state; reports unmuted until the volume element
// exists.
bool MediaPlayerPrivateGStreamerBase::muted() const
{
    if (!m_volumeElement)
        return false;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
    return muted;
}
507
// Main-thread half of notify::mute handling: re-reads the property and
// forwards it to the MediaPlayer client.
void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
{
    if (!m_player || !m_volumeElement)
        return;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
    m_player->muteChanged(static_cast<bool>(muted));
}
517
void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::mute signal.
    // Bounce to the main thread before touching the MediaPlayer client.
    player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] { player->notifyPlayerOfMute(); });
}
523
// Caches whether accelerated compositing may be used; the flag is consulted on
// every paint/repaint decision.
void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
{
    m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled();
}
528
529 #if USE(TEXTURE_MAPPER_GL)
// Uploads the current sample's pixels into `texture`, preferring the
// GstVideoGLTextureUploadMeta fast path and falling back to a CPU frame map
// plus copy.
void MediaPlayerPrivateGStreamerBase::updateTexture(BitmapTextureGL& texture, GstVideoInfo& videoInfo)
{
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());

    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
        if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
            guint ids[4] = { texture.id(), 0, 0, 0 };

            // If the meta upload succeeds we are done; otherwise fall through
            // to the software copy below.
            if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                return;
        }
    }

    // Right now the TextureMapper only supports chromas with one plane
    ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);

    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
        return;

    int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
    const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
    texture.updateContents(srcData, WebCore::IntRect(0, 0, GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo)), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
    gst_video_frame_unmap(&videoFrame);
}
556 #endif
557
558 #if USE(TEXTURE_MAPPER_GL)
// Hands the current sample to the compositor as a TextureMapper layer buffer.
// With GStreamer GL the frame's own texture is wrapped directly; otherwise the
// pixels are uploaded into a (possibly recycled) BitmapTextureGL.
void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
#if !USE(GSTREAMER_GL)
    class ConditionNotifier {
    public:
        ConditionNotifier(Lock& lock, Condition& condition)
            : m_locker(lock), m_condition(condition)
        {
        }
        ~ConditionNotifier()
        {
            m_condition.notifyOne();
        }
    private:
        LockHolder m_locker;
        Condition& m_condition;
    };
    // Guarantee that triggerRepaint(), which blocks on m_drawCondition in this
    // configuration, is woken on every exit path from this function.
    ConditionNotifier notifier(m_drawMutex, m_drawCondition);
#endif

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    LockHolder holder(m_platformLayerProxy->lock());

    if (!m_platformLayerProxy->isActive())
        return;

#if USE(GSTREAMER_GL)
    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get(), texMapFlagFromOrientation(m_videoSourceOrientation));
    if (UNLIKELY(!frameHolder->isValid()))
        return;

    // The holder keeps the frame mapped until the compositor releases the buffer.
    std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(frameHolder->textureID(), frameHolder->size(), frameHolder->flags(), GraphicsContext3D::RGBA);
    layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
    m_platformLayerProxy->pushNextBuffer(WTFMove(layerBuffer));
#else
    GstVideoInfo videoInfo;
    if (UNLIKELY(!getSampleVideoInfo(m_sample.get(), videoInfo)))
        return;

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    std::unique_ptr<TextureMapperPlatformLayerBuffer> buffer = m_platformLayerProxy->getAvailableBuffer(size, GraphicsContext3D::DONT_CARE);
    if (UNLIKELY(!buffer)) {
        // No recyclable buffer of the right size: create a fresh texture,
        // lazily creating our GraphicsContext3D first.
        if (UNLIKELY(!m_context3D))
            m_context3D = GraphicsContext3D::create(GraphicsContext3DAttributes(), nullptr, GraphicsContext3D::RenderToCurrentGLContext);

        auto texture = BitmapTextureGL::create(*m_context3D);
        texture->reset(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
        buffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
    }
    updateTexture(buffer->textureGL(), videoInfo);
    buffer->setExtraFlags(texMapFlagFromOrientation(m_videoSourceOrientation) | (GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0));
    m_platformLayerProxy->pushNextBuffer(WTFMove(buffer));
#endif // USE(GSTREAMER_GL)
}
616 #endif // USE(TEXTURE_MAPPER_GL)
617
// Main-thread repaint entry point (also the m_drawTimer target).
void MediaPlayerPrivateGStreamerBase::repaint()
{
    ASSERT(m_sample);
    ASSERT(isMainThread());

    m_player->repaint();

#if USE(GSTREAMER_GL)
    bool shouldNotifyDraw = !m_renderingCanBeAccelerated;
#else
    bool shouldNotifyDraw = true;
#endif
    // In the non-accelerated path, triggerRepaint() blocks the streaming
    // thread on m_drawCondition; release it now that the frame was painted.
    if (shouldNotifyDraw) {
        LockHolder lock(m_drawMutex);
        m_drawCondition.notifyOne();
    }
}
635
// Called from a GStreamer streaming thread whenever a new sample is available.
// Stores the sample and either schedules a main-thread repaint (blocking until
// it completes) or pushes the frame to the compositor.
void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
    bool triggerResize;
    {
        WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
        triggerResize = !m_sample;
        m_sample = sample;
    }

    if (triggerResize) {
        GST_DEBUG("First sample reached the sink, triggering video dimensions update");
        m_notifier->notify(MainThreadNotification::SizeChanged, [this] { m_player->sizeChanged(); });
    }

    if (!m_renderingCanBeAccelerated) {
        // Software path: paint on the main thread via m_drawTimer and wait for
        // repaint() to signal completion before the sample may be recycled.
        LockHolder locker(m_drawMutex);
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
        return;
    }

#if USE(TEXTURE_MAPPER_GL)
#if USE(GSTREAMER_GL)
    pushTextureToCompositor();
#else
    {
        // Without GStreamer GL the texture upload runs on the compositor
        // thread; block until it is done with the sample.
        LockHolder lock(m_drawMutex);
        if (!m_platformLayerProxy->scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); }))
            return;
        m_drawCondition.wait(m_drawMutex);
    }
#endif
#endif // USE(TEXTURE_MAPPER_GL)
}
670
// Trampoline from a video sink signal into the member function.
void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
{
    player->triggerRepaint(sample);
}
675
// Stops any pending draw timer and unblocks a streaming thread waiting in
// triggerRepaint(); called from the destructor and repaintCancelledCallback().
void MediaPlayerPrivateGStreamerBase::cancelRepaint()
{
#if USE(GSTREAMER_GL)
    // With GStreamer GL the accelerated path never blocks, so there is
    // nothing to cancel in that case.
    bool shouldCancelRepaint = !m_renderingCanBeAccelerated;
#else
    bool shouldCancelRepaint = true;
#endif
    if (shouldCancelRepaint) {
        m_drawTimer.stop();
        LockHolder locker(m_drawMutex);
        m_drawCondition.notifyOne();
    }
}
689
// Trampoline used when pending repaints must be aborted.
void MediaPlayerPrivateGStreamerBase::repaintCancelledCallback(MediaPlayerPrivateGStreamerBase* player)
{
    player->cancelRepaint();
}
694
695 #if USE(GSTREAMER_GL)
// appsink "new-sample" handler: pulls the sample and hands it to the player.
GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}
702
// appsink "new-preroll" handler: pulls the preroll sample and hands it to the
// player, mirroring newSampleCallback().
GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}
709
// Drops the cached sample (invoked from the FLUSH_START pad probe installed in
// createGLAppSink()) and tells the compositor proxy to drop its current buffer
// while keeping the texture alive.
void MediaPlayerPrivateGStreamerBase::flushCurrentBuffer()
{
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    m_sample.clear();

    {
        LockHolder locker(m_platformLayerProxy->lock());

        if (m_platformLayerProxy->isActive())
            m_platformLayerProxy->dropCurrentBufferWhilePreservingTexture();
    }
}
722 #endif
723
// Stores the on-screen size requested by the player.
void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
{
    m_size = size;
}
728
// Software rendering path: converts the current sample into an ImageGStreamer
// and draws it into the given graphics context with CompositeCopy.
void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (context.paintingDisabled())
        return;

    if (!m_player->visible())
        return;

    // m_sample is written from streaming threads; hold the mutex while drawing.
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    ImagePaintingOptions paintingOptions(CompositeCopy);
    if (m_renderingCanBeAccelerated)
        paintingOptions.m_orientationDescription.setImageOrientationEnum(m_videoSourceOrientation);

    RefPtr<ImageGStreamer> gstImage = ImageGStreamer::createImage(m_sample.get());
    if (!gstImage)
        return;

    // NOTE(review): reinterpret_cast here assumes ImageGStreamer::image()
    // returns something layout-compatible with Image* — confirm against
    // ImageGStreamer's declaration.
    if (Image* image = reinterpret_cast<Image*>(gstImage->image()))
        context.drawImage(*image, rect, gstImage->rect(), paintingOptions);
}
752
753 #if USE(GSTREAMER_GL)
// Copies the current GL video frame into a caller-provided texture via
// VideoTextureCopierGStreamer. Returns false when using the fallback sink,
// when premultiplied alpha is requested, or when the frame cannot be mapped
// as a GL texture.
bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    UNUSED_PARAM(context);

    if (m_usingFallbackVideoSink)
        return false;

    // Premultiplied-alpha output is not handled by this path.
    if (premultiplyAlpha)
        return false;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);

    GstVideoInfo videoInfo;
    if (!getSampleVideoInfo(m_sample.get(), videoInfo))
        return false;

    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return false;

    // Rotated (90/270) sources swap width and height on screen.
    IntSize size(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();
    // With a GL mapping, plane 0 holds the source texture ID.
    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    bool copied = m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);

    gst_video_frame_unmap(&videoFrame);

    return copied;
}
789
// Produces a cairo surface backed by a GL texture containing the current
// frame, or nullptr when that is impossible (fallback sink, mapping failure,
// failed copy, or a build without cairo-gl accelerated canvas support).
NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
{
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
    if (m_usingFallbackVideoSink)
        return nullptr;

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);

    GstVideoInfo videoInfo;
    if (!getSampleVideoInfo(m_sample.get(), videoInfo))
        return nullptr;

    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return nullptr;

    // Rotated (90/270) sources swap width and height on screen.
    IntSize size(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    // The copy runs on WebKit's shared compositing context.
    GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
    context->makeContextCurrent();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    // With a GL mapping, plane 0 holds the source texture ID.
    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
    bool copied = m_videoTextureCopier->copyVideoTextureToPlatformTexture(textureID, size, 0, GraphicsContext3D::TEXTURE_2D, 0, GraphicsContext3D::RGBA, GraphicsContext3D::RGBA, GraphicsContext3D::UNSIGNED_BYTE, false, m_videoSourceOrientation);
    gst_video_frame_unmap(&videoFrame);

    if (!copied)
        return nullptr;

    return adoptRef(cairo_gl_surface_create_for_texture(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, m_videoTextureCopier->resultTexture(), size.width(), size.height()));
#else
    return nullptr;
#endif
}
829 #endif // USE(GSTREAMER_GL)
830
// Records the orientation tag of the video source; the value is applied later
// when frames are painted or copied.
void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(const ImageOrientation& orientation)
{
    if (m_videoSourceOrientation == orientation)
        return;

    m_videoSourceOrientation = orientation;
}
838
// GStreamer-backed playback can always be presented fullscreen.
bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
    return true;
}
843
// No platform-specific media object is exposed by this backend.
PlatformMedia MediaPlayerPrivateGStreamerBase::platformMedia() const
{
    return NoPlatformMedia;
}
848
849 MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
850 {
851     if (m_readyState == MediaPlayer::HaveNothing)
852         return MediaPlayer::Unknown;
853
854     if (isLiveStream())
855         return MediaPlayer::LiveStream;
856
857     return MediaPlayer::Download;
858 }
859
860 #if USE(GSTREAMER_GL)
861 GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
862 {
863     if (!webkitGstCheckVersion(1, 8, 0))
864         return nullptr;
865
866     GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
867     if (!appsink)
868         return nullptr;
869
870     g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
871     g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
872     g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);
873
874     GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(appsink, "sink"));
875     gst_pad_add_probe (pad.get(), GST_PAD_PROBE_TYPE_EVENT_FLUSH, [] (GstPad*, GstPadProbeInfo* info,  gpointer userData) -> GstPadProbeReturn {
876         if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) != GST_EVENT_FLUSH_START)
877             return GST_PAD_PROBE_OK;
878
879         auto* player = static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
880         player->flushCurrentBuffer();
881         return GST_PAD_PROBE_OK;
882     }, this, nullptr);
883
884     return appsink;
885 }
886
// Builds the GL rendering sink bin: glupload ! glcolorconvert ! appsink,
// exposed through a single ghost "sink" pad. Returns nullptr on failure.
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
{
    // FIXME: Currently it's not possible to get the video frames and caps using this approach until
    // the pipeline gets into playing state. Due to this, trying to grab a frame and painting it by some
    // other mean (canvas or webgl) before playing state can result in a crash.
    // This is being handled in https://bugs.webkit.org/show_bug.cgi?id=159460.
    if (!webkitGstCheckVersion(1, 8, 0))
        return nullptr;

    gboolean result = TRUE;
    GstElement* videoSink = gst_bin_new(nullptr);
    GstElement* upload = gst_element_factory_make("glupload", nullptr);
    GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
    GstElement* appsink = createGLAppSink();

    if (!appsink || !upload || !colorconvert) {
        GST_WARNING("Failed to create GstGL elements");
        gst_object_unref(videoSink);

        // The elements have not been added to the bin yet, so each one that
        // was successfully created must be released individually.
        if (upload)
            gst_object_unref(upload);
        if (colorconvert)
            gst_object_unref(colorconvert);
        if (appsink)
            gst_object_unref(appsink);

        return nullptr;
    }

    // From here on the bin owns the elements; unreffing the bin releases them.
    gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);

    // Constrain the appsink input to GL memory in the format the texture
    // copier expects.
    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw(" GST_CAPS_FEATURE_MEMORY_GL_MEMORY "), format = (string) " GST_GL_CAPS_FORMAT));

    result &= gst_element_link_pads(upload, "src", colorconvert, "sink");
    result &= gst_element_link_pads_filtered(colorconvert, "src", appsink, "sink", caps.get());

    // Expose the uploader's sink pad as the bin's external "sink" pad.
    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
    gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));

    if (!result) {
        GST_WARNING("Failed to link GstGL elements");
        gst_object_unref(videoSink);
        videoSink = nullptr;
    }
    return videoSink;
}
933 #endif // USE(GSTREAMER_GL)
934
// Creates the video sink handed to the playbin: the GL sink when accelerated
// rendering is available, otherwise the cairo-based WebKit sink, optionally
// wrapped in fpsdisplaysink for frame statistics.
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    acceleratedRenderingStateChanged();

#if USE(GSTREAMER_GL)
    if (m_renderingCanBeAccelerated)
        m_videoSink = createVideoSinkGL();
#endif

    // Fall back to the software sink when GL sink creation failed or was
    // never attempted; repaints are then driven through signal callbacks.
    if (!m_videoSink) {
        m_usingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
        g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
    }

    GstElement* videoSink = nullptr;
    // Wrap the real sink in fpsdisplaysink to get frames-rendered/dropped
    // counters (see decodedFrameCount()/droppedFrameCount()).
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);

        // Turn off text overlay unless logging is enabled.
#if LOG_DISABLED
        g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#else
        if (!isLogChannelEnabled("Media"))
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#endif // LOG_DISABLED

        // Older fpsdisplaysink versions lack the "video-sink" property; in
        // that case discard the wrapper and use the real sink directly.
        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);

    return videoSink;
}
978
979 void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
980 {
981     ASSERT(!m_volumeElement);
982     m_volumeElement = volume;
983
984     // We don't set the initial volume because we trust the sink to keep it for us. See
985     // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
986     if (!m_player->platformVolumeConfigurationRequired()) {
987         GST_DEBUG("Setting stream volume to %f", m_player->volume());
988         g_object_set(m_volumeElement.get(), "volume", m_player->volume(), nullptr);
989     } else
990         GST_DEBUG("Not setting stream volume, trusting system one");
991
992     GST_DEBUG("Setting stream muted %d",  m_player->muted());
993     g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);
994
995     g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
996     g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
997 }
998
999 unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
1000 {
1001     guint64 decodedFrames = 0;
1002     if (m_fpsSink)
1003         g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
1004     return static_cast<unsigned>(decodedFrames);
1005 }
1006
1007 unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
1008 {
1009     guint64 framesDropped = 0;
1010     if (m_fpsSink)
1011         g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
1012     return static_cast<unsigned>(framesDropped);
1013 }
1014
1015 unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
1016 {
1017     GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
1018     gint64 position = 0;
1019
1020     if (audioSink() && gst_element_query(audioSink(), query))
1021         gst_query_parse_position(query, 0, &position);
1022
1023     gst_query_unref(query);
1024     return static_cast<unsigned>(position);
1025 }
1026
1027 unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
1028 {
1029     GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
1030     gint64 position = 0;
1031
1032     if (gst_element_query(m_videoSink.get(), query))
1033         gst_query_parse_position(query, 0, &position);
1034
1035     gst_query_unref(query);
1036     return static_cast<unsigned>(position);
1037 }
1038
1039 bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
1040 {
1041     GST_INFO("Checking for KeySystem support with %s and type %s: false.", keySystem.utf8().data(), mimeType.utf8().data());
1042     return false;
1043 }
1044
1045 MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
1046 {
1047     UNUSED_PARAM(parameters);
1048     return result;
1049 }
1050
1051 }
1052
1053 #endif // USE(GSTREAMER)