Replace WTF::move with WTFMove
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamerBase.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010 Igalia S.L
7  *
8  * This library is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Library General Public
10  * License as published by the Free Software Foundation; either
11  * version 2 of the License, or (at your option) any later version.
12  *
13  * This library is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
16  * Library General Public License for more details.
17  *
18  * You should have received a copy of the GNU Library General Public License
19  * aint with this library; see the file COPYING.LIB.  If not, write to
20  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
21  * Boston, MA 02110-1301, USA.
22  */
23
#include "config.h"
#include "MediaPlayerPrivateGStreamerBase.h"

#if ENABLE(VIDEO) && USE(GSTREAMER)

#include "ColorSpace.h"
#include "GStreamerUtilities.h"
#include "GraphicsContext.h"
#include "GraphicsTypes.h"
#include "ImageGStreamer.h"
#include "ImageOrientation.h"
#include "IntRect.h"
#include "MediaPlayer.h"
#include "NotImplemented.h"
#include "VideoSinkGStreamer.h"
#include "WebKitWebSourceGStreamer.h"
#include <cstdlib>
#include <gst/gst.h>
#include <wtf/glib/GMutexLocker.h>
#include <wtf/text/CString.h>

#include <gst/audio/streamvolume.h>
#include <gst/video/gstvideometa.h>
46
47 #if USE(GSTREAMER_GL)
48 #define GST_USE_UNSTABLE_API
49 #include <gst/gl/gstglmemory.h>
50 #undef GST_USE_UNSTABLE_API
51 #endif
52
53 #if GST_CHECK_VERSION(1, 1, 0) && USE(TEXTURE_MAPPER_GL)
54 #include "BitmapTextureGL.h"
55 #include "BitmapTexturePool.h"
56 #include "TextureMapperGL.h"
57 #endif
58 #if USE(COORDINATED_GRAPHICS_THREADED)
59 #include "TextureMapperPlatformLayerBuffer.h"
60 #endif
61
62 #if USE(GSTREAMER_GL)
63 #include "GLContext.h"
64
65 #define GST_USE_UNSTABLE_API
66 #include <gst/gl/gl.h>
67 #undef GST_USE_UNSTABLE_API
68
69 #if USE(GLX)
70 #include "GLContextGLX.h"
71 #include <gst/gl/x11/gstgldisplay_x11.h>
72 #elif USE(EGL)
73 #include "GLContextEGL.h"
74 #include <gst/gl/egl/gstgldisplay_egl.h>
75 #endif
76
77 #if PLATFORM(X11)
78 #include "PlatformDisplayX11.h"
79 #elif PLATFORM(WAYLAND)
80 #include "PlatformDisplayWayland.h"
81 #endif
82
83 // gstglapi.h may include eglplatform.h and it includes X.h, which
84 // defines None, breaking MediaPlayer::None enum
85 #if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
86 #undef None
87 #endif
88 #endif // USE(GSTREAMER_GL)
89
// Define the GStreamer debug category used throughout this file via
// GST_CAT_DEFAULT (and the media logging macros).
GST_DEBUG_CATEGORY(webkit_media_player_debug);
#define GST_CAT_DEFAULT webkit_media_player_debug

// NOTE(review): file-scope `using namespace std;` is discouraged; left in
// place because removing it could break unqualified std uses in this file.
using namespace std;

namespace WebCore {
// Returns the greatest common divisor of |a| and |b| using the iterative
// Euclidean algorithm. The result is always non-negative, and
// greatestCommonDivisor(0, 0) is 0. Used below to reduce the display
// aspect ratio to lowest terms and avoid overflow.
static int greatestCommonDivisor(int a, int b)
{
    while (b) {
        int temp = a;
        a = b;
        b = temp % b;
    }

    // std::abs instead of GLib's ABS macro: this is a pure C++ helper and
    // should not depend on a transitively-included GLib macro.
    return std::abs(a);
}
107
108 #if USE(COORDINATED_GRAPHICS_THREADED) && USE(GSTREAMER_GL)
// Keeps a GL-mapped GstVideoFrame alive on behalf of the compositor for as
// long as the owning TextureMapperPlatformLayerBuffer is in use. The frame
// is unmapped and freed asynchronously on the GstGLWindow thread, since GL
// resources must be released from the GL thread that owns them.
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
    explicit GstVideoFrameHolder(GstSample* sample)
    {
        GstVideoInfo videoInfo;
        gst_video_info_init(&videoInfo);
        GstCaps* caps = gst_sample_get_caps(sample);
        if (UNLIKELY(!gst_video_info_from_caps(&videoInfo, caps)))
            return; // m_isValid stays false; callers must check isValid().

        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
        m_flags = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0;

        GstBuffer* buffer = gst_sample_get_buffer(sample);
        m_videoFrame = new GstVideoFrame();
        // GST_MAP_GL maps the buffer as a GL texture rather than system memory.
        if (UNLIKELY(!gst_video_frame_map(m_videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))) {
            delete m_videoFrame;
            return;
        }

        // For GL-mapped frames, plane 0's data pointer holds the texture ID.
        m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame->data[0]);
        m_isValid = true;
    }

    virtual ~GstVideoFrameHolder()
    {
        if (UNLIKELY(!m_isValid))
            return;

        // Unmap/free must happen on the GstGLWindow thread, so dispatch the
        // cleanup asynchronously instead of doing it inline.
        GstMapInfo* info = &m_videoFrame->map[0];
        GstGLBaseBuffer* mem = (GstGLBaseBuffer*)info->memory;
        GstGLWindow* gstWindow = gst_gl_context_get_window(mem->context);

        gst_gl_window_send_message_async(gstWindow, (GstGLWindowCB)unmapVideoFrameCallback, m_videoFrame, (GDestroyNotify)freeVideoFrameCallback);
    }

    // Runs on the GL thread: releases the GL mapping.
    static void unmapVideoFrameCallback(GstVideoFrame* videoFrame)
    {
        gst_video_frame_unmap(videoFrame);
    }

    // Runs after unmapping: frees the heap-allocated frame struct.
    static void freeVideoFrameCallback(GstVideoFrame* videoFrame)
    {
        delete videoFrame;
    }

    const IntSize& size() const { return m_size; }
    TextureMapperGL::Flags flags() const { return m_flags; }
    GLuint textureID() const { return m_textureID; }
    bool isValid() const { return m_isValid; }

private:
    GstVideoFrame* m_videoFrame; // Heap-allocated; freed via freeVideoFrameCallback.
    IntSize m_size;
    TextureMapperGL::Flags m_flags;
    GLuint m_textureID;
    bool m_isValid { false };
};
167 #endif
168
// Note: m_player is a borrowed back-pointer; it is cleared in the destructor.
MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
    : m_player(player)
    , m_fpsSink(0)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_networkState(MediaPlayer::Empty)
#if USE(GSTREAMER_GL)
    // Repaints must run on the main thread; this timer bounces draw requests
    // from the streaming thread over to it (see triggerRepaint()).
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
#endif
    , m_usingFallbackVideoSink(false)
{
    // m_sampleMutex guards m_sample, which is written from GStreamer's
    // streaming thread and read from the main/compositor threads.
    g_mutex_init(&m_sampleMutex);
#if USE(COORDINATED_GRAPHICS_THREADED)
    m_platformLayerProxy = adoptRef(new TextureMapperPlatformLayerProxy());
#endif
}
184
MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
    // Drop queued main-thread notifications so their lambdas cannot run
    // against a dying object.
    m_notifier.cancelPendingNotifications();

    // Disconnect every signal handler that was connected with `this` as data.
    g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    g_mutex_clear(&m_sampleMutex);

    m_player = nullptr;

    g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    // Tell the accelerated-compositing client its platform layer is going away.
    if (client())
        client()->platformLayerWillBeDestroyed();
#endif
}
202
// Stores the GStreamer pipeline this player operates on.
void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
{
    m_pipeline = pipeline;
}
207
// Handles bus messages that must be answered synchronously on the posting
// thread. Only GST_MESSAGE_NEED_CONTEXT is serviced: it hands GStreamer GL
// elements our GL display and wrapped application GL context. Returns true
// if the message was handled.
bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
{
#if USE(GSTREAMER_GL)
    if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
        return false;

    const gchar* contextType;
    gst_message_parse_context_type(message, &contextType);

    // Lazily create the wrapped GL context before answering either request.
    if (!ensureGstGLContext())
        return false;

    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(displayContext, m_glDisplay.get());
        gst_element_set_context(GST_ELEMENT(message->src), displayContext);
        return true;
    }

    // "gst.gl.app_context" is how elements request an application-provided GL context.
    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(appContext);
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, m_glContext.get(), nullptr);
        gst_element_set_context(GST_ELEMENT(message->src), appContext);
        return true;
    }
#else
    UNUSED_PARAM(message);
#endif // USE(GSTREAMER_GL)

    return false;
}
240
241 #if USE(GSTREAMER_GL)
242 bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
243 {
244     if (m_glContext)
245         return true;
246
247     if (!m_glDisplay) {
248         const auto& sharedDisplay = PlatformDisplay::sharedDisplay();
249 #if PLATFORM(X11)
250         m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
251 #elif PLATFORM(WAYLAND)
252         m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).native()));
253 #endif
254     }
255
256     GLContext* webkitContext = GLContext::sharingContext();
257     // EGL and GLX are mutually exclusive, no need for ifdefs here.
258     GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;
259
260 #if USE(OPENGL_ES_2)
261     GstGLAPI glAPI = GST_GL_API_GLES2;
262 #elif USE(OPENGL)
263     GstGLAPI glAPI = GST_GL_API_OPENGL;
264 #else
265     ASSERT_NOT_REACHED();
266 #endif
267
268     PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
269     if (!contextHandle)
270         return false;
271
272     m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);
273
274     return true;
275 }
276 #endif // USE(GSTREAMER_GL)
277
// Returns the intrinsic size of the video: the original frame size corrected
// by the pixel aspect ratio negotiated on the sink's caps. The computed
// value is cached in m_videoSize.
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
    if (!hasVideo())
        return FloatSize();

    if (!m_videoSize.isEmpty())
        return m_videoSize; // Cached from a previous call.

    // m_sample is shared with the streaming thread; lock while reading it.
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return FloatSize();

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return FloatSize();


    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size, if this fails the
    // video-sink has likely not yet negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return FloatSize();

    LOG_MEDIA_MESSAGE("Original video size: %dx%d", originalSize.width(), originalSize.height());
    LOG_MEDIA_MESSAGE("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate DAR based on PAR and video size.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

    // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        LOG_MEDIA_MESSAGE("Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        LOG_MEDIA_MESSAGE("Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        LOG_MEDIA_MESSAGE("Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    LOG_MEDIA_MESSAGE("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}
341
// Sets the pipeline volume (cubic scale) on the stream-volume element, if one
// has been registered via setStreamVolumeElement().
void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
{
    if (!m_volumeElement)
        return;

    LOG_MEDIA_MESSAGE("Setting volume: %f", volume);
    gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC, static_cast<double>(volume));
}
350
// Returns the current volume (cubic scale) from the stream-volume element,
// or 0 when no element has been registered yet.
float MediaPlayerPrivateGStreamerBase::volume() const
{
    if (!m_volumeElement)
        return 0;

    return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
}
358
359
360 void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
361 {
362     if (!m_player || !m_volumeElement)
363         return;
364     double volume;
365     volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_CUBIC);
366     // get_volume() can return values superior to 1.0 if the user
367     // applies software user gain via third party application (GNOME
368     // volume control for instance).
369     volume = CLAMP(volume, 0.0, 1.0);
370     m_player->volumeChanged(static_cast<float>(volume));
371 }
372
// Signal handler for notify::volume on m_volumeElement; the actual update is
// bounced to the main thread via m_notifier.
void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::volume signal.
    LOG_MEDIA_MESSAGE("Volume changed to: %f", player->volume());

    player->m_notifier.notify(MainThreadNotification::VolumeChanged, [player] { player->notifyPlayerOfVolumeChange(); });
}
380
// Returns the last network state set by the subclass/pipeline handling.
MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
    return m_networkState;
}
385
// Returns the last ready state set by the subclass/pipeline handling.
MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
    return m_readyState;
}
390
// Intentionally not implemented; size queries go through naturalSize().
void MediaPlayerPrivateGStreamerBase::sizeChanged()
{
    notImplemented();
}
395
396 void MediaPlayerPrivateGStreamerBase::setMuted(bool muted)
397 {
398     if (!m_volumeElement)
399         return;
400
401     g_object_set(m_volumeElement.get(), "mute", muted, NULL);
402 }
403
404 bool MediaPlayerPrivateGStreamerBase::muted() const
405 {
406     if (!m_volumeElement)
407         return false;
408
409     bool muted;
410     g_object_get(m_volumeElement.get(), "mute", &muted, NULL);
411     return muted;
412 }
413
414 void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
415 {
416     if (!m_player || !m_volumeElement)
417         return;
418
419     gboolean muted;
420     g_object_get(m_volumeElement.get(), "mute", &muted, NULL);
421     m_player->muteChanged(static_cast<bool>(muted));
422 }
423
// Signal handler for notify::mute on m_volumeElement; the actual update is
// bounced to the main thread via m_notifier.
void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::mute signal.
    player->m_notifier.notify(MainThreadNotification::MuteChanged, [player] { player->notifyPlayerOfMute(); });
}
429
430 #if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS_MULTIPROCESS)
// Uploads the current sample (m_sample) into |texture|. Prefers the
// GstVideoGLTextureUploadMeta fast path when the buffer provides one, and
// otherwise maps the frame and copies the pixel data. Callers hold
// m_sampleMutex while invoking this (m_sample is read here).
void MediaPlayerPrivateGStreamerBase::updateTexture(BitmapTextureGL& texture, GstVideoInfo& videoInfo)
{
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());

#if GST_CHECK_VERSION(1, 1, 0)
    GstVideoGLTextureUploadMeta* meta;
    if ((meta = gst_buffer_get_video_gl_texture_upload_meta(buffer))) {
        if (meta->n_textures == 1) { // BRGx & BGRA formats use only one texture.
            guint ids[4] = { texture.id(), 0, 0, 0 };

            // If the upload succeeds we are done; otherwise fall through to
            // the map-and-copy path below.
            if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                return;
        }
    }
#endif

    // Right now the TextureMapper only supports chromas with one plane
    ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);

    GstVideoFrame videoFrame;
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, GST_MAP_READ))
        return;

    int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0);
    const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0);
    texture.updateContents(srcData, WebCore::IntRect(0, 0, GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo)), WebCore::IntPoint(0, 0), stride, BitmapTexture::UpdateCannotModifyOriginalImageData);
    gst_video_frame_unmap(&videoFrame);
}
459 #endif
460
461 #if USE(COORDINATED_GRAPHICS_THREADED)
// Hands the current sample to the compositor through the platform layer
// proxy. With GSTREAMER_GL the GL texture is passed directly (wrapped in a
// GstVideoFrameHolder); otherwise the pixels are copied into a pooled
// texture. In the copy path, triggerRepaint() waits on m_drawCondition,
// which ConditionNotifier signals on every exit path of this function.
void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
#if !USE(GSTREAMER_GL)
    // RAII helper: signals m_drawCondition when this function returns,
    // regardless of which early return is taken.
    class ConditionNotifier {
    public:
        ConditionNotifier(Lock& lock, Condition& condition)
            : m_locker(lock), m_condition(condition)
        {
        }
        ~ConditionNotifier()
        {
            m_condition.notifyOne();
        }
    private:
        LockHolder m_locker;
        Condition& m_condition;
    };
    ConditionNotifier notifier(m_drawMutex, m_drawCondition);
#endif

    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    LockHolder holder(m_platformLayerProxy->lock());

    if (!m_platformLayerProxy->isActive())
        return;

#if USE(GSTREAMER_GL)
    std::unique_ptr<GstVideoFrameHolder> frameHolder = std::make_unique<GstVideoFrameHolder>(m_sample.get());
    if (UNLIKELY(!frameHolder->isValid()))
        return;

    // The holder keeps the GL-mapped frame alive while the compositor uses it.
    std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = std::make_unique<TextureMapperPlatformLayerBuffer>(frameHolder->textureID(), frameHolder->size(), frameHolder->flags());
    layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
    m_platformLayerProxy->pushNextBuffer(WTFMove(layerBuffer));
#else
    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (UNLIKELY(!caps))
        return;

    GstVideoInfo videoInfo;
    gst_video_info_init(&videoInfo);
    if (UNLIKELY(!gst_video_info_from_caps(&videoInfo, caps)))
        return;

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));

    // Reuse a pooled buffer of the right size when available; otherwise
    // create a fresh texture (and, lazily, the 3D context backing it).
    std::unique_ptr<TextureMapperPlatformLayerBuffer> buffer = m_platformLayerProxy->getAvailableBuffer(size, GraphicsContext3D::DONT_CARE);
    if (UNLIKELY(!buffer)) {
        if (UNLIKELY(!m_context3D))
            m_context3D = GraphicsContext3D::create(GraphicsContext3D::Attributes(), nullptr, GraphicsContext3D::RenderToCurrentGLContext);

        RefPtr<BitmapTexture> texture = adoptRef(new BitmapTextureGL(m_context3D));
        texture->reset(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
        buffer = std::make_unique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
    }
    updateTexture(buffer->textureGL(), videoInfo);
    m_platformLayerProxy->pushNextBuffer(WTFMove(buffer));
#endif
}
524 #endif
525
// Performs the repaint on the main thread: marks the accelerated platform
// layer dirty when accelerated rendering is in use, otherwise asks the
// MediaPlayer to repaint. With GSTREAMER_GL, triggerRepaint() blocks the
// streaming thread until m_drawCondition is signalled here.
void MediaPlayerPrivateGStreamerBase::repaint()
{
    ASSERT(m_sample);
    ASSERT(isMainThread());

#if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    if (supportsAcceleratedRendering() && m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player) && client()) {
        client()->setPlatformLayerNeedsDisplay();
#if USE(GSTREAMER_GL)
        // Unblock the streaming thread waiting in triggerRepaint().
        m_drawCondition.notifyOne();
#endif
        return;
    }
#endif

    m_player->repaint();

#if USE(GSTREAMER_GL)
    m_drawCondition.notifyOne();
#endif
}
547
// Entry point for new frames produced by the video sink. Publishes the
// sample under m_sampleMutex, then routes the repaint to the appropriate
// thread, blocking where a condition wait is used so the sample remains
// valid for the consumer.
void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
    {
        WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
        m_sample = sample;
    }

#if USE(COORDINATED_GRAPHICS_THREADED)
#if USE(GSTREAMER_GL)
    pushTextureToCompositor();
#else
    {
        LockHolder lock(m_drawMutex);
        if (!m_platformLayerProxy->scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); }))
            return;
        // Wait for the compositor thread to consume the sample;
        // pushTextureToCompositor() signals m_drawCondition on completion.
        m_drawCondition.wait(m_drawMutex);
    }
#endif
    return;
#else
#if USE(GSTREAMER_GL)
    {
        ASSERT(!isMainThread());

        // Bounce the repaint to the main thread via m_drawTimer and block
        // until repaint() signals completion.
        LockHolder locker(m_drawMutex);
        m_drawTimer.startOneShot(0);
        m_drawCondition.wait(m_drawMutex);
    }
#else
    repaint();
#endif
#endif
}
581
// "repaint-requested" signal handler for the fallback WebKit video sink.
void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
{
    player->triggerRepaint(sample);
}
586
587 #if USE(GSTREAMER_GL)
// "client-draw" signal handler for glimagesink; returning TRUE tells the
// sink the frame was handled.
gboolean MediaPlayerPrivateGStreamerBase::drawCallback(MediaPlayerPrivateGStreamerBase* player, GstGLContext*, GstSample* sample)
{
    player->triggerRepaint(sample);
    return TRUE;
}
593 #endif
594
// Remembers the rendering size requested by the player.
void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
{
    m_size = size;
}
599
// Software rendering path: draws the current sample into |context|. Not
// reachable when threaded coordinated graphics is in use, and skipped when
// an accelerated-compositing client is rendering the layer instead.
void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
{
#if USE(COORDINATED_GRAPHICS_THREADED)
    ASSERT_NOT_REACHED();
    return;
#elif USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
    if (client())
        return;
#endif

    if (context.paintingDisabled())
        return;

    if (!m_player->visible())
        return;

    // m_sample is shared with the streaming thread; lock while reading it.
    WTF::GMutexLocker<GMutex> lock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    RefPtr<ImageGStreamer> gstImage = ImageGStreamer::createImage(m_sample.get());
    if (!gstImage)
        return;

    // NOTE(review): reinterpret_cast here assumes gstImage->image() yields a
    // type layout-compatible with Image — confirm against ImageGStreamer.
    if (Image* image = reinterpret_cast<Image*>(gstImage->image().get()))
        context.drawImage(*image, rect, gstImage->rect(), CompositeCopy);
}
627
628 #if USE(TEXTURE_MAPPER_GL) && !USE(COORDINATED_GRAPHICS)
// Accelerated rendering path: draws the current sample with the texture
// mapper. With the fallback sink the pixels are copied into a pooled
// texture; with the GL sink the GL texture is drawn directly while the
// frame is kept mapped.
void MediaPlayerPrivateGStreamerBase::paintToTextureMapper(TextureMapper& textureMapper, const FloatRect& targetRect, const TransformationMatrix& matrix, float opacity)
{
    if (!m_player->visible())
        return;

    if (m_usingFallbackVideoSink) {
        RefPtr<BitmapTexture> texture;
        {
            // Hold m_sampleMutex only while reading the sample and filling
            // the texture; drawing happens after the lock is released.
            WTF::GMutexLocker<GMutex> lock(m_sampleMutex);

            if (!m_sample)
                return;

            GstCaps* caps = gst_sample_get_caps(m_sample.get());
            if (UNLIKELY(!caps))
                return;

            GstVideoInfo videoInfo;
            gst_video_info_init(&videoInfo);
            if (UNLIKELY(!gst_video_info_from_caps(&videoInfo, caps)))
                return;

            IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
            texture = textureMapper.acquireTextureFromPool(size, GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
            updateTexture(static_cast<BitmapTextureGL&>(*texture), videoInfo);
        }
        textureMapper.drawTexture(*texture, targetRect, matrix, opacity);
        return;
    }

#if USE(GSTREAMER_GL)
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return;

    GstVideoInfo videoInfo;
    gst_video_info_init(&videoInfo);
    if (!gst_video_info_from_caps(&videoInfo, caps))
        return;

    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    GstVideoFrame videoFrame;
    // Map as a GL texture; the frame stays mapped for the duration of the draw.
    if (!gst_video_frame_map(&videoFrame, &videoInfo, buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL)))
        return;

    // For GL-mapped frames, plane 0's data pointer holds the texture ID.
    unsigned textureID = *reinterpret_cast<unsigned*>(videoFrame.data[0]);
    TextureMapperGL::Flags flags = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? TextureMapperGL::ShouldBlend : 0;

    IntSize size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
    TextureMapperGL& textureMapperGL = reinterpret_cast<TextureMapperGL&>(textureMapper);
    textureMapperGL.drawTexture(textureID, flags, size, targetRect, matrix, opacity);
    gst_video_frame_unmap(&videoFrame);
#endif
}
686 #endif
687
// Fullscreen presentation is always allowed for this backend.
bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
    return true;
}
692
// No platform media object is exposed for the GStreamer backend.
PlatformMedia MediaPlayerPrivateGStreamerBase::platformMedia() const
{
    return NoPlatformMedia;
}
697
// Classifies the current stream: Unknown until any metadata is available,
// LiveStream when isLiveStream() reports a live source, Download otherwise.
MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
{
    if (m_readyState == MediaPlayer::HaveNothing)
        return MediaPlayer::Unknown;

    if (isLiveStream())
        return MediaPlayer::LiveStream;

    return MediaPlayer::Download;
}
708
// Creates the video sink for the pipeline. Prefers glimagesink when GL
// support is enabled and GStreamer is >= 1.5, otherwise falls back to
// WebKit's own sink. The chosen sink is wrapped in an fpsdisplaysink when
// available so frame statistics (decodedFrameCount/droppedFrameCount) can
// be queried. Returns the element to add to the pipeline (never null).
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    GstElement* videoSink = nullptr;
#if USE(GSTREAMER_GL)
    if (webkitGstCheckVersion(1, 5, 0)) {
        m_videoSink = gst_element_factory_make("glimagesink", nullptr);
        if (m_videoSink) {
            g_signal_connect_swapped(m_videoSink.get(), "client-draw", G_CALLBACK(drawCallback), this);
            videoSink = m_videoSink.get();
        }
    }
#endif

    if (!m_videoSink) {
        m_usingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
    }

    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        g_object_set(m_fpsSink.get(), "silent", TRUE , nullptr);

        // Turn off text overlay unless logging is enabled.
#if LOG_DISABLED
        g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#else
        if (!isLogChannelEnabled("Media"))
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE , nullptr);
#endif // LOG_DISABLED

        // Only use fpsdisplaysink if it can wrap our real sink; otherwise
        // discard it and use the sink directly.
        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);

    return videoSink;
}
754
755 void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
756 {
757     ASSERT(!m_volumeElement);
758     m_volumeElement = volume;
759
760     // We don't set the initial volume because we trust the sink to keep it for us. See
761     // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
762     if (!m_player->platformVolumeConfigurationRequired()) {
763         LOG_MEDIA_MESSAGE("Setting stream volume to %f", m_player->volume());
764         g_object_set(m_volumeElement.get(), "volume", m_player->volume(), NULL);
765     } else
766         LOG_MEDIA_MESSAGE("Not setting stream volume, trusting system one");
767
768     LOG_MEDIA_MESSAGE("Setting stream muted %d",  m_player->muted());
769     g_object_set(m_volumeElement.get(), "mute", m_player->muted(), NULL);
770
771     g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
772     g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
773 }
774
775 unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
776 {
777     guint64 decodedFrames = 0;
778     if (m_fpsSink)
779         g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, NULL);
780     return static_cast<unsigned>(decodedFrames);
781 }
782
783 unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
784 {
785     guint64 framesDropped = 0;
786     if (m_fpsSink)
787         g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, NULL);
788     return static_cast<unsigned>(framesDropped);
789 }
790
// Approximates the number of audio bytes decoded by querying the audio
// sink's position in bytes; 0 when there is no sink or the query fails.
unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    if (audioSink() && gst_element_query(audioSink(), query))
        gst_query_parse_position(query, 0, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}
802
// Approximates the number of video bytes decoded by querying the video
// sink's position in bytes; 0 when the query fails.
// NOTE(review): unlike audioDecodedByteCount(), m_videoSink is not
// null-checked here — presumably a sink always exists by the time this is
// queried; verify against callers.
unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    if (gst_element_query(m_videoSink.get(), query))
        gst_query_parse_position(query, 0, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}
814
815 }
816
817 #endif // USE(GSTREAMER)