[GStreamer] Add support to copy YUV video textures into images
Source/WebCore/platform/graphics/gstreamer/MediaPlayerPrivateGStreamerBase.cpp
/*
 * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
 * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
 * Copyright (C) 2007 Alp Toker <alp@atoker.com>
 * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
 * Copyright (C) 2009, 2010, 2015, 2016 Igalia S.L
 * Copyright (C) 2015, 2016 Metrological Group B.V.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"
#include "MediaPlayerPrivateGStreamerBase.h"

#if ENABLE(VIDEO) && USE(GSTREAMER)

#include "GStreamerCommon.h"
#include "GraphicsContext.h"
#include "ImageGStreamer.h"
#include "ImageOrientation.h"
#include "IntRect.h"
#include "Logging.h"
#include "MediaPlayer.h"
#include "NotImplemented.h"
#include "VideoSinkGStreamer.h"
#include "WebKitWebSourceGStreamer.h"
#include <wtf/glib/GUniquePtr.h>
#include <wtf/text/AtomString.h>
#include <wtf/text/CString.h>
#include <wtf/MathExtras.h>
#include <wtf/StringPrintStream.h>

#include <gst/audio/streamvolume.h>
#include <gst/video/gstvideometa.h>

#if ENABLE(ENCRYPTED_MEDIA)
#include "CDMInstance.h"
#include "GStreamerEMEUtilities.h"
#include "SharedBuffer.h"
#include "WebKitCommonEncryptionDecryptorGStreamer.h"
#endif

#if USE(GSTREAMER_GL)
#define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::NoConvert
#define GST_GL_CAPS_FORMAT "{ RGBx, RGBA, I420, Y444, YV12, Y41B, Y42B, NV12, NV21, VUYA }"

#include <gst/app/gstappsink.h>

#include "GLContext.h"
#if USE(GLX)
#include "GLContextGLX.h"
#include <gst/gl/x11/gstgldisplay_x11.h>
#endif

#if USE(EGL)
#include "GLContextEGL.h"
#include <gst/gl/egl/gstgldisplay_egl.h>
#endif

#if PLATFORM(X11)
#include "PlatformDisplayX11.h"
#endif

#if PLATFORM(WAYLAND)
#include "PlatformDisplayWayland.h"
#endif

#if USE(WPE_RENDERER)
#include "PlatformDisplayLibWPE.h"
#endif

// gstglapi.h may include eglplatform.h, which includes X.h, which
// defines None, breaking the MediaPlayer::None enum.
#if PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#undef None
#endif // PLATFORM(X11) && GST_GL_HAVE_PLATFORM_EGL
#include "VideoTextureCopierGStreamer.h"
#endif // USE(GSTREAMER_GL)

#if USE(TEXTURE_MAPPER_GL)
#include "BitmapTextureGL.h"
#include "BitmapTexturePool.h"
#include "GraphicsContext3D.h"
#include "TextureMapperContextAttributes.h"
#include "TextureMapperPlatformLayerBuffer.h"
#include "TextureMapperPlatformLayerProxy.h"
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
#include <cairo-gl.h>
#endif
#endif // USE(TEXTURE_MAPPER_GL)

GST_DEBUG_CATEGORY(webkit_media_player_debug);
#define GST_CAT_DEFAULT webkit_media_player_debug

namespace WebCore {
using namespace std;

#if USE(GSTREAMER_HOLEPUNCH)
static const FloatSize s_holePunchDefaultFrameSize(1280, 720);
#endif

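// Euclid's algorithm; returns a non-negative GCD (via ABS) so the
// aspect-ratio reduction below never produces negative dimensions.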
static int greatestCommonDivisor(int a, int b)
{
    while (b) {
        int temp = a;
        a = b;
        b = temp % b;
    }

    return ABS(a);
}

#if USE(TEXTURE_MAPPER_GL)
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
    explicit GstVideoFrameHolder(GstSample* sample, TextureMapperGL::Flags flags, bool gstGLEnabled)
    {
        RELEASE_ASSERT(GST_IS_SAMPLE(sample));

        GstVideoInfo videoInfo;
        if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
            return;

        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
        m_hasAlphaChannel = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo);
        m_buffer = gst_sample_get_buffer(sample);
        if (UNLIKELY(!GST_IS_BUFFER(m_buffer)))
            return;

#if USE(GSTREAMER_GL)
        m_flags = flags | (m_hasAlphaChannel ? TextureMapperGL::ShouldBlend : 0);

        if (gstGLEnabled) {
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer, static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL));
            if (m_isMapped) {
                m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
                m_hasMappedTextures = true;
            }
        } else
#else
        UNUSED_PARAM(flags);
        UNUSED_PARAM(gstGLEnabled);
#endif // USE(GSTREAMER_GL)

        {
            m_textureID = 0;
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer, GST_MAP_READ);
            if (m_isMapped) {
                // Right now the TextureMapper only supports pixel formats with a single plane.
                ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
            }
        }
    }

    virtual ~GstVideoFrameHolder()
    {
        if (UNLIKELY(!m_isMapped))
            return;

        gst_video_frame_unmap(&m_videoFrame);
    }

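    // The producer (e.g. a GL-based decoder) may attach a GstGLSyncMeta to the
    // buffer. Waiting on it here ensures the GPU has finished writing the
    // texture before anything on our side samples from it.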
    virtual void waitForCPUSync()
    {
        GstGLSyncMeta* meta = gst_buffer_get_gl_sync_meta(m_buffer);
        if (meta) {
            GstMemory* mem = gst_buffer_peek_memory(m_buffer, 0);
            GstGLContext* context = ((GstGLBaseMemory*)mem)->context;
            gst_gl_sync_meta_wait_cpu(meta, context);
        }
    }

    const IntSize& size() const { return m_size; }
    bool hasAlphaChannel() const { return m_hasAlphaChannel; }
    TextureMapperGL::Flags flags() const { return m_flags; }
    GLuint textureID() const { return m_textureID; }
    bool hasMappedTextures() const { return m_hasMappedTextures; }
    const GstVideoFrame& videoFrame() const { return m_videoFrame; }

    void updateTexture(BitmapTextureGL& texture)
    {
        ASSERT(!m_textureID);
        GstVideoGLTextureUploadMeta* meta;
        if (m_buffer && (meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer))) {
            if (meta->n_textures == 1) { // BGRx & BGRA formats use only one texture.
                guint ids[4] = { texture.id(), 0, 0, 0 };

                if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                    return;
            }
        }

        if (!m_isMapped)
            return;

        int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 0);
        const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0);

        if (!srcData)
            return;

        texture.updateContents(srcData, WebCore::IntRect(0, 0, m_size.width(), m_size.height()), WebCore::IntPoint(0, 0), stride);
    }

    std::unique_ptr<TextureMapperPlatformLayerBuffer> platformLayerBuffer()
    {
        if (!m_hasMappedTextures)
            return nullptr;

        using Buffer = TextureMapperPlatformLayerBuffer;

        if ((GST_VIDEO_INFO_IS_RGB(&m_videoFrame.info) && GST_VIDEO_INFO_N_PLANES(&m_videoFrame.info) == 1))
            return makeUnique<Buffer>(Buffer::TextureVariant { Buffer::RGBTexture { *static_cast<GLuint*>(m_videoFrame.data[0]) } }, m_size, m_flags, GraphicsContext3D::RGBA);

        if (GST_VIDEO_INFO_IS_YUV(&m_videoFrame.info)) {
            if (GST_VIDEO_INFO_N_COMPONENTS(&m_videoFrame.info) < 3 || GST_VIDEO_INFO_N_PLANES(&m_videoFrame.info) > 3)
                return nullptr;

            unsigned numberOfPlanes = GST_VIDEO_INFO_N_PLANES(&m_videoFrame.info);
            std::array<GLuint, 3> planes;
            std::array<unsigned, 3> yuvPlane;
            std::array<unsigned, 3> yuvPlaneOffset;
            for (unsigned i = 0; i < numberOfPlanes; ++i)
                planes[i] = *static_cast<GLuint*>(m_videoFrame.data[i]);
            for (unsigned i = 0; i < 3; ++i) {
                yuvPlane[i] = GST_VIDEO_INFO_COMP_PLANE(&m_videoFrame.info, i);
                yuvPlaneOffset[i] = GST_VIDEO_INFO_COMP_POFFSET(&m_videoFrame.info, i);
            }

            std::array<GLfloat, 9> yuvToRgb;
            if (gst_video_colorimetry_matches(&GST_VIDEO_INFO_COLORIMETRY(&m_videoFrame.info), GST_VIDEO_COLORIMETRY_BT709)) {
                yuvToRgb = {
                    1.164f,  0.0f,    1.787f,
                    1.164f, -0.213f, -0.531f,
                    1.164f,  2.112f,  0.0f
                };
            } else {
                // Default to bt601. This is the same behaviour as GStreamer's glcolorconvert element.
                yuvToRgb = {
                    1.164f,  0.0f,    1.596f,
                    1.164f, -0.391f, -0.813f,
                    1.164f,  2.018f,  0.0f
                };
            }
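            // These are the standard video-range (16-235) YCbCr-to-RGB
            // matrices; e.g. for bt601:
            //   R = 1.164 * (Y - 16/255) + 1.596 * (Cr - 128/255)
            //   G = 1.164 * (Y - 16/255) - 0.391 * (Cb - 128/255) - 0.813 * (Cr - 128/255)
            //   B = 1.164 * (Y - 16/255) + 2.018 * (Cb - 128/255)
            // The -16/255 and -128/255 offsets are not part of the matrix and
            // are presumably applied by the consumer of YUVTexture.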

            return makeUnique<Buffer>(Buffer::TextureVariant { Buffer::YUVTexture { numberOfPlanes, planes, yuvPlane, yuvPlaneOffset, yuvToRgb } }, m_size, m_flags, GraphicsContext3D::RGBA);
        }

        return nullptr;
    }

private:
    GstBuffer* m_buffer;
    GstVideoFrame m_videoFrame { };
    IntSize m_size;
    bool m_hasAlphaChannel;
    TextureMapperGL::Flags m_flags { };
    GLuint m_textureID { 0 };
    bool m_isMapped { false };
    bool m_hasMappedTextures { false };
};
#endif

void MediaPlayerPrivateGStreamerBase::initializeDebugCategory()
{
    GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
}

MediaPlayerPrivateGStreamerBase::MediaPlayerPrivateGStreamerBase(MediaPlayer* player)
    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
    , m_player(player)
    , m_fpsSink(nullptr)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_networkState(MediaPlayer::Empty)
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamerBase::repaint)
#if USE(TEXTURE_MAPPER_GL)
#if USE(NICOSIA)
    , m_nicosiaLayer(Nicosia::ContentLayer::create(Nicosia::ContentLayerTextureMapperImpl::createFactory(*this)))
#else
    , m_platformLayerProxy(adoptRef(new TextureMapperPlatformLayerProxy()))
#endif
#endif
{
}

MediaPlayerPrivateGStreamerBase::~MediaPlayerPrivateGStreamerBase()
{
#if USE(GSTREAMER_GL)
    if (m_videoDecoderPlatform == WebKitGstVideoDecoderPlatform::Video4Linux)
        flushCurrentBuffer();
#endif
#if USE(TEXTURE_MAPPER_GL) && USE(NICOSIA)
    downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).invalidateClient();
#endif

    m_notifier->invalidate();

    if (m_videoSink) {
        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
#if USE(GSTREAMER_GL)
        if (GST_IS_BIN(m_videoSink.get())) {
            GRefPtr<GstElement> appsink = adoptGRef(gst_bin_get_by_name(GST_BIN_CAST(m_videoSink.get()), "webkit-gl-video-sink"));
            g_signal_handlers_disconnect_by_data(appsink.get(), this);
        }
#endif
    }

    if (m_volumeElement)
        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    // This will release the GStreamer thread from m_drawCondition in non-AC mode in case there's an ongoing triggerRepaint call
    // waiting there, and ensure that any triggerRepaint call reaching the lock won't wait on m_drawCondition.
    cancelRepaint(true);

#if ENABLE(ENCRYPTED_MEDIA)
    m_cdmAttachmentSemaphore.signal();
#endif

    // The change to GST_STATE_NULL state is always synchronous, so after this gets executed we don't need to worry
    // about handlers running in the GStreamer thread.
    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);

    m_player = nullptr;
}

void MediaPlayerPrivateGStreamerBase::setPipeline(GstElement* pipeline)
{
    m_pipeline = pipeline;

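    // The sync handler runs on the thread that posts the message; returning
    // GST_BUS_DROP consumes messages handled synchronously here, while
    // GST_BUS_PASS lets the rest reach the regular asynchronous bus watch.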
    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
        auto& player = *static_cast<MediaPlayerPrivateGStreamerBase*>(userData);

        if (player.handleSyncMessage(message)) {
            gst_message_unref(message);
            return GST_BUS_DROP;
        }

        return GST_BUS_PASS;
    }, this, nullptr);
}

bool MediaPlayerPrivateGStreamerBase::handleSyncMessage(GstMessage* message)
{
    if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
        return false;

    const gchar* contextType;
    gst_message_parse_context_type(message, &contextType);
    GST_DEBUG_OBJECT(pipeline(), "Handling %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message));

    if (!g_strcmp0(contextType, WEBKIT_WEB_SRC_PLAYER_CONTEXT_TYPE_NAME)) {
        GRefPtr<GstContext> context = adoptGRef(gst_context_new(WEBKIT_WEB_SRC_PLAYER_CONTEXT_TYPE_NAME, FALSE));
        GstStructure* contextStructure = gst_context_writable_structure(context.get());

        ASSERT(m_player);
        gst_structure_set(contextStructure, "player", G_TYPE_POINTER, m_player, nullptr);
        gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
        return true;
    }

#if USE(GSTREAMER_GL)
    GRefPtr<GstContext> elementContext = adoptGRef(requestGLContext(contextType));
    if (elementContext) {
        gst_element_set_context(GST_ELEMENT(message->src), elementContext.get());
        return true;
    }
#endif // USE(GSTREAMER_GL)

#if ENABLE(ENCRYPTED_MEDIA)
    if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
        if (isMainThread()) {
            GST_ERROR("can't handle drm-preferred-decryption-system-id need-context message in the main thread");
            ASSERT_NOT_REACHED();
            return false;
        }
        GST_DEBUG_OBJECT(pipeline(), "handling drm-preferred-decryption-system-id need-context message");

        InitData initData;
        {
            LockHolder lock(m_protectionMutex);
            ProtectionSystemEvents protectionSystemEvents(message);
            GST_TRACE("found %zu protection events, %zu decryptors available", protectionSystemEvents.events().size(), protectionSystemEvents.availableSystems().size());

            for (auto& event : protectionSystemEvents.events()) {
                const char* eventKeySystemId = nullptr;
                GstBuffer* data = nullptr;
                gst_event_parse_protection(event.get(), &eventKeySystemId, &data, nullptr);

                initData.append({eventKeySystemId, data});
                m_handledProtectionEvents.add(GST_EVENT_SEQNUM(event.get()));
            }
        }
        initializationDataEncountered(WTFMove(initData));

        GST_INFO_OBJECT(pipeline(), "waiting for a CDM instance");
        if (m_cdmAttachmentSemaphore.waitFor(4_s)
            && m_notifier->isValid() // Check the player is not being destroyed.
            && !m_cdmInstance->keySystem().isEmpty()) {
            const char* preferredKeySystemUuid = GStreamerEMEUtilities::keySystemToUuid(m_cdmInstance->keySystem());
            GST_INFO_OBJECT(pipeline(), "working with key system %s, continuing with key system %s on %s", m_cdmInstance->keySystem().utf8().data(), preferredKeySystemUuid, GST_MESSAGE_SRC_NAME(message));

            GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
            GstStructure* contextStructure = gst_context_writable_structure(context.get());
            gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, preferredKeySystemUuid, nullptr);
            gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
        } else
            GST_WARNING("CDM instance not initialized");

        return true;
    }
#endif // ENABLE(ENCRYPTED_MEDIA)

    return false;
}

#if USE(GSTREAMER_GL)
GstContext* MediaPlayerPrivateGStreamerBase::requestGLContext(const char* contextType)
{
    if (!ensureGstGLContext())
        return nullptr;

    if (!g_strcmp0(contextType, GST_GL_DISPLAY_CONTEXT_TYPE)) {
        GstContext* displayContext = gst_context_new(GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
        gst_context_set_gl_display(displayContext, gstGLDisplay());
        return displayContext;
    }

    if (!g_strcmp0(contextType, "gst.gl.app_context")) {
        GstContext* appContext = gst_context_new("gst.gl.app_context", TRUE);
        GstStructure* structure = gst_context_writable_structure(appContext);
#if GST_CHECK_VERSION(1, 12, 0)
        gst_structure_set(structure, "context", GST_TYPE_GL_CONTEXT, gstGLContext(), nullptr);
#else
        gst_structure_set(structure, "context", GST_GL_TYPE_CONTEXT, gstGLContext(), nullptr);
#endif
        return appContext;
    }

    return nullptr;
}

bool MediaPlayerPrivateGStreamerBase::ensureGstGLContext()
{
    if (m_glContext)
        return true;

    auto& sharedDisplay = PlatformDisplay::sharedDisplayForCompositing();

    // The floating ref removal support was added in https://bugzilla.gnome.org/show_bug.cgi?id=743062.
    bool shouldAdoptRef = webkitGstCheckVersion(1, 14, 0);
    if (!m_glDisplay) {
#if PLATFORM(X11)
#if USE(GLX)
        if (is<PlatformDisplayX11>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared GL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_x11_new_with_display(downcast<PlatformDisplayX11>(sharedDisplay).native()));
        }
#elif USE(EGL)
        if (is<PlatformDisplayX11>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating X11 shared EGL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayX11>(sharedDisplay).eglDisplay()));
        }
#endif
#endif

#if PLATFORM(WAYLAND)
        if (is<PlatformDisplayWayland>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating Wayland shared display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayWayland>(sharedDisplay).eglDisplay()));
        }
#endif

#if USE(WPE_RENDERER)
        if (is<PlatformDisplayLibWPE>(sharedDisplay)) {
            GST_DEBUG_OBJECT(pipeline(), "Creating WPE shared EGL display");
            if (shouldAdoptRef)
                m_glDisplay = adoptGRef(GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayLibWPE>(sharedDisplay).eglDisplay())));
            else
                m_glDisplay = GST_GL_DISPLAY(gst_gl_display_egl_new_with_egl_display(downcast<PlatformDisplayLibWPE>(sharedDisplay).eglDisplay()));
        }
#endif

        ASSERT(m_glDisplay);
    }

    GLContext* webkitContext = sharedDisplay.sharingGLContext();
    // EGL and GLX are mutually exclusive, no need for ifdefs here.
    GstGLPlatform glPlatform = webkitContext->isEGLContext() ? GST_GL_PLATFORM_EGL : GST_GL_PLATFORM_GLX;

#if USE(OPENGL_ES)
    GstGLAPI glAPI = GST_GL_API_GLES2;
#elif USE(OPENGL)
    GstGLAPI glAPI = GST_GL_API_OPENGL;
#else
    ASSERT_NOT_REACHED();
#endif

    PlatformGraphicsContext3D contextHandle = webkitContext->platformContext();
    if (!contextHandle)
        return false;

    if (shouldAdoptRef)
        m_glContext = adoptGRef(gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI));
    else
        m_glContext = gst_gl_context_new_wrapped(m_glDisplay.get(), reinterpret_cast<guintptr>(contextHandle), glPlatform, glAPI);

    // Activate and fill the GStreamer wrapped context with WebKit's shared one.
    auto previousActiveContext = GLContext::current();
    webkitContext->makeContextCurrent();
    if (gst_gl_context_activate(m_glContext.get(), TRUE)) {
        GUniqueOutPtr<GError> error;
        if (!gst_gl_context_fill_info(m_glContext.get(), &error.outPtr()))
            GST_WARNING("Failed to fill in GStreamer context: %s", error->message);
        gst_gl_context_activate(m_glContext.get(), FALSE);
    } else
        GST_WARNING("Failed to activate GStreamer context %" GST_PTR_FORMAT, m_glContext.get());
    if (previousActiveContext)
        previousActiveContext->makeContextCurrent();

    return true;
}
#endif // USE(GSTREAMER_GL)

// Returns the natural size of the video.
FloatSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
    ASSERT(isMainThread());
#if USE(GSTREAMER_HOLEPUNCH)
    // When using the hole punch we may not be able to get the video frame size, so we can't use
    // it. But we need to report some non-empty naturalSize for the player's GraphicsLayer
    // to be properly created.
    return s_holePunchDefaultFrameSize;
#endif

    if (!hasVideo())
        return FloatSize();

    if (!m_videoSize.isEmpty())
        return m_videoSize;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return FloatSize();

    GstCaps* caps = gst_sample_get_caps(m_sample.get());
    if (!caps)
        return FloatSize();

    m_videoSize = naturalSizeFromCaps(caps);
    GST_DEBUG_OBJECT(pipeline(), "Natural size: %.0fx%.0f", m_videoSize.width(), m_videoSize.height());
    return m_videoSize;
}

FloatSize MediaPlayerPrivateGStreamerBase::naturalSizeFromCaps(GstCaps* caps) const
{
    ASSERT(caps);

    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size. If this fails, the
    // video sink has likely not yet negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return FloatSize();

#if USE(TEXTURE_MAPPER_GL)
    // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
    if (m_renderingCanBeAccelerated) {
        if (m_videoSourceOrientation.usesWidthAsHeight())
            originalSize = originalSize.transposedSize();
    }
#endif

    GST_DEBUG_OBJECT(pipeline(), "Original video size: %dx%d", originalSize.width(), originalSize.height());
    GST_DEBUG_OBJECT(pipeline(), "Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate the display aspect ratio (DAR) from the PAR and the video size.
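    // Worked example (illustrative numbers, not tied to any particular stream):
    // a 720x576 frame with a 16/15 PAR gives DAR = (720*16):(576*15) = 4:3;
    // since 576 is divisible by 3, the original height is kept and the
    // resulting natural size is 768x576.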
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

    // Apply DAR to the original video size. This is the same behavior as in xvimagesink's setcaps function.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        GST_DEBUG_OBJECT(pipeline(), "Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        GST_DEBUG_OBJECT(pipeline(), "Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        GST_DEBUG_OBJECT(pipeline(), "Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    return FloatSize(static_cast<int>(width), static_cast<int>(height));
}

void MediaPlayerPrivateGStreamerBase::setVolume(float volume)
{
    if (!m_volumeElement)
        return;

    GST_DEBUG_OBJECT(pipeline(), "Setting volume: %f", volume);
    gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_LINEAR, static_cast<double>(volume));
}

float MediaPlayerPrivateGStreamerBase::volume() const
{
    if (!m_volumeElement)
        return 0;

    return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_LINEAR);
}

void MediaPlayerPrivateGStreamerBase::notifyPlayerOfVolumeChange()
{
    if (!m_player || !m_volumeElement)
        return;

    double volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_LINEAR);
    // get_volume() can return values greater than 1.0 if the user applies
    // software gain via a third-party application (GNOME volume control, for
    // instance).
    volume = CLAMP(volume, 0.0, 1.0);
    m_player->volumeChanged(static_cast<float>(volume));
}

void MediaPlayerPrivateGStreamerBase::volumeChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::volume signal.
    GST_DEBUG_OBJECT(player->pipeline(), "Volume changed to: %f", player->volume());

    player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] {
        player->notifyPlayerOfVolumeChange();
    });
}

MediaPlayer::NetworkState MediaPlayerPrivateGStreamerBase::networkState() const
{
    return m_networkState;
}

MediaPlayer::ReadyState MediaPlayerPrivateGStreamerBase::readyState() const
{
    return m_readyState;
}

void MediaPlayerPrivateGStreamerBase::setMuted(bool mute)
{
    if (!m_volumeElement)
        return;

    bool currentValue = muted();
    if (currentValue == mute)
        return;

    GST_INFO_OBJECT(pipeline(), "Set muted to %s", toString(mute).utf8().data());
    g_object_set(m_volumeElement.get(), "mute", mute, nullptr);
}

bool MediaPlayerPrivateGStreamerBase::muted() const
{
    if (!m_volumeElement)
        return false;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
    GST_INFO_OBJECT(pipeline(), "Player is muted: %s", toString(static_cast<bool>(muted)).utf8().data());
    return muted;
}

void MediaPlayerPrivateGStreamerBase::notifyPlayerOfMute()
{
    if (!m_player || !m_volumeElement)
        return;

    gboolean muted;
    g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
    m_player->muteChanged(static_cast<bool>(muted));
}

void MediaPlayerPrivateGStreamerBase::muteChangedCallback(MediaPlayerPrivateGStreamerBase* player)
{
    // This is called when m_volumeElement receives the notify::mute signal.
    player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] {
        player->notifyPlayerOfMute();
    });
}

void MediaPlayerPrivateGStreamerBase::acceleratedRenderingStateChanged()
{
    m_renderingCanBeAccelerated = m_player && m_player->client().mediaPlayerAcceleratedCompositingEnabled();
}

#if USE(TEXTURE_MAPPER_GL)
PlatformLayer* MediaPlayerPrivateGStreamerBase::platformLayer() const
{
#if USE(NICOSIA)
    return m_nicosiaLayer.ptr();
#else
    return const_cast<MediaPlayerPrivateGStreamerBase*>(this);
#endif
}

#if USE(NICOSIA)
void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
#if USE(GSTREAMER_HOLEPUNCH)
    pushNextHolePunchBuffer();
#endif
}
#else
RefPtr<TextureMapperPlatformLayerProxy> MediaPlayerPrivateGStreamerBase::proxy() const
{
    return m_platformLayerProxy.copyRef();
}

void MediaPlayerPrivateGStreamerBase::swapBuffersIfNeeded()
{
#if USE(GSTREAMER_HOLEPUNCH)
    pushNextHolePunchBuffer();
#endif
}
#endif

void MediaPlayerPrivateGStreamerBase::pushTextureToCompositor()
{
    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

    auto proxyOperation =
        [this](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder holder(proxy.lock());

            if (!proxy.isActive())
                return;

            std::unique_ptr<GstVideoFrameHolder> frameHolder = makeUnique<GstVideoFrameHolder>(m_sample.get(), m_textureMapperFlags, !m_usingFallbackVideoSink);

            std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer;
            if (frameHolder->hasMappedTextures()) {
                layerBuffer = frameHolder->platformLayerBuffer();
                if (!layerBuffer)
                    return;
                layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
            } else {
                layerBuffer = proxy.getAvailableBuffer(frameHolder->size(), GL_DONT_CARE);
                if (UNLIKELY(!layerBuffer)) {
                    auto texture = BitmapTextureGL::create(TextureMapperContextAttributes::get());
                    texture->reset(frameHolder->size(), frameHolder->hasAlphaChannel() ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
                    layerBuffer = makeUnique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
                }
                frameHolder->updateTexture(layerBuffer->textureGL());
                layerBuffer->setExtraFlags(m_textureMapperFlags | (frameHolder->hasAlphaChannel() ? TextureMapperGL::ShouldBlend : 0));
            }
            proxy.pushNextBuffer(WTFMove(layerBuffer));
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
#endif // USE(TEXTURE_MAPPER_GL)

void MediaPlayerPrivateGStreamerBase::repaint()
{
    ASSERT(m_sample);
    ASSERT(isMainThread());

    m_player->repaint();

    LockHolder lock(m_drawMutex);
    m_drawCondition.notifyOne();
}

bool MediaPlayerPrivateGStreamerBase::doSamplesHaveDifferentNaturalSizes(GstSample* sampleA, GstSample* sampleB) const
{
    ASSERT(sampleA);
    ASSERT(sampleB);

    GstCaps* capsA = gst_sample_get_caps(sampleA);
    GstCaps* capsB = gst_sample_get_caps(sampleB);

    if (LIKELY(capsA == capsB))
        return false;

    return naturalSizeFromCaps(capsA) != naturalSizeFromCaps(capsB);
}

void MediaPlayerPrivateGStreamerBase::triggerRepaint(GstSample* sample)
{
    bool triggerResize;
    {
        auto sampleLocker = holdLock(m_sampleMutex);
        triggerResize = !m_sample || doSamplesHaveDifferentNaturalSizes(m_sample.get(), sample);
        if (triggerResize)
            m_videoSize = FloatSize(); // Force re-calculation in the next call to naturalSize().
        m_sample = sample;
    }

    if (triggerResize) {
        GST_DEBUG_OBJECT(pipeline(), "First sample or new size reached the sink, triggering video dimensions update");
        m_notifier->notify(MainThreadNotification::SizeChanged, [this] {
            m_player->sizeChanged();
        });
    }

    if (!m_renderingCanBeAccelerated) {
        LockHolder locker(m_drawMutex);
        if (m_destroying)
            return;
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
        return;
    }

#if USE(TEXTURE_MAPPER_GL)
    if (m_usingFallbackVideoSink) {
        LockHolder lock(m_drawMutex);
        auto proxyOperation =
            [this](TextureMapperPlatformLayerProxy& proxy)
            {
                return proxy.scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); });
            };
#if USE(NICOSIA)
        if (!proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy()))
            return;
#else
        if (!proxyOperation(*m_platformLayerProxy))
            return;
#endif
        m_drawTimer.startOneShot(0_s);
        m_drawCondition.wait(m_drawMutex);
    } else
        pushTextureToCompositor();
#endif // USE(TEXTURE_MAPPER_GL)
}

void MediaPlayerPrivateGStreamerBase::repaintCallback(MediaPlayerPrivateGStreamerBase* player, GstSample* sample)
{
    player->triggerRepaint(sample);
}

void MediaPlayerPrivateGStreamerBase::cancelRepaint(bool destroying)
{
    // The goal of this function is to release the GStreamer thread from m_drawCondition in triggerRepaint() in the non-AC case,
    // to avoid a deadlock if the player gets paused while waiting for drawing (see https://bugs.webkit.org/show_bug.cgi?id=170003):
    // the main thread is waiting for the GStreamer thread to pause, but the GStreamer thread is locked waiting for the
    // main thread to draw. This deadlock doesn't happen when using AC because the sample is processed (not painted) in the compositor
    // thread, so the main thread can request the pause and wait if the GStreamer thread is waiting for the compositor thread.
    //
    // This function is also used when destroying the player (destroying parameter is true), to release the GStreamer thread from
    // m_drawCondition and to ensure that new triggerRepaint calls won't wait on m_drawCondition.
    if (!m_renderingCanBeAccelerated) {
        LockHolder locker(m_drawMutex);
        m_drawTimer.stop();
        m_destroying = destroying;
        m_drawCondition.notifyOne();
    }
}

void MediaPlayerPrivateGStreamerBase::repaintCancelledCallback(MediaPlayerPrivateGStreamerBase* player)
{
    player->cancelRepaint();
}

#if USE(GSTREAMER_GL)
GstFlowReturn MediaPlayerPrivateGStreamerBase::newSampleCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_sample(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}

GstFlowReturn MediaPlayerPrivateGStreamerBase::newPrerollCallback(GstElement* sink, MediaPlayerPrivateGStreamerBase* player)
{
    GRefPtr<GstSample> sample = adoptGRef(gst_app_sink_pull_preroll(GST_APP_SINK(sink)));
    player->triggerRepaint(sample.get());
    return GST_FLOW_OK;
}

void MediaPlayerPrivateGStreamerBase::flushCurrentBuffer()
{
    auto sampleLocker = holdLock(m_sampleMutex);

    if (m_sample) {
        // Replace the current sample with a new one carrying only the caps, so the dummy sample can still be used to get the dimensions.
        // This prevents resizing problems when the video changes its quality and a DRAIN is performed.
        const GstStructure* info = gst_sample_get_info(m_sample.get());
        m_sample = adoptGRef(gst_sample_new(nullptr, gst_sample_get_caps(m_sample.get()),
            gst_sample_get_segment(m_sample.get()), info ? gst_structure_copy(info) : nullptr));
    }

    bool shouldWait = m_videoDecoderPlatform == WebKitGstVideoDecoderPlatform::Video4Linux;
    auto proxyOperation = [shouldWait, pipeline = pipeline()](TextureMapperPlatformLayerProxy& proxy) {
        GST_DEBUG_OBJECT(pipeline, "Flushing video sample %s", shouldWait ? "synchronously" : "");
        LockHolder locker(!shouldWait ? &proxy.lock() : nullptr);

        if (proxy.isActive())
            proxy.dropCurrentBufferWhilePreservingTexture(shouldWait);
    };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
#endif

void MediaPlayerPrivateGStreamerBase::setSize(const IntSize& size)
{
    m_size = size;
}

void MediaPlayerPrivateGStreamerBase::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (context.paintingDisabled())
        return;

    if (!m_player->visible())
        return;

    auto sampleLocker = holdLock(m_sampleMutex);
    if (!GST_IS_SAMPLE(m_sample.get()))
        return;

#if USE(GSTREAMER_GL)
    // Ensure the input is RGBA. The GL video sink handles YUV formats natively,
    // so the conversion to RGBA has to be done on demand here.
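    // The converter and its caps are cached in m_colorConvert and
    // m_colorConvertInputCaps, so the (re)configuration cost below is paid only
    // for the first painted frame and whenever the input caps change.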
    GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
    if (UNLIKELY(!GST_IS_BUFFER(buffer)))
        return;

    GstCaps* caps = gst_sample_get_caps(m_sample.get());

    GstVideoInfo videoInfo;
    gst_video_info_init(&videoInfo);
    if (!gst_video_info_from_caps(&videoInfo, caps))
        return;

    if (!GST_VIDEO_INFO_IS_RGB(&videoInfo)) {
        if (!m_colorConvert) {
            GstMemory* mem = gst_buffer_peek_memory(buffer, 0);
            GstGLContext* context = ((GstGLBaseMemory*)mem)->context;
            m_colorConvert = adoptGRef(gst_gl_color_convert_new(context));
        }

        if (!m_colorConvertInputCaps || !gst_caps_is_equal(m_colorConvertInputCaps.get(), caps)) {
            m_colorConvertInputCaps = caps;
            m_colorConvertOutputCaps = adoptGRef(gst_caps_copy(caps));
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
            const gchar* formatString = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? "RGBA" : "BGRx";
#else
            const gchar* formatString = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? "RGBA" : "RGBx";
#endif
            gst_caps_set_simple(m_colorConvertOutputCaps.get(), "format", G_TYPE_STRING, formatString, nullptr);
            if (!gst_gl_color_convert_set_caps(m_colorConvert.get(), caps, m_colorConvertOutputCaps.get()))
                return;
        }

        GRefPtr<GstBuffer> rgbBuffer = adoptGRef(gst_gl_color_convert_perform(m_colorConvert.get(), buffer));
        if (UNLIKELY(!GST_IS_BUFFER(rgbBuffer.get())))
            return;

        const GstStructure* info = gst_sample_get_info(m_sample.get());
        m_sample = adoptGRef(gst_sample_new(rgbBuffer.get(), m_colorConvertOutputCaps.get(),
            gst_sample_get_segment(m_sample.get()), info ? gst_structure_copy(info) : nullptr));
    }
#endif

    auto gstImage = ImageGStreamer::createImage(m_sample.get());
    if (!gstImage)
        return;

    context.drawImage(gstImage->image(), rect, gstImage->rect(), { CompositeCopy, m_renderingCanBeAccelerated ? m_videoSourceOrientation : ImageOrientation() });
}

#if USE(GSTREAMER_GL)
bool MediaPlayerPrivateGStreamerBase::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    UNUSED_PARAM(context);

    if (m_usingFallbackVideoSink)
        return false;

    if (premultiplyAlpha)
        return false;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return false;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = makeUnique<GstVideoFrameHolder>(m_sample.get(), m_textureMapperFlags, true);

    std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = frameHolder->platformLayerBuffer();
    if (!layerBuffer)
        return false;

    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = makeUnique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    frameHolder->waitForCPUSync();

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(*layerBuffer.get(), size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);
}

NativeImagePtr MediaPlayerPrivateGStreamerBase::nativeImageForCurrentTime()
{
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
    if (m_usingFallbackVideoSink)
        return nullptr;

    auto sampleLocker = holdLock(m_sampleMutex);

    if (!GST_IS_SAMPLE(m_sample.get()))
        return nullptr;

    std::unique_ptr<GstVideoFrameHolder> frameHolder = makeUnique<GstVideoFrameHolder>(m_sample.get(), m_textureMapperFlags, true);

    std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = frameHolder->platformLayerBuffer();
    if (!layerBuffer)
        return nullptr;

    auto size = frameHolder->size();
    if (m_videoSourceOrientation.usesWidthAsHeight())
        size = size.transposedSize();

    GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
    context->makeContextCurrent();

    if (!m_videoTextureCopier)
        m_videoTextureCopier = makeUnique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);

    frameHolder->waitForCPUSync();

    if (!m_videoTextureCopier->copyVideoTextureToPlatformTexture(*layerBuffer.get(), size, 0, GraphicsContext3D::TEXTURE_2D, 0, GraphicsContext3D::RGBA, GraphicsContext3D::RGBA, GraphicsContext3D::UNSIGNED_BYTE, false, m_videoSourceOrientation))
        return nullptr;

    return adoptRef(cairo_gl_surface_create_for_texture(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, m_videoTextureCopier->resultTexture(), size.width(), size.height()));
#else
    return nullptr;
#endif
}
#endif // USE(GSTREAMER_GL)

void MediaPlayerPrivateGStreamerBase::setVideoSourceOrientation(ImageOrientation orientation)
{
    if (m_videoSourceOrientation == orientation)
        return;

    m_videoSourceOrientation = orientation;
#if USE(TEXTURE_MAPPER_GL)
    updateTextureMapperFlags();
#endif
}

#if USE(TEXTURE_MAPPER_GL)
void MediaPlayerPrivateGStreamerBase::updateTextureMapperFlags()
{
    switch (m_videoSourceOrientation) {
    case ImageOrientation::OriginTopLeft:
        m_textureMapperFlags = 0;
        break;
    case ImageOrientation::OriginRightTop:
        m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture90;
        break;
    case ImageOrientation::OriginBottomRight:
        m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture180;
        break;
    case ImageOrientation::OriginLeftBottom:
        m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture270;
        break;
    default:
        // FIXME: Handle OriginTopRight, OriginBottomLeft, OriginLeftTop and OriginRightBottom?
        m_textureMapperFlags = 0;
        break;
    }
}
#endif

bool MediaPlayerPrivateGStreamerBase::supportsFullscreen() const
{
    return true;
}

MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamerBase::movieLoadType() const
{
    if (m_readyState == MediaPlayer::HaveNothing)
        return MediaPlayer::Unknown;

    if (isLiveStream())
        return MediaPlayer::LiveStream;

    return MediaPlayer::Download;
}

#if USE(GSTREAMER_GL)
GstElement* MediaPlayerPrivateGStreamerBase::createGLAppSink()
{
    GstElement* appsink = gst_element_factory_make("appsink", "webkit-gl-video-sink");
    if (!appsink)
        return nullptr;

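    // enable-last-sample=FALSE avoids appsink keeping an extra reference to
    // every frame, and max-buffers=1 keeps the internal queue from buffering
    // more than one decoded frame ahead of us.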
    g_object_set(appsink, "enable-last-sample", FALSE, "emit-signals", TRUE, "max-buffers", 1, nullptr);
    g_signal_connect(appsink, "new-sample", G_CALLBACK(newSampleCallback), this);
    g_signal_connect(appsink, "new-preroll", G_CALLBACK(newPrerollCallback), this);

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(appsink, "sink"));
    gst_pad_add_probe(pad.get(), static_cast<GstPadProbeType>(GST_PAD_PROBE_TYPE_PUSH | GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_FLUSH), [] (GstPad*, GstPadProbeInfo* info, gpointer userData) -> GstPadProbeReturn {
        // On some platforms (e.g. OpenMAX on the Raspberry Pi) when a resolution change occurs, the
        // pipeline has to be drained before a frame with the new resolution can be decoded.
        // In this context, it's important that we don't hold references to any previous frame
        // (e.g. m_sample) so that decoding can continue.
        // We are also not supposed to keep the original frame after a flush.
        if (info->type & GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM) {
            if (GST_QUERY_TYPE(GST_PAD_PROBE_INFO_QUERY(info)) != GST_QUERY_DRAIN)
                return GST_PAD_PROBE_OK;
            GST_DEBUG("Acting upon DRAIN query");
        }
        if (info->type & GST_PAD_PROBE_TYPE_EVENT_FLUSH) {
            if (GST_EVENT_TYPE(GST_PAD_PROBE_INFO_EVENT(info)) != GST_EVENT_FLUSH_START)
                return GST_PAD_PROBE_OK;
            GST_DEBUG("Acting upon flush-start event");
        }

        auto* player = static_cast<MediaPlayerPrivateGStreamerBase*>(userData);
        player->flushCurrentBuffer();
        return GST_PAD_PROBE_OK;
    }, this, nullptr);

    return appsink;
}

GstElement* MediaPlayerPrivateGStreamerBase::createVideoSinkGL()
{
    gboolean result = TRUE;
    GstElement* videoSink = gst_bin_new(nullptr);
    GstElement* upload = gst_element_factory_make("glupload", nullptr);
    GstElement* colorconvert = gst_element_factory_make("glcolorconvert", nullptr);
    GstElement* appsink = createGLAppSink();

    // glsinkbin is not used because it includes a glcolorconvert that outputs only RGBA,
    // while we can also display YUV formats.
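    // The bin built below is roughly equivalent to this hand-written launch
    // line (a sketch for illustration, not something the code runs):
    //   glupload ! glcolorconvert ! appsink caps="video/x-raw(memory:GLMemory), format=..."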

    if (!appsink || !upload || !colorconvert) {
        GST_WARNING("Failed to create GstGL elements");
        gst_object_unref(videoSink);

        if (upload)
            gst_object_unref(upload);
        if (colorconvert)
            gst_object_unref(colorconvert);
        if (appsink)
            gst_object_unref(appsink);

        g_warning("WebKit wasn't able to find the GStreamer opengl plugin. Hardware-accelerated zero-copy video rendering can't be enabled without this plugin.");
        return nullptr;
    }

    gst_bin_add_many(GST_BIN(videoSink), upload, colorconvert, appsink, nullptr);

    GRefPtr<GstCaps> caps = adoptGRef(gst_caps_from_string("video/x-raw, format = (string) " GST_GL_CAPS_FORMAT));
    gst_caps_set_features(caps.get(), 0, gst_caps_features_new(GST_CAPS_FEATURE_MEMORY_GL_MEMORY, nullptr));
    g_object_set(appsink, "caps", caps.get(), nullptr);

    result &= gst_element_link_many(upload, colorconvert, appsink, nullptr);

    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(upload, "sink"));
    gst_element_add_pad(videoSink, gst_ghost_pad_new("sink", pad.get()));

    if (!result) {
        GST_WARNING("Failed to link GstGL elements");
        gst_object_unref(videoSink);
        videoSink = nullptr;
    }
    return videoSink;
}

void MediaPlayerPrivateGStreamerBase::ensureGLVideoSinkContext()
{
    if (!m_glDisplayElementContext)
        m_glDisplayElementContext = adoptGRef(requestGLContext(GST_GL_DISPLAY_CONTEXT_TYPE));

    if (m_glDisplayElementContext)
        gst_element_set_context(m_videoSink.get(), m_glDisplayElementContext.get());

    if (!m_glAppElementContext)
        m_glAppElementContext = adoptGRef(requestGLContext("gst.gl.app_context"));

    if (m_glAppElementContext)
        gst_element_set_context(m_videoSink.get(), m_glAppElementContext.get());
}
#endif // USE(GSTREAMER_GL)

#if USE(GSTREAMER_HOLEPUNCH)
static void setRectangleToVideoSink(GstElement* videoSink, const IntRect& rect)
{
    // Here goes the platform-dependent code to set the size and position of the
    // video rendering window on the videoSink. Mark the parameters unused by default.
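    // A hypothetical platform implementation could look like this, assuming a
    // sink that exposes a "rectangle" string property (as some embedded sinks do):
    //   GUniquePtr<gchar> rectString(g_strdup_printf("%d,%d,%d,%d", rect.x(), rect.y(), rect.width(), rect.height()));
    //   g_object_set(videoSink, "rectangle", rectString.get(), nullptr);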
    UNUSED_PARAM(videoSink);
    UNUSED_PARAM(rect);
}

class GStreamerHolePunchClient : public TextureMapperPlatformLayerBuffer::HolePunchClient {
public:
    GStreamerHolePunchClient(GRefPtr<GstElement>&& videoSink) : m_videoSink(WTFMove(videoSink)) { }
    void setVideoRectangle(const IntRect& rect) final { setRectangleToVideoSink(m_videoSink.get(), rect); }
private:
    GRefPtr<GstElement> m_videoSink;
};

GstElement* MediaPlayerPrivateGStreamerBase::createHolePunchVideoSink()
{
    // Here goes the platform-dependent code to create the videoSink. By default
    // we use a fakevideosink so nothing is drawn to the page.
    GstElement* videoSink = gst_element_factory_make("fakevideosink", nullptr);

    return videoSink;
}

void MediaPlayerPrivateGStreamerBase::pushNextHolePunchBuffer()
{
    auto proxyOperation =
        [this](TextureMapperPlatformLayerProxy& proxy)
        {
            LockHolder holder(proxy.lock());
            std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = makeUnique<TextureMapperPlatformLayerBuffer>(0, m_size, TextureMapperGL::ShouldNotBlend, GL_DONT_CARE);
            std::unique_ptr<GStreamerHolePunchClient> holePunchClient = makeUnique<GStreamerHolePunchClient>(m_videoSink.get());
            layerBuffer->setHolePunchClient(WTFMove(holePunchClient));
            proxy.pushNextBuffer(WTFMove(layerBuffer));
        };

#if USE(NICOSIA)
    proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
#else
    proxyOperation(*m_platformLayerProxy);
#endif
}
#endif

GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink()
{
    acceleratedRenderingStateChanged();

#if USE(GSTREAMER_HOLEPUNCH)
    m_videoSink = createHolePunchVideoSink();
    pushNextHolePunchBuffer();
    return m_videoSink.get();
#endif

#if USE(GSTREAMER_GL)
    if (m_renderingCanBeAccelerated)
        m_videoSink = createVideoSinkGL();
#endif

    if (!m_videoSink) {
        m_usingFallbackVideoSink = true;
        m_videoSink = webkitVideoSinkNew();
        g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
        g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
    }

    GstElement* videoSink = nullptr;
#if ENABLE(MEDIA_STATISTICS)
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        g_object_set(m_fpsSink.get(), "silent", TRUE, nullptr);

        // Turn off text overlay unless tracing is enabled.
        if (gst_debug_category_get_threshold(webkit_media_player_debug) < GST_LEVEL_TRACE)
            g_object_set(m_fpsSink.get(), "text-overlay", FALSE, nullptr);

        if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink.get()), "video-sink")) {
            g_object_set(m_fpsSink.get(), "video-sink", m_videoSink.get(), nullptr);
            videoSink = m_fpsSink.get();
        } else
            m_fpsSink = nullptr;
    }
#endif

    if (!m_fpsSink)
        videoSink = m_videoSink.get();

    ASSERT(videoSink);

    return videoSink;
}

void MediaPlayerPrivateGStreamerBase::setStreamVolumeElement(GstStreamVolume* volume)
{
    ASSERT(!m_volumeElement);
    m_volumeElement = volume;

    // We don't set the initial volume because we trust the sink to keep it for us. See
    // https://bugs.webkit.org/show_bug.cgi?id=118974 for more information.
    if (!m_player->platformVolumeConfigurationRequired()) {
        GST_DEBUG_OBJECT(pipeline(), "Setting stream volume to %f", m_player->volume());
        gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_LINEAR, static_cast<double>(m_player->volume()));
    } else
        GST_DEBUG_OBJECT(pipeline(), "Not setting stream volume, trusting the system one");

    GST_DEBUG_OBJECT(pipeline(), "Setting stream muted %s", toString(m_player->muted()).utf8().data());
    g_object_set(m_volumeElement.get(), "mute", m_player->muted(), nullptr);

    g_signal_connect_swapped(m_volumeElement.get(), "notify::volume", G_CALLBACK(volumeChangedCallback), this);
    g_signal_connect_swapped(m_volumeElement.get(), "notify::mute", G_CALLBACK(muteChangedCallback), this);
}
1358
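// Playback statistics. The frame counts come from fpsdisplaysink's
// "frames-rendered" and "frames-dropped" properties, so they are only
// available when MEDIA_STATISTICS is enabled and the sink could be created.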
unsigned MediaPlayerPrivateGStreamerBase::decodedFrameCount() const
{
    guint64 decodedFrames = 0;
    if (m_fpsSink)
        g_object_get(m_fpsSink.get(), "frames-rendered", &decodedFrames, nullptr);
    return static_cast<unsigned>(decodedFrames);
}

unsigned MediaPlayerPrivateGStreamerBase::droppedFrameCount() const
{
    guint64 framesDropped = 0;
    if (m_fpsSink)
        g_object_get(m_fpsSink.get(), "frames-dropped", &framesDropped, nullptr);
    return static_cast<unsigned>(framesDropped);
}

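// The decoded byte counts are approximated by querying the current position in
// GST_FORMAT_BYTES on the corresponding sink.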
unsigned MediaPlayerPrivateGStreamerBase::audioDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    if (audioSink() && gst_element_query(audioSink(), query))
        gst_query_parse_position(query, nullptr, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}

unsigned MediaPlayerPrivateGStreamerBase::videoDecodedByteCount() const
{
    GstQuery* query = gst_query_new_position(GST_FORMAT_BYTES);
    gint64 position = 0;

    if (m_videoSink && gst_element_query(m_videoSink.get(), query))
        gst_query_parse_position(query, nullptr, &position);

    gst_query_unref(query);
    return static_cast<unsigned>(position);
}

#if ENABLE(ENCRYPTED_MEDIA)
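// Called from streaming threads when initialization data is encountered; hops
// to the main thread before notifying the player, holding a weak reference in
// case the player goes away in the meantime.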
void MediaPlayerPrivateGStreamerBase::initializationDataEncountered(InitData&& initData)
{
    ASSERT(!isMainThread());

    RunLoop::main().dispatch([weakThis = makeWeakPtr(*this), initData = WTFMove(initData)] {
        if (!weakThis)
            return;

        GST_DEBUG("scheduling initializationDataEncountered event of size %zu", initData.payload()->size());
        GST_MEMDUMP("init data", reinterpret_cast<const uint8_t*>(initData.payload()->data()), initData.payload()->size());
        weakThis->m_player->initializationDataEncountered(initData.payloadContainerType(), initData.payload()->tryCreateArrayBuffer());
    });
}

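// The CDM instance is broadcast to every element in the pipeline through a
// "drm-cdm-instance" GstContext, so the decryptors can pick it up. A consumer
// would read it back roughly along these lines (a sketch assuming the usual
// GstContext pattern, not verbatim from the decryptor sources):
//
//     const GstStructure* structure = gst_context_get_structure(context);
//     CDMInstance* instance = nullptr;
//     gst_structure_get(structure, "cdm-instance", G_TYPE_POINTER, &instance, nullptr);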
void MediaPlayerPrivateGStreamerBase::cdmInstanceAttached(CDMInstance& instance)
{
    ASSERT(isMainThread());

    if (m_cdmInstance == &instance)
        return;

    if (!m_pipeline) {
        GST_ERROR("no pipeline yet");
        ASSERT_NOT_REACHED();
        return;
    }

    m_cdmInstance = &instance;

    GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-cdm-instance", FALSE));
    GstStructure* contextStructure = gst_context_writable_structure(context.get());
    gst_structure_set(contextStructure, "cdm-instance", G_TYPE_POINTER, m_cdmInstance.get(), nullptr);
    gst_element_set_context(GST_ELEMENT(m_pipeline.get()), context.get());

    GST_DEBUG_OBJECT(m_pipeline.get(), "CDM instance %p dispatched as context", m_cdmInstance.get());

    m_cdmAttachmentSemaphore.signal();
}

void MediaPlayerPrivateGStreamerBase::cdmInstanceDetached(CDMInstance& instance)
{
    ASSERT(isMainThread());

    if (m_cdmInstance != &instance) {
        GST_WARNING("passed CDMInstance %p is different from the stored one %p", &instance, m_cdmInstance.get());
        ASSERT_NOT_REACHED();
        return;
    }

    ASSERT(m_pipeline);

    GST_DEBUG_OBJECT(m_pipeline.get(), "detaching CDM instance %p, setting empty context", m_cdmInstance.get());
    m_cdmInstance = nullptr;

    GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-cdm-instance", FALSE));
    gst_element_set_context(GST_ELEMENT(m_pipeline.get()), context.get());
}

void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithInstance(CDMInstance& instance)
{
    ASSERT(m_cdmInstance.get() == &instance);
    GST_TRACE("instance %p, current stored %p", &instance, m_cdmInstance.get());
    attemptToDecryptWithLocalInstance();
}

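// Pokes the decryptors with a custom out-of-band downstream event so they can
// retry decryption with whatever keys the attached CDM instance now holds.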
void MediaPlayerPrivateGStreamerBase::attemptToDecryptWithLocalInstance()
{
    bool eventHandled = gst_element_send_event(pipeline(), gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM_OOB, gst_structure_new_empty("attempt-to-decrypt")));
    GST_DEBUG("attempting to decrypt, event handled %s", boolForPrinting(eventHandled));
}

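// Protection events can reach us several times through different pads; the
// event sequence number is used to process each one only once.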
void MediaPlayerPrivateGStreamerBase::handleProtectionEvent(GstEvent* event)
{
    {
        LockHolder lock(m_protectionMutex);
        if (m_handledProtectionEvents.contains(GST_EVENT_SEQNUM(event))) {
            GST_DEBUG_OBJECT(pipeline(), "event %u already handled", GST_EVENT_SEQNUM(event));
            return;
        }
    }
    GST_DEBUG_OBJECT(pipeline(), "handling event %u from MSE", GST_EVENT_SEQNUM(event));
    const char* eventKeySystemUUID = nullptr;
    GstBuffer* initData = nullptr;
    gst_event_parse_protection(event, &eventKeySystemUUID, &initData, nullptr);
    initializationDataEncountered({eventKeySystemUUID, initData});
}

void MediaPlayerPrivateGStreamerBase::setWaitingForKey(bool waitingForKey)
{
    // Bail out if the value did not change, or if we are asked to stop waiting
    // while some decryptors are still waiting for a key.
    GST_TRACE("waitingForKey %s, m_waitingForKey %s", boolForPrinting(waitingForKey), boolForPrinting(m_waitingForKey));
    if (waitingForKey == m_waitingForKey || (!waitingForKey && this->waitingForKey()))
        return;

    m_waitingForKey = waitingForKey;
    GST_DEBUG("waiting for key changed %s", boolForPrinting(m_waitingForKey));
    m_player->waitingForKeyChanged();
}

bool MediaPlayerPrivateGStreamerBase::waitingForKey() const
{
    if (!m_pipeline)
        return false;

    GstState state;
    gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);

    bool result = false;
    GRefPtr<GstQuery> query = adoptGRef(gst_query_new_custom(GST_QUERY_CUSTOM, gst_structure_new_empty("any-decryptor-waiting-for-key")));
    if (state >= GST_STATE_PAUSED) {
        result = gst_element_query(m_pipeline.get(), query.get());
        GST_TRACE("query result %s, on %s", boolForPrinting(result), gst_element_state_get_name(state));
    } else if (state >= GST_STATE_READY) {
        // Running the query on the pipeline is easier, but it only works once the
        // pipeline is set up and running; before that we have to walk the bin and
        // ask the decryptors directly.
        GUniquePtr<GstIterator> iterator(gst_bin_iterate_recurse(GST_BIN(m_pipeline.get())));
        GstIteratorResult iteratorResult;
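        // The fold function returns FALSE, stopping the fold, as soon as a
        // decryptor answers the query; gst_iterator_fold() then reports
        // GST_ITERATOR_OK, which below means "some decryptor is waiting for a key".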
        do {
            iteratorResult = gst_iterator_fold(iterator.get(), [](const GValue* item, GValue*, gpointer data) -> gboolean {
                GstElement* element = GST_ELEMENT(g_value_get_object(item));
                GstQuery* query = GST_QUERY(data);
                return !WEBKIT_IS_MEDIA_CENC_DECRYPT(element) || !gst_element_query(element, query);
            }, nullptr, query.get());
            if (iteratorResult == GST_ITERATOR_RESYNC)
                gst_iterator_resync(iterator.get());
        } while (iteratorResult == GST_ITERATOR_RESYNC);
        if (iteratorResult == GST_ITERATOR_ERROR)
            GST_WARNING("iterator returned an error");
        result = iteratorResult == GST_ITERATOR_OK;
        GST_TRACE("iterator result %d, waiting %s", iteratorResult, boolForPrinting(result));
    }

    return result;
}
#endif // ENABLE(ENCRYPTED_MEDIA)

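// Note that only the ClearKey key system is reported as supported here.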
bool MediaPlayerPrivateGStreamerBase::supportsKeySystem(const String& keySystem, const String& mimeType)
{
    bool result = false;

#if ENABLE(ENCRYPTED_MEDIA)
    result = GStreamerEMEUtilities::isClearKeyKeySystem(keySystem);
#endif

    GST_DEBUG("checking for KeySystem support with %s and type %s: %s", keySystem.utf8().data(), mimeType.utf8().data(), boolForPrinting(result));
    return result;
}

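// Hook allowing subclasses to refine the support answer; this base
// implementation returns the precomputed result unchanged.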
MediaPlayer::SupportsType MediaPlayerPrivateGStreamerBase::extendedSupportsType(const MediaEngineSupportParameters& parameters, MediaPlayer::SupportsType result)
{
    UNUSED_PARAM(parameters);
    return result;
}

} // namespace WebCore

#endif // ENABLE(VIDEO) && USE(GSTREAMER)