/*
 * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
 * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
 * Copyright (C) 2007 Alp Toker <alp@atoker.com>
 * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
 * Copyright (C) 2014 Cable Television Laboratories, Inc.
 * Copyright (C) 2009, 2019 Igalia S.L
 * Copyright (C) 2015, 2019 Metrological Group B.V.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"
#include "MediaPlayerPrivateGStreamer.h"

#if ENABLE(VIDEO) && USE(GSTREAMER)

#include "GraphicsContext.h"
#include "GStreamerCommon.h"
#include "GStreamerRegistryScanner.h"
#include "HTTPHeaderNames.h"
#include "ImageGStreamer.h"
#include "ImageOrientation.h"
#include "IntRect.h"
#include "Logging.h"
#include "MediaPlayer.h"
#include "MediaPlayerRequestInstallMissingPluginsCallback.h"
#include "MIMETypeRegistry.h"
#include "NotImplemented.h"
#include "SecurityOrigin.h"
#include "TimeRanges.h"
#include "VideoSinkGStreamer.h"
#include "WebKitWebSourceGStreamer.h"

#if ENABLE(VIDEO_TRACK)
#include "AudioTrackPrivateGStreamer.h"
#include "InbandMetadataTextTrackPrivateGStreamer.h"
#include "InbandTextTrackPrivateGStreamer.h"
#include "TextCombinerGStreamer.h"
#include "TextSinkGStreamer.h"
#include "VideoTrackPrivateGStreamer.h"
#endif // ENABLE(VIDEO_TRACK)

#if ENABLE(MEDIA_STREAM)
#include "GStreamerMediaStreamSource.h"
#endif

#if ENABLE(MEDIA_SOURCE)
#include "MediaSource.h"
#include "WebKitMediaSourceGStreamer.h"
#endif

#if ENABLE(ENCRYPTED_MEDIA)
#include "CDMInstance.h"
#include "CDMProxyClearKey.h"
#include "GStreamerEMEUtilities.h"
#include "SharedBuffer.h"
#include "WebKitCommonEncryptionDecryptorGStreamer.h"
#endif

#if ENABLE(WEB_AUDIO)
#include "AudioSourceProviderGStreamer.h"
#endif

#include <glib.h>
#include <gst/audio/streamvolume.h>
#include <gst/gst.h>
#include <gst/pbutils/missing-plugins.h>
#include <gst/video/gstvideometa.h>
#include <limits>
#include <wtf/FileSystem.h>
#include <wtf/glib/GLibUtilities.h>
#include <wtf/glib/GUniquePtr.h>
#include <wtf/glib/RunLoopSourcePriority.h>
#include <wtf/MathExtras.h>
#include <wtf/MediaTime.h>
#include <wtf/NeverDestroyed.h>
#include <wtf/StringPrintStream.h>
#include <wtf/text/AtomString.h>
#include <wtf/text/CString.h>
#include <wtf/text/StringConcatenateNumbers.h>
#include <wtf/URL.h>
#include <wtf/WallTime.h>

#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
#define GST_USE_UNSTABLE_API
#include <gst/mpegts/mpegts.h>
#undef GST_USE_UNSTABLE_API
#endif // ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)

#if ENABLE(VIDEO_TRACK)
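// CREATE_TRACK(type, Type) registers a newly discovered GstStream as a WebCore track of the
// given kind and remembers which stream is currently selected. As an illustration,
// CREATE_TRACK(audio, Audio) expands roughly to:
//
//     m_hasAudio = true;
//     if (!useMediaSource) {
//         RefPtr<AudioTrackPrivateGStreamer> track = AudioTrackPrivateGStreamer::create(makeWeakPtr(*this), i, stream);
//         m_audioTracks.add(track->id(), track);
//         m_player->addAudioTrack(*track);
//         ...
//     }
//
// Without VIDEO_TRACK support only the m_has{Audio,Video,Text} flag is set.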
#define CREATE_TRACK(type, Type) G_STMT_START {                         \
        m_has##Type = true;                                             \
        if (!useMediaSource) {                                          \
            RefPtr<Type##TrackPrivateGStreamer> track = Type##TrackPrivateGStreamer::create(makeWeakPtr(*this), i, stream); \
            m_##type##Tracks.add(track->id(), track);                   \
            m_player->add##Type##Track(*track);                         \
            if (gst_stream_get_stream_flags(stream.get()) & GST_STREAM_FLAG_SELECT) \
                m_current##Type##StreamId = String(gst_stream_get_stream_id(stream.get())); \
        }                                                               \
    } G_STMT_END

#define CLEAR_TRACKS(tracks, method) \
    for (auto& track : tracks.values())\
        method(*track);\
    tracks.clear();
#else
#define CREATE_TRACK(type, Type) G_STMT_START { \
        m_has##Type = true;                     \
    } G_STMT_END
#endif // ENABLE(VIDEO_TRACK)

#if USE(GSTREAMER_GL)
#include "GLVideoSinkGStreamer.h"
#include "VideoTextureCopierGStreamer.h"

#define TEXTURE_COPIER_COLOR_CONVERT_FLAG VideoTextureCopierGStreamer::ColorConversion::NoConvert
#endif // USE(GSTREAMER_GL)

#if USE(TEXTURE_MAPPER_GL)
#include "BitmapTextureGL.h"
#include "BitmapTexturePool.h"
#include "TextureMapperContextAttributes.h"
#include "TextureMapperPlatformLayerBuffer.h"
#include "TextureMapperPlatformLayerProxy.h"
#if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
#include <cairo-gl.h>
#include "GLContext.h"
#include "PlatformDisplay.h"
// cairo-gl.h ends up including X.h, which defines None, breaking MediaPlayer:: enums.
#undef None
#endif
#endif // USE(TEXTURE_MAPPER_GL)

#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
#include "PlatformDisplayLibWPE.h"
#include <gst/gl/egl/gsteglimage.h>
#include <gst/gl/egl/gstglmemoryegl.h>
#include <wpe/extensions/video-plane-display-dmabuf.h>
#endif

GST_DEBUG_CATEGORY(webkit_media_player_debug);
#define GST_CAT_DEFAULT webkit_media_player_debug

namespace WebCore {
using namespace std;

#if USE(GSTREAMER_HOLEPUNCH)
static const FloatSize s_holePunchDefaultFrameSize(1280, 720);
#endif

static void busMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
{
    player->handleMessage(message);
}

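// GStreamer versions older than 1.12 need the custom webkit+ protocol prefix so that the
// WebKit source element is picked for http(s) and blob loads. For example,
// "https://example.com/movie.mp4" becomes "webkit+https://example.com/movie.mp4"; on 1.12
// and newer the URL is left untouched.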
static void convertToInternalProtocol(URL& url)
{
    if (webkitGstCheckVersion(1, 12, 0))
        return;
    if (url.protocolIsInHTTPFamily() || url.protocolIsBlob())
        url.setProtocol("webkit+" + url.protocol());
}

#if USE(TEXTURE_MAPPER_GL)
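// GstVideoFrameHolder maps a decoded GstSample and keeps its GstBuffer alive for as long as
// the TextureMapper needs the frame. Depending on the configuration, the frame is exposed
// either as GL textures (GSTREAMER_GL), as a DMABuf handed off to the WPE video plane
// (WPE_VIDEO_PLANE_DISPLAY_DMABUF), or as plain mapped memory copied into a BitmapTextureGL.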
class GstVideoFrameHolder : public TextureMapperPlatformLayerBuffer::UnmanagedBufferDataHolder {
public:
    explicit GstVideoFrameHolder(GstSample* sample, Optional<GstVideoDecoderPlatform> videoDecoderPlatform, TextureMapperGL::Flags flags, bool gstGLEnabled)
        : m_videoDecoderPlatform(videoDecoderPlatform)
    {
        RELEASE_ASSERT(GST_IS_SAMPLE(sample));

        GstVideoInfo videoInfo;
        if (UNLIKELY(!getSampleVideoInfo(sample, videoInfo)))
            return;

        m_size = IntSize(GST_VIDEO_INFO_WIDTH(&videoInfo), GST_VIDEO_INFO_HEIGHT(&videoInfo));
        m_hasAlphaChannel = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo);
        m_buffer = gst_sample_get_buffer(sample);
        if (UNLIKELY(!GST_IS_BUFFER(m_buffer.get())))
            return;

#if USE(GSTREAMER_GL)
        m_flags = flags | (m_hasAlphaChannel ? TextureMapperGL::ShouldBlend : 0);

        GstMemory* memory = gst_buffer_peek_memory(m_buffer.get(), 0);
        if (gst_is_gl_memory(memory))
            m_textureTarget = gst_gl_memory_get_texture_target(GST_GL_MEMORY_CAST(memory));

#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
        m_dmabufFD = -1;
        gsize offset;
        if (gst_is_gl_memory_egl(memory)) {
            GstGLMemoryEGL* eglMemory = (GstGLMemoryEGL*) memory;
            gst_egl_image_export_dmabuf(eglMemory->image, &m_dmabufFD, &m_dmabufStride, &offset);
        } else if (gst_is_gl_memory(memory)) {
            GRefPtr<GstEGLImage> eglImage = adoptGRef(gst_egl_image_from_texture(GST_GL_BASE_MEMORY_CAST(memory)->context, GST_GL_MEMORY_CAST(memory), nullptr));

            if (eglImage)
                gst_egl_image_export_dmabuf(eglImage.get(), &m_dmabufFD, &m_dmabufStride, &offset);
        }

        if (hasDMABuf() && m_dmabufStride == -1) {
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer.get(), GST_MAP_READ);
            if (m_isMapped)
                m_dmabufStride = GST_VIDEO_INFO_PLANE_STRIDE(&m_videoFrame.info, 0);
        }

        if (hasDMABuf() && m_dmabufStride)
            return;

        static std::once_flag s_onceFlag;
        std::call_once(s_onceFlag, [] {
            GST_WARNING("Texture export to DMABuf failed, falling back to internal rendering");
        });
#endif // USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)

        if (gstGLEnabled) {
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer.get(), static_cast<GstMapFlags>(GST_MAP_READ | GST_MAP_GL));
            if (m_isMapped) {
                m_textureID = *reinterpret_cast<GLuint*>(m_videoFrame.data[0]);
                m_hasMappedTextures = true;
            }
        } else
#else
        UNUSED_PARAM(flags);
        UNUSED_PARAM(gstGLEnabled);
#endif // USE(GSTREAMER_GL)

        {
            m_textureID = 0;
            m_isMapped = gst_video_frame_map(&m_videoFrame, &videoInfo, m_buffer.get(), GST_MAP_READ);
            if (m_isMapped) {
                // Right now the TextureMapper only supports chromas with one plane.
                ASSERT(GST_VIDEO_INFO_N_PLANES(&videoInfo) == 1);
            }
        }
    }

    virtual ~GstVideoFrameHolder()
    {
        if (UNLIKELY(!m_isMapped))
            return;

        gst_video_frame_unmap(&m_videoFrame);
    }

#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
    void handoffVideoDmaBuf(struct wpe_video_plane_display_dmabuf_source* videoPlaneDisplayDmaBufSource, const IntRect& rect)
    {
        if (m_dmabufFD <= 0)
            return;

        wpe_video_plane_display_dmabuf_source_update(videoPlaneDisplayDmaBufSource, m_dmabufFD, rect.x(), rect.y(), m_size.width(), m_size.height(), m_dmabufStride, [](void* data) {
            gst_buffer_unref(GST_BUFFER_CAST(data));
        }, gst_buffer_ref(m_buffer.get()));

        close(m_dmabufFD);
        m_dmabufFD = 0;
    }
#endif

#if USE(GSTREAMER_GL)
    virtual void waitForCPUSync()
    {
        GstGLSyncMeta* meta = gst_buffer_get_gl_sync_meta(m_buffer.get());
        if (meta) {
            GstMemory* mem = gst_buffer_peek_memory(m_buffer.get(), 0);
            GstGLContext* context = ((GstGLBaseMemory*)mem)->context;
            gst_gl_sync_meta_wait_cpu(meta, context);
        }
    }
#endif // USE(GSTREAMER_GL)

    const IntSize& size() const { return m_size; }
    bool hasAlphaChannel() const { return m_hasAlphaChannel; }
    TextureMapperGL::Flags flags() const { return m_flags; }
    GLuint textureID() const { return m_textureID; }
    bool hasMappedTextures() const { return m_hasMappedTextures; }
    const GstVideoFrame& videoFrame() const { return m_videoFrame; }

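    // Copies the current frame into the given texture. The fast path uses
    // GstVideoGLTextureUploadMeta when the buffer provides it (single-texture formats such
    // as BGRx/BGRA); otherwise the mapped frame data is uploaded through
    // BitmapTextureGL::updateContents().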
    void updateTexture(BitmapTextureGL& texture)
    {
        ASSERT(!m_textureID);
        GstVideoGLTextureUploadMeta* meta;
        if (m_buffer && (meta = gst_buffer_get_video_gl_texture_upload_meta(m_buffer.get()))) {
            if (meta->n_textures == 1) { // BGRx & BGRA formats use only one texture.
                guint ids[4] = { texture.id(), 0, 0, 0 };

                if (gst_video_gl_texture_upload_meta_upload(meta, ids))
                    return;
            }
        }

        if (!m_isMapped)
            return;

        int stride = GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 0);
        const void* srcData = GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0);

        if (!srcData)
            return;

        texture.updateContents(srcData, WebCore::IntRect(0, 0, m_size.width(), m_size.height()), WebCore::IntPoint(0, 0), stride);
    }

    std::unique_ptr<TextureMapperPlatformLayerBuffer> platformLayerBuffer()
    {
        if (!m_hasMappedTextures)
            return nullptr;

        using Buffer = TextureMapperPlatformLayerBuffer;

#if USE(GSTREAMER_GL)
        if (m_textureTarget == GST_GL_TEXTURE_TARGET_EXTERNAL_OES)
            return makeUnique<Buffer>(Buffer::TextureVariant { Buffer::ExternalOESTexture { m_textureID } }, m_size, m_flags, GL_DONT_CARE);
#endif

        if ((GST_VIDEO_INFO_IS_RGB(&m_videoFrame.info) && GST_VIDEO_INFO_N_PLANES(&m_videoFrame.info) == 1))
            return makeUnique<Buffer>(Buffer::TextureVariant { Buffer::RGBTexture { m_textureID } }, m_size, m_flags, GL_RGBA);

        if (GST_VIDEO_INFO_IS_YUV(&m_videoFrame.info)) {
            if (GST_VIDEO_INFO_N_COMPONENTS(&m_videoFrame.info) < 3 || GST_VIDEO_INFO_N_PLANES(&m_videoFrame.info) > 3)
                return nullptr;

            if (m_videoDecoderPlatform && *m_videoDecoderPlatform == GstVideoDecoderPlatform::ImxVPU) {
                // The IMX VPU decoder decodes YUV data only into the Y texture, from which the
                // sampler then directly produces RGBA data. Textures for the other planes aren't
                // used, but that's the decoder's problem. We have to treat that Y texture as
                // having RGBA data.
                return makeUnique<Buffer>(Buffer::TextureVariant { Buffer::RGBTexture { m_textureID } }, m_size, m_flags, GL_RGBA);
            }

            unsigned numberOfPlanes = GST_VIDEO_INFO_N_PLANES(&m_videoFrame.info);
            std::array<GLuint, 3> planes;
            std::array<unsigned, 3> yuvPlane;
            std::array<unsigned, 3> yuvPlaneOffset;
            for (unsigned i = 0; i < numberOfPlanes; ++i)
                planes[i] = *static_cast<GLuint*>(m_videoFrame.data[i]);
            for (unsigned i = 0; i < 3; ++i) {
                yuvPlane[i] = GST_VIDEO_INFO_COMP_PLANE(&m_videoFrame.info, i);
                yuvPlaneOffset[i] = GST_VIDEO_INFO_COMP_POFFSET(&m_videoFrame.info, i);
            }

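            // These matrices convert limited-range (16-235) YCbCr to RGB; the recurring
            // 1.164 factor is the usual 255/219 luma expansion. BT.709 is used when the
            // caps say so, BT.601 otherwise, mirroring GStreamer's glcolorconvert element.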
            std::array<GLfloat, 9> yuvToRgb;
            if (gst_video_colorimetry_matches(&GST_VIDEO_INFO_COLORIMETRY(&m_videoFrame.info), GST_VIDEO_COLORIMETRY_BT709)) {
                yuvToRgb = {
                    1.164f,  0.0f,    1.787f,
                    1.164f, -0.213f, -0.531f,
                    1.164f,  2.112f,  0.0f
                };
            } else {
                // Default to bt601. This is the same behaviour as GStreamer's glcolorconvert element.
                yuvToRgb = {
                    1.164f,  0.0f,    1.596f,
                    1.164f, -0.391f, -0.813f,
                    1.164f,  2.018f,  0.0f
                };
            }

            return makeUnique<Buffer>(Buffer::TextureVariant { Buffer::YUVTexture { numberOfPlanes, planes, yuvPlane, yuvPlaneOffset, yuvToRgb } }, m_size, m_flags, GL_RGBA);
        }

        return nullptr;
    }

    bool hasDMABuf() const
    {
#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
        return m_dmabufFD >= 0;
#else
        return false;
#endif
    }

private:
    GRefPtr<GstBuffer> m_buffer;
    GstVideoFrame m_videoFrame { };
    IntSize m_size;
    bool m_hasAlphaChannel;
    Optional<GstVideoDecoderPlatform> m_videoDecoderPlatform;
    TextureMapperGL::Flags m_flags { };
    GLuint m_textureID { 0 };
#if USE(GSTREAMER_GL)
    GstGLTextureTarget m_textureTarget { GST_GL_TEXTURE_TARGET_NONE };
#endif
    bool m_isMapped { false };
    bool m_hasMappedTextures { false };
#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
    int m_dmabufFD { 0 };
    int m_dmabufStride { 0 };
#endif
};
#endif

static void initializeDebugCategory()
{
    static std::once_flag onceFlag;
    std::call_once(onceFlag, [] {
        GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
    });
}

MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
    : m_notifier(MainThreadNotifier<MainThreadNotification>::create())
    , m_player(player)
    , m_cachedPosition(MediaTime::invalidTime())
    , m_cachedDuration(MediaTime::invalidTime())
    , m_seekTime(MediaTime::invalidTime())
    , m_timeOfOverlappingSeek(MediaTime::invalidTime())
    , m_fillTimer(*this, &MediaPlayerPrivateGStreamer::fillTimerFired)
    , m_maxTimeLoaded(MediaTime::zeroTime())
    , m_preload(player->preload())
    , m_maxTimeLoadedAtLastDidLoadingProgress(MediaTime::zeroTime())
    , m_drawTimer(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::repaint)
    , m_readyTimerHandler(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::readyTimerFired)
#if USE(TEXTURE_MAPPER_GL)
#if USE(NICOSIA)
    , m_nicosiaLayer(Nicosia::ContentLayer::create(Nicosia::ContentLayerTextureMapperImpl::createFactory(*this)))
#else
    , m_platformLayerProxy(adoptRef(new TextureMapperPlatformLayerProxy()))
#endif
#endif
{
#if USE(GLIB)
    m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
#endif
    m_isPlayerShuttingDown.store(false);

#if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
    auto& sharedDisplay = PlatformDisplay::sharedDisplay();
    if (is<PlatformDisplayLibWPE>(sharedDisplay))
        m_wpeVideoPlaneDisplayDmaBuf.reset(wpe_video_plane_display_dmabuf_source_create(downcast<PlatformDisplayLibWPE>(sharedDisplay).backend()));
#endif
}

MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
{
    GST_DEBUG_OBJECT(pipeline(), "Disposing player");
    m_isPlayerShuttingDown.store(true);

#if ENABLE(VIDEO_TRACK)
    for (auto& track : m_audioTracks.values())
        track->disconnect();

    for (auto& track : m_textTracks.values())
        track->disconnect();

    for (auto& track : m_videoTracks.values())
        track->disconnect();
#endif
    if (m_fillTimer.isActive())
        m_fillTimer.stop();

    if (m_mediaLocations) {
        gst_structure_free(m_mediaLocations);
        m_mediaLocations = nullptr;
    }

    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    if (m_autoAudioSink) {
        g_signal_handlers_disconnect_by_func(G_OBJECT(m_autoAudioSink.get()),
            reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);
    }

    m_readyTimerHandler.stop();
    for (auto& missingPluginCallback : m_missingPluginCallbacks) {
        if (missingPluginCallback)
            missingPluginCallback->invalidate();
    }
    m_missingPluginCallbacks.clear();

    if (m_videoSink) {
        GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
        g_signal_handlers_disconnect_matched(videoSinkPad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
    }

    if (m_pipeline) {
        GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
        ASSERT(bus);
        g_signal_handlers_disconnect_by_func(bus.get(), gpointer(busMessageCallback), this);
        gst_bus_remove_signal_watch(bus.get());
        gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
        g_signal_handlers_disconnect_matched(m_pipeline.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
    }

#if USE(GSTREAMER_GL)
    if (m_videoDecoderPlatform == GstVideoDecoderPlatform::Video4Linux)
        flushCurrentBuffer();
#endif
#if USE(TEXTURE_MAPPER_GL) && USE(NICOSIA)
    downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).invalidateClient();
#endif

    m_notifier->invalidate();

    if (m_videoSink)
        g_signal_handlers_disconnect_matched(m_videoSink.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    if (m_volumeElement)
        g_signal_handlers_disconnect_matched(m_volumeElement.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);

    // This will release the GStreamer thread from m_drawCondition in non-AC mode in case there's an ongoing
    // triggerRepaint call waiting there, and ensure that any triggerRepaint call reaching the lock won't wait
    // on m_drawCondition.
    cancelRepaint(true);

#if ENABLE(ENCRYPTED_MEDIA)
    {
        LockHolder lock(m_cdmAttachmentMutex);
        m_cdmAttachmentCondition.notifyAll();
    }
#endif

    // The change to GST_STATE_NULL is always synchronous, so after this gets executed we don't need to worry
    // about handlers running in the GStreamer thread.
    if (m_pipeline)
        gst_element_set_state(m_pipeline.get(), GST_STATE_NULL);

    m_player = nullptr;
}

bool MediaPlayerPrivateGStreamer::isAvailable()
{
    if (!initializeGStreamerAndRegisterWebKitElements())
        return false;

    // FIXME: This has not been updated for the playbin3 switch.
    GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
    return factory;
}

class MediaPlayerFactoryGStreamer final : public MediaPlayerFactory {
private:
    MediaPlayerEnums::MediaEngineIdentifier identifier() const final { return MediaPlayerEnums::MediaEngineIdentifier::GStreamer; }

    std::unique_ptr<MediaPlayerPrivateInterface> createMediaEnginePlayer(MediaPlayer* player) const final
    {
        return makeUnique<MediaPlayerPrivateGStreamer>(player);
    }

    void getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types) const final
    {
        return MediaPlayerPrivateGStreamer::getSupportedTypes(types);
    }

    MediaPlayer::SupportsType supportsTypeAndCodecs(const MediaEngineSupportParameters& parameters) const final
    {
        return MediaPlayerPrivateGStreamer::supportsType(parameters);
    }

    bool supportsKeySystem(const String& keySystem, const String& mimeType) const final
    {
        return MediaPlayerPrivateGStreamer::supportsKeySystem(keySystem, mimeType);
    }
};

void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar)
{
    initializeDebugCategory();

    if (isAvailable())
        registrar(makeUnique<MediaPlayerFactoryGStreamer>());
}

void MediaPlayerPrivateGStreamer::loadFull(const String& urlString, const String& pipelineName)
{
    if (m_player->contentMIMEType() == "image/gif") {
        loadingFailed(MediaPlayer::NetworkState::FormatError, MediaPlayer::ReadyState::HaveNothing, true);
        return;
    }

    URL url(URL(), urlString);
    if (url.protocolIsAbout()) {
        loadingFailed(MediaPlayer::NetworkState::FormatError, MediaPlayer::ReadyState::HaveNothing, true);
        return;
    }

    if (!m_pipeline)
        createGSTPlayBin(url, pipelineName);
    syncOnClock(true);
    if (m_fillTimer.isActive())
        m_fillTimer.stop();

    ASSERT(m_pipeline);

    setPlaybinURL(url);

    GST_DEBUG_OBJECT(pipeline(), "preload: %s", convertEnumerationToString(m_preload).utf8().data());
    if (m_preload == MediaPlayer::Preload::None) {
        GST_INFO_OBJECT(pipeline(), "Delaying load.");
        m_isDelayingLoad = true;
    }

    // Reset network and ready states. Those will be set properly once
    // the pipeline has pre-rolled.
    m_networkState = MediaPlayer::NetworkState::Loading;
    m_player->networkStateChanged();
    m_readyState = MediaPlayer::ReadyState::HaveNothing;
    m_player->readyStateChanged();
    m_areVolumeAndMuteInitialized = false;
    m_hasTaintedOrigin = WTF::nullopt;

    if (!m_isDelayingLoad)
        commitLoad();
}

void MediaPlayerPrivateGStreamer::load(const String& urlString)
{
    loadFull(urlString, String());
}

#if ENABLE(MEDIA_SOURCE)
void MediaPlayerPrivateGStreamer::load(const String&, MediaSourcePrivateClient*)
{
    // Properly fail so the global MediaPlayer tries to fall back to the next MediaPlayerPrivate.
    m_networkState = MediaPlayer::NetworkState::FormatError;
    m_player->networkStateChanged();
}
#endif

#if ENABLE(MEDIA_STREAM)
void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate& stream)
{
    m_streamPrivate = &stream;
    static Atomic<uint32_t> pipelineId;
    auto pipelineName = makeString("mediastream-", pipelineId.exchangeAdd(1));

    loadFull(String("mediastream://") + stream.id(), pipelineName);
    syncOnClock(false);

    m_player->play();
}
#endif

void MediaPlayerPrivateGStreamer::cancelLoad()
{
    if (m_networkState < MediaPlayer::NetworkState::Loading || m_networkState == MediaPlayer::NetworkState::Loaded)
        return;

    if (m_pipeline)
        changePipelineState(GST_STATE_READY);
}

void MediaPlayerPrivateGStreamer::prepareToPlay()
{
    GST_DEBUG_OBJECT(pipeline(), "Prepare to play");
    m_preload = MediaPlayer::Preload::Auto;
    if (m_isDelayingLoad) {
        m_isDelayingLoad = false;
        commitLoad();
    }
}

void MediaPlayerPrivateGStreamer::play()
{
    if (!m_playbackRate) {
        m_isPlaybackRatePaused = true;
        return;
    }

    if (changePipelineState(GST_STATE_PLAYING)) {
        m_isEndReached = false;
        m_isDelayingLoad = false;
        m_preload = MediaPlayer::Preload::Auto;
        updateDownloadBufferingFlag();
        GST_INFO_OBJECT(pipeline(), "Play");
    } else
        loadingFailed(MediaPlayer::NetworkState::Empty);
}

void MediaPlayerPrivateGStreamer::pause()
{
    m_isPlaybackRatePaused = false;
    GstState currentState, pendingState;
    gst_element_get_state(m_pipeline.get(), &currentState, &pendingState, 0);
    if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
        return;

    if (changePipelineState(GST_STATE_PAUSED))
        GST_INFO_OBJECT(pipeline(), "Pause");
    else
        loadingFailed(MediaPlayer::NetworkState::Empty);
}

bool MediaPlayerPrivateGStreamer::paused() const
{
    if (!m_pipeline)
        return true;

    if (m_isEndReached) {
        GST_DEBUG_OBJECT(pipeline(), "Ignoring pause at EOS");
        return true;
    }

    if (m_isPlaybackRatePaused) {
        GST_DEBUG_OBJECT(pipeline(), "Playback rate is 0, simulating PAUSED state");
        return false;
    }

    GstState state;
    gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
    bool paused = state <= GST_STATE_PAUSED;
    GST_LOG_OBJECT(pipeline(), "Paused: %s", toString(paused).utf8().data());
    return paused;
}

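// Performs the actual GStreamer seek. For negative rates GStreamer plays the segment
// [start, stop] backwards from stop, so the requested position becomes the stop time and
// the segment start is pinned to 0.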
bool MediaPlayerPrivateGStreamer::doSeek(const MediaTime& position, float rate, GstSeekFlags seekType)
{
    // Default values for rate >= 0.
    MediaTime startTime = position, endTime = MediaTime::invalidTime();

    if (rate < 0) {
        startTime = MediaTime::zeroTime();
        // If we are at the beginning of the media, start from the end to avoid an immediate EOS.
        endTime = position < MediaTime::zeroTime() ? durationMediaTime() : position;
    }

    if (!rate)
        rate = 1.0;

    return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType,
        GST_SEEK_TYPE_SET, toGstClockTime(startTime), GST_SEEK_TYPE_SET, toGstClockTime(endTime));
}

void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime)
{
    if (!m_pipeline || m_didErrorOccur)
        return;

    GST_INFO_OBJECT(pipeline(), "[Seek] seek attempt to %s", toString(mediaTime).utf8().data());

    // Avoid useless seeking.
    if (mediaTime == currentMediaTime()) {
        GST_DEBUG_OBJECT(pipeline(), "[Seek] Already at requested position, ignoring");
        return;
    }

    MediaTime time = std::min(mediaTime, durationMediaTime());

    if (m_isLiveStream) {
        GST_DEBUG_OBJECT(pipeline(), "[Seek] Live stream seek unhandled");
        return;
    }

    GST_INFO_OBJECT(pipeline(), "[Seek] seeking to %s", toString(time).utf8().data());

    if (m_isSeeking) {
        m_timeOfOverlappingSeek = time;
        if (m_isSeekPending) {
            m_seekTime = time;
            return;
        }
    }

    GstState state;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
    if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
        GST_DEBUG_OBJECT(pipeline(), "[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
        return;
    }
    if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
        m_isSeekPending = true;
        if (m_isEndReached) {
            GST_DEBUG_OBJECT(pipeline(), "[Seek] reset pipeline");
            m_shouldResetPipeline = true;
            if (!changePipelineState(GST_STATE_PAUSED))
                loadingFailed(MediaPlayer::NetworkState::Empty);
        }
    } else {
        // We can seek now.
        if (!doSeek(time, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
            GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(time).utf8().data());
            return;
        }
    }

    m_isSeeking = true;
    m_seekTime = time;
    m_isEndReached = false;
}

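// The playback rate is applied with a flushing seek at the current position. If that seek
// fails, the previous rate is restored. Audio is muted for non-positive rates, and for
// extreme rates when pitch preservation is disabled, since the output would not be
// intelligible anyway.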
void MediaPlayerPrivateGStreamer::updatePlaybackRate()
{
    if (!m_isChangingRate)
        return;

    GST_INFO_OBJECT(pipeline(), "Set playback rate to %f", m_playbackRate);

    // Mute the sound if the playback rate is negative or too extreme and audio pitch is not adjusted.
    bool mute = m_playbackRate <= 0 || (!m_shouldPreservePitch && (m_playbackRate < 0.8 || m_playbackRate > 2));

    GST_INFO_OBJECT(pipeline(), mute ? "Need to mute audio" : "Do not need to mute audio");

    if (doSeek(playbackPosition(), m_playbackRate, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH))) {
        g_object_set(m_pipeline.get(), "mute", mute, nullptr);
        m_lastPlaybackRate = m_playbackRate;
    } else {
        GST_ERROR_OBJECT(pipeline(), "Set rate to %f failed", m_playbackRate);
        m_playbackRate = m_lastPlaybackRate;
    }

    if (m_isPlaybackRatePaused) {
        GstState state, pending;

        gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
        if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
            changePipelineState(GST_STATE_PLAYING);
        m_isPlaybackRatePaused = false;
    }

    m_isChangingRate = false;
    m_player->rateChanged();
}

MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
{
    GST_TRACE_OBJECT(pipeline(), "Cached duration: %s", m_cachedDuration.toString().utf8().data());
    if (m_cachedDuration.isValid())
        return m_cachedDuration;

    MediaTime duration = platformDuration();
    if (!duration || duration.isInvalid())
        return MediaTime::zeroTime();

    m_cachedDuration = duration;

    return m_cachedDuration;
}

MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
{
    if (!m_pipeline || m_didErrorOccur)
        return MediaTime::invalidTime();

    GST_TRACE_OBJECT(pipeline(), "seeking: %s, seekTime: %s", boolForPrinting(m_isSeeking), m_seekTime.toString().utf8().data());
    if (m_isSeeking)
        return m_seekTime;

    return playbackPosition();
}

void MediaPlayerPrivateGStreamer::setRate(float rate)
{
    float rateClamped = clampTo(rate, -20.0, 20.0);
    if (rateClamped != rate)
        GST_WARNING_OBJECT(pipeline(), "Clamping original rate (%f) to [-20, 20] (%f), higher rates cause crashes", rate, rateClamped);

    GST_DEBUG_OBJECT(pipeline(), "Setting playback rate to %f", rateClamped);
    // Avoid a useless playback rate update.
    if (m_playbackRate == rateClamped) {
        // But make sure the upper layers are notified if a rate was set.
        if (!m_isChangingRate && m_player->rate() != m_playbackRate)
            m_player->rateChanged();
        return;
    }

    if (m_isLiveStream) {
        // Notify upper layers that we cannot handle the passed rate.
        m_isChangingRate = false;
        m_player->rateChanged();
        return;
    }

    GstState state, pending;

    m_playbackRate = rateClamped;
    m_isChangingRate = true;

    gst_element_get_state(m_pipeline.get(), &state, &pending, 0);

    if (!rateClamped) {
        m_isChangingRate = false;
        m_isPlaybackRatePaused = true;
        if (state != GST_STATE_PAUSED && pending != GST_STATE_PAUSED)
            changePipelineState(GST_STATE_PAUSED);
        return;
    }

    if ((state != GST_STATE_PLAYING && state != GST_STATE_PAUSED)
        || (pending == GST_STATE_PAUSED))
        return;

    updatePlaybackRate();
}

double MediaPlayerPrivateGStreamer::rate() const
{
    return m_playbackRate;
}

void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
{
    GST_DEBUG_OBJECT(pipeline(), "Preserving audio pitch: %s", boolForPrinting(preservesPitch));
    m_shouldPreservePitch = preservesPitch;
}

void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
{
    GST_DEBUG_OBJECT(pipeline(), "Setting preload to %s", convertEnumerationToString(preload).utf8().data());
    if (preload == MediaPlayer::Preload::Auto && m_isLiveStream)
        return;

    m_preload = preload;
    updateDownloadBufferingFlag();

    if (m_isDelayingLoad && m_preload != MediaPlayer::Preload::None) {
        m_isDelayingLoad = false;
        commitLoad();
    }
}

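// Buffered ranges are queried from the pipeline in GST_FORMAT_PERCENT and scaled to the
// media duration: with a 100-second stream, for instance, a reported range of [0%, 25%]
// maps to [0s, 25s]. If no range is reported, a single [0, maxTimeLoaded()] range is used.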
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
{
    auto timeRanges = makeUnique<PlatformTimeRanges>();
    if (m_didErrorOccur || m_isLiveStream)
        return timeRanges;

    MediaTime mediaDuration = durationMediaTime();
    if (!mediaDuration || mediaDuration.isPositiveInfinite())
        return timeRanges;

    GRefPtr<GstQuery> query = adoptGRef(gst_query_new_buffering(GST_FORMAT_PERCENT));

    if (!gst_element_query(m_pipeline.get(), query.get()))
        return timeRanges;

    unsigned numBufferingRanges = gst_query_get_n_buffering_ranges(query.get());
    for (unsigned index = 0; index < numBufferingRanges; index++) {
        gint64 rangeStart = 0, rangeStop = 0;
        if (gst_query_parse_nth_buffering_range(query.get(), index, &rangeStart, &rangeStop)) {
            uint64_t startTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStart, GST_FORMAT_PERCENT_MAX);
            uint64_t stopTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStop, GST_FORMAT_PERCENT_MAX);
            timeRanges->add(MediaTime(startTime, GST_SECOND), MediaTime(stopTime, GST_SECOND));
        }
    }

    // Fall back to the more general maxTimeLoaded() if no range has been found.
    if (!timeRanges->length()) {
        MediaTime loaded = maxTimeLoaded();
        if (loaded.isValid() && loaded)
            timeRanges->add(MediaTime::zeroTime(), loaded);
    }

    return timeRanges;
}

MediaTime MediaPlayerPrivateGStreamer::maxMediaTimeSeekable() const
{
    GST_TRACE_OBJECT(pipeline(), "errorOccurred: %s, isLiveStream: %s", boolForPrinting(m_didErrorOccur), boolForPrinting(m_isLiveStream));
    if (m_didErrorOccur)
        return MediaTime::zeroTime();

    if (m_isLiveStream)
        return MediaTime::zeroTime();

    MediaTime duration = durationMediaTime();
    GST_DEBUG_OBJECT(pipeline(), "maxMediaTimeSeekable, duration: %s", toString(duration).utf8().data());
    // An infinite duration means this is a live stream.
    if (duration.isPositiveInfinite())
        return MediaTime::zeroTime();

    return duration;
}

MediaTime MediaPlayerPrivateGStreamer::maxTimeLoaded() const
{
    if (m_didErrorOccur)
        return MediaTime::zeroTime();

    MediaTime loaded = m_maxTimeLoaded;
    if (m_isEndReached)
        loaded = durationMediaTime();
    GST_LOG_OBJECT(pipeline(), "maxTimeLoaded: %s", toString(loaded).utf8().data());
    return loaded;
}

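// Loading progress is tracked in one of two ways: for WebKitWebSrc, by checking whether the
// network read position advanced since the previous call; for any other source, by checking
// whether maxTimeLoaded() changed since the previous call.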
bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
{
    if (m_didErrorOccur || m_loadingStalled)
        return false;

    if (WEBKIT_IS_WEB_SRC(m_source.get())) {
        GST_LOG_OBJECT(pipeline(), "Last network read position: %" G_GUINT64_FORMAT ", current: %" G_GUINT64_FORMAT, m_readPositionAtLastDidLoadingProgress, m_networkReadPosition);
        bool didLoadingProgress = m_readPositionAtLastDidLoadingProgress < m_networkReadPosition;
        m_readPositionAtLastDidLoadingProgress = m_networkReadPosition;
        GST_LOG_OBJECT(pipeline(), "didLoadingProgress: %s", boolForPrinting(didLoadingProgress));
        return didLoadingProgress;
    }

    if (UNLIKELY(!m_pipeline || !durationMediaTime() || (!isMediaSource() && !totalBytes())))
        return false;

    MediaTime currentMaxTimeLoaded = maxTimeLoaded();
    bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
    m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
    GST_LOG_OBJECT(pipeline(), "didLoadingProgress: %s", boolForPrinting(didLoadingProgress));
    return didLoadingProgress;
}

unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
{
    if (m_didErrorOccur || !m_source || m_isLiveStream)
        return 0;

    if (m_totalBytes)
        return m_totalBytes;

    GstFormat fmt = GST_FORMAT_BYTES;
    gint64 length = 0;
    if (gst_element_query_duration(m_source.get(), fmt, &length)) {
        GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
        m_totalBytes = static_cast<unsigned long long>(length);
        m_isLiveStream = !length;
        return m_totalBytes;
    }

    // Fall back to querying the source pads manually. See also https://bugzilla.gnome.org/show_bug.cgi?id=638749
    GstIterator* iter = gst_element_iterate_src_pads(m_source.get());
    bool done = false;
    while (!done) {
        GValue item = G_VALUE_INIT;
        switch (gst_iterator_next(iter, &item)) {
        case GST_ITERATOR_OK: {
            GstPad* pad = static_cast<GstPad*>(g_value_get_object(&item));
            gint64 padLength = 0;
            if (gst_pad_query_duration(pad, fmt, &padLength) && padLength > length)
                length = padLength;
            break;
        }
        case GST_ITERATOR_RESYNC:
            gst_iterator_resync(iter);
            break;
        case GST_ITERATOR_ERROR:
            FALLTHROUGH;
        case GST_ITERATOR_DONE:
            done = true;
            break;
        }

        g_value_unset(&item);
    }

    gst_iterator_free(iter);

    GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
    m_totalBytes = static_cast<unsigned long long>(length);
    m_isLiveStream = !length;
    return m_totalBytes;
}

bool MediaPlayerPrivateGStreamer::hasSingleSecurityOrigin() const
{
    if (!m_source)
        return false;

    if (!WEBKIT_IS_WEB_SRC(m_source.get()))
        return true;

    GUniqueOutPtr<char> originalURI, resolvedURI;
    g_object_get(m_source.get(), "location", &originalURI.outPtr(), "resolved-location", &resolvedURI.outPtr(), nullptr);
    if (!originalURI || !resolvedURI)
        return false;
    if (!g_strcmp0(originalURI.get(), resolvedURI.get()))
        return true;

    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::createFromString(String::fromUTF8(resolvedURI.get())));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(String::fromUTF8(originalURI.get())));
    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}

Optional<bool> MediaPlayerPrivateGStreamer::wouldTaintOrigin(const SecurityOrigin& origin) const
{
    if (webkitGstCheckVersion(1, 12, 0)) {
        GST_TRACE_OBJECT(pipeline(), "Checking %u origins", m_origins.size());
        for (auto& responseOrigin : m_origins) {
            if (!origin.canAccess(*responseOrigin)) {
                GST_DEBUG_OBJECT(pipeline(), "Found an inaccessible response origin");
                return true;
            }
        }
        GST_DEBUG_OBJECT(pipeline(), "All response origins are accessible");
        return false;
    }

    // GStreamer < 1.12 has an incomplete uridownloader implementation, so we
    // can't use WebKitWebSrc for adaptive fragment downloading if this
    // version is detected.
    UNUSED_PARAM(origin);
    return m_hasTaintedOrigin;
}

void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
{
    GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_pipeline.get()), GST_STATE_PAUSED);
    gst_element_post_message(m_pipeline.get(), message);
}

#if ENABLE(WEB_AUDIO)
void MediaPlayerPrivateGStreamer::ensureAudioSourceProvider()
{
    if (!m_audioSourceProvider)
        m_audioSourceProvider = makeUnique<AudioSourceProviderGStreamer>();
}

AudioSourceProvider* MediaPlayerPrivateGStreamer::audioSourceProvider()
{
    ensureAudioSourceProvider();
    return m_audioSourceProvider.get();
}
#endif

void MediaPlayerPrivateGStreamer::durationChanged()
{
    MediaTime previousDuration = durationMediaTime();
    m_cachedDuration = MediaTime::invalidTime();

    // Avoid emitting durationChanged in the case where the previous
    // duration was 0 because that case is already handled by the
    // HTMLMediaElement.
    if (previousDuration && durationMediaTime() != previousDuration)
        m_player->durationChanged();
}

void MediaPlayerPrivateGStreamer::sourceSetup(GstElement* sourceElement)
{
    GST_DEBUG_OBJECT(pipeline(), "Source element set-up for %s", GST_ELEMENT_NAME(sourceElement));

    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    m_source = sourceElement;

    if (WEBKIT_IS_WEB_SRC(m_source.get())) {
        webKitWebSrcSetMediaPlayer(WEBKIT_WEB_SRC_CAST(m_source.get()), m_player);
        g_signal_connect(GST_ELEMENT_PARENT(m_source.get()), "element-added", G_CALLBACK(uriDecodeBinElementAddedCallback), this);
#if ENABLE(MEDIA_STREAM)
    } else if (WEBKIT_IS_MEDIA_STREAM_SRC(sourceElement)) {
        auto stream = m_streamPrivate.get();
        ASSERT(stream);
        webkitMediaStreamSrcSetStream(WEBKIT_MEDIA_STREAM_SRC(sourceElement), stream);
#endif
    }
}

void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
{
    player->setAudioStreamProperties(object);
}

void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
{
    if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
        return;

    const char* role = m_player->isVideoPlayer() ? "video" : "music";
    GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr);
    g_object_set(object, "stream-properties", structure, nullptr);
    gst_structure_free(structure);
    GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
    GST_DEBUG_OBJECT(pipeline(), "Set media.role as %s at %s", role, elementName.get());
}

void MediaPlayerPrivateGStreamer::sourceSetupCallback(MediaPlayerPrivateGStreamer* player, GstElement* sourceElement)
{
    player->sourceSetup(sourceElement);
}

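// Requests a pipeline state change, which may complete asynchronously. The call only fails
// when the change fails synchronously; GST_STATE_CHANGE_ASYNC is not an error. A watchdog
// timer is armed when entering READY so resources can be freed if the pipeline lingers
// there for too long.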
bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
{
    ASSERT(m_pipeline);

    GstState currentState, pending;

    gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
    if (currentState == newState || pending == newState) {
        GST_DEBUG_OBJECT(pipeline(), "Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
            gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
        return true;
    }

    GST_DEBUG_OBJECT(pipeline(), "Changing state to %s from %s with %s pending", gst_element_state_get_name(newState),
        gst_element_state_get_name(currentState), gst_element_state_get_name(pending));

    GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState);
    GstState pausedOrPlaying = newState == GST_STATE_PLAYING ? GST_STATE_PAUSED : GST_STATE_PLAYING;
    if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE)
        return false;

    // Create a timer when entering the READY state so that we can free resources if we stay for too long on READY.
    // Also remove the timer if we request a state change for any state other than READY. See also https://bugs.webkit.org/show_bug.cgi?id=117354
    if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
        // Max interval in seconds to stay in the READY state on manual state change requests.
        static const Seconds readyStateTimerDelay { 1_min };
        m_readyTimerHandler.startOneShot(readyStateTimerDelay);
    } else if (newState != GST_STATE_READY)
        m_readyTimerHandler.stop();

    return true;
}

void MediaPlayerPrivateGStreamer::setPlaybinURL(const URL& url)
{
    // Clean out everything after the file:// URL path.
    String cleanURLString(url.string());
    if (url.isLocalFile())
        cleanURLString = cleanURLString.substring(0, url.pathEnd());

    m_url = URL(URL(), cleanURLString);
    convertToInternalProtocol(m_url);
    GST_INFO_OBJECT(pipeline(), "Load %s", m_url.string().utf8().data());
    g_object_set(m_pipeline.get(), "uri", m_url.string().utf8().data(), nullptr);
}

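// Recursively sets the "sync" property on every sink inside the given element. Clock sync
// is enabled for regular media, but disabled for MediaStream sources (see
// load(MediaStreamPrivate&) above) where frames should be rendered as soon as they arrive.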
static void setSyncOnClock(GstElement* element, bool sync)
{
    if (!GST_IS_BIN(element)) {
        g_object_set(element, "sync", sync, nullptr);
        return;
    }

    GstIterator* it = gst_bin_iterate_sinks(GST_BIN(element));
    while (gst_iterator_foreach(it, (GstIteratorForeachFunction)([](const GValue* item, void* syncPtr) {
        bool* sync = static_cast<bool*>(syncPtr);
        setSyncOnClock(GST_ELEMENT(g_value_get_object(item)), *sync);
    }), &sync) == GST_ITERATOR_RESYNC)
        gst_iterator_resync(it);
    gst_iterator_free(it);
}

void MediaPlayerPrivateGStreamer::syncOnClock(bool sync)
{
    setSyncOnClock(videoSink(), sync);
    setSyncOnClock(audioSink(), sync);
}

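// The notifyPlayerOf{Video,Audio,Text}() handlers below service the legacy playbin path:
// tracks are enumerated through the n-video/n-audio/n-text properties and receive synthetic
// stream IDs ("V0", "A1", ...) because these pads carry no stable stream ID.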
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    ASSERT(m_isLegacyPlaybin || isMediaSource());

    unsigned numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-video", &numTracks, nullptr);

    GST_INFO_OBJECT(pipeline(), "Media has %d video tracks", numTracks);

    bool oldHasVideo = m_hasVideo;
    m_hasVideo = numTracks > 0;
    if (oldHasVideo != m_hasVideo)
        m_player->characteristicChanged();

    if (m_hasVideo)
        m_player->sizeChanged();

    if (useMediaSource) {
        GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
        m_player->mediaEngineUpdated();
        return;
    }

#if ENABLE(VIDEO_TRACK)
    Vector<String> validVideoStreams;
    for (unsigned i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        String streamId = "V" + String::number(i);
        validVideoStreams.append(streamId);
        if (i < m_videoTracks.size()) {
            RefPtr<VideoTrackPrivateGStreamer> existingTrack = m_videoTracks.get(streamId);
            if (existingTrack) {
                existingTrack->setIndex(i);
                if (existingTrack->pad() == pad)
                    continue;
            }
        }

        auto track = VideoTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
        ASSERT(streamId == track->id());
        m_videoTracks.add(streamId, track.copyRef());
        m_player->addVideoTrack(track.get());
    }

    purgeInvalidVideoTracks(validVideoStreams);
#endif

    m_player->mediaEngineUpdated();
}

void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier->notify(MainThreadNotification::VideoCapsChanged, [player] {
        player->notifyPlayerOfVideoCaps();
    });
}

void MediaPlayerPrivateGStreamer::notifyPlayerOfVideoCaps()
{
    m_videoSize = IntSize();
    m_player->mediaEngineUpdated();
}

1323 void MediaPlayerPrivateGStreamer::audioChangedCallback(MediaPlayerPrivateGStreamer* player)
1324 {
1325     player->m_notifier->notify(MainThreadNotification::AudioChanged, [player] {
1326         player->notifyPlayerOfAudio();
1327     });
1328 }
1329
1330 void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
1331 {
1332     if (UNLIKELY(!m_pipeline || !m_source))
1333         return;
1334
1335     ASSERT(m_isLegacyPlaybin || isMediaSource());
1336
1337     unsigned numTracks = 0;
1338     bool useMediaSource = isMediaSource();
1339     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
1340     g_object_get(element, "n-audio", &numTracks, nullptr);
1341
1342     GST_INFO_OBJECT(pipeline(), "Media has %d audio tracks", numTracks);
1343     bool oldHasAudio = m_hasAudio;
1344     m_hasAudio = numTracks > 0;
1345     if (oldHasAudio != m_hasAudio)
1346         m_player->characteristicChanged();
1347
1348     if (useMediaSource) {
1349         GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
1350         m_player->mediaEngineUpdated();
1351         return;
1352     }
1353
1354 #if ENABLE(VIDEO_TRACK)
1355     Vector<String> validAudioStreams;
1356     for (unsigned i = 0; i < numTracks; ++i) {
1357         GRefPtr<GstPad> pad;
1358         g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
1359         ASSERT(pad);
1360
1361         String streamId = "A" + String::number(i);
1362         validAudioStreams.append(streamId);
1363         if (i < m_audioTracks.size()) {
1364             RefPtr<AudioTrackPrivateGStreamer> existingTrack = m_audioTracks.get(streamId);
1365             if (existingTrack) {
1366                 existingTrack->setIndex(i);
1367                 if (existingTrack->pad() == pad)
1368                     continue;
1369             }
1370         }
1371
1372         auto track = AudioTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
1373         ASSERT(streamId == track->id());
1374         m_audioTracks.add(streamId, track);
1375         m_player->addAudioTrack(*track);
1376     }
1377
1378     purgeInvalidAudioTracks(validAudioStreams);
1379 #endif
1380
1381     m_player->mediaEngineUpdated();
1382 }
1383
1384 #if ENABLE(VIDEO_TRACK)
1385 void MediaPlayerPrivateGStreamer::textChangedCallback(MediaPlayerPrivateGStreamer* player)
1386 {
1387     player->m_notifier->notify(MainThreadNotification::TextChanged, [player] {
1388         player->notifyPlayerOfText();
1389     });
1390 }
1391
1392 void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
1393 {
1394     if (UNLIKELY(!m_pipeline || !m_source))
1395         return;
1396
1397     ASSERT(m_isLegacyPlaybin || isMediaSource());
1398
1399     unsigned numTracks = 0;
1400     bool useMediaSource = isMediaSource();
1401     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
1402     g_object_get(element, "n-text", &numTracks, nullptr);
1403
1404     GST_INFO_OBJECT(pipeline(), "Media has %d text tracks", numTracks);
1405
1406     if (useMediaSource) {
1407         GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
1408         return;
1409     }
1410
1411     Vector<String> validTextStreams;
1412     for (unsigned i = 0; i < numTracks; ++i) {
1413         GRefPtr<GstPad> pad;
1414         g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
1415         ASSERT(pad);
1416
1417         // We can't assume the pad has a sticky event here like implemented in
1418         // InbandTextTrackPrivateGStreamer because it might be emitted after the
1419         // track was created. So fallback to a dummy stream ID like in the Audio
1420         // and Video tracks.
1421         String streamId = "T" + String::number(i);
1422
1423         validTextStreams.append(streamId);
1424         if (i < m_textTracks.size()) {
1425             RefPtr<InbandTextTrackPrivateGStreamer> existingTrack = m_textTracks.get(streamId);
1426             if (existingTrack) {
1427                 existingTrack->setIndex(i);
1428                 if (existingTrack->pad() == pad)
1429                     continue;
1430             }
1431         }
1432
1433         auto track = InbandTextTrackPrivateGStreamer::create(i, pad);
1434         m_textTracks.add(streamId, track.copyRef());
1435         m_player->addTextTrack(track.get());
1436     }
1437
1438     purgeInvalidTextTracks(validTextStreams);
1439 }
1440
1441 GstFlowReturn MediaPlayerPrivateGStreamer::newTextSampleCallback(MediaPlayerPrivateGStreamer* player)
1442 {
1443     player->newTextSample();
1444     return GST_FLOW_OK;
1445 }
1446
1447 void MediaPlayerPrivateGStreamer::newTextSample()
1448 {
1449     if (!m_textAppSink)
1450         return;
1451
1452     GRefPtr<GstEvent> streamStartEvent = adoptGRef(
1453         gst_pad_get_sticky_event(m_textAppSinkPad.get(), GST_EVENT_STREAM_START, 0));
1454
1455     GRefPtr<GstSample> sample;
1456     g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr);
1457     ASSERT(sample);
1458
1459     if (streamStartEvent) {
1460         bool found = false;
1461         const gchar* id;
1462         gst_event_parse_stream_start(streamStartEvent.get(), &id);
1463         for (auto& track : m_textTracks.values()) {
1464             if (!strcmp(track->streamId().utf8().data(), id)) {
1465                 track->handleSample(sample);
1466                 found = true;
1467                 break;
1468             }
1469         }
1470         if (!found)
1471             GST_WARNING("Got sample with unknown stream ID %s.", id);
1472     } else
1473         GST_WARNING("Unable to handle sample with no stream start event.");
1474 }
1475 #endif
1476
1477 MediaTime MediaPlayerPrivateGStreamer::platformDuration() const
1478 {
1479     if (!m_pipeline)
1480         return MediaTime::invalidTime();
1481
1482     GST_TRACE_OBJECT(pipeline(), "errorOccured: %s, pipeline state: %s", boolForPrinting(m_didErrorOccur), gst_element_state_get_name(GST_STATE(m_pipeline.get())));
1483     if (m_didErrorOccur)
1484         return MediaTime::invalidTime();
1485
1486     // The duration query would fail on a not-prerolled pipeline.
1487     if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
1488         return MediaTime::invalidTime();
1489
1490     int64_t duration = 0;
1491     if (!gst_element_query_duration(m_pipeline.get(), GST_FORMAT_TIME, &duration) || !GST_CLOCK_TIME_IS_VALID(duration)) {
1492         GST_DEBUG_OBJECT(pipeline(), "Time duration query failed for %s", m_url.string().utf8().data());
1493         return MediaTime::positiveInfiniteTime();
1494     }
1495
1496     GST_LOG_OBJECT(pipeline(), "Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
1497     return MediaTime(duration, GST_SECOND);
1498 }
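
// A worked example of the conversion above (illustrative numbers): the query
// reports nanoseconds, and MediaTime(duration, GST_SECOND) interprets that
// value against a timescale of GST_SECOND (10^9), so a reported duration of
// 90000000000 ns becomes a MediaTime of 90 seconds. A failed query maps to
// positiveInfiniteTime(), matching how unbounded (live) streams are expected
// to report their duration.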
1499
1500 bool MediaPlayerPrivateGStreamer::isMuted() const
1501 {
1502     if (!m_volumeElement)
1503         return false;
1504
1505     gboolean isMuted;
1506     g_object_get(m_volumeElement.get(), "mute", &isMuted, nullptr);
1507     GST_INFO_OBJECT(pipeline(), "Player is muted: %s", boolForPrinting(!!isMuted));
1508     return isMuted;
1509 }
1510
1511 void MediaPlayerPrivateGStreamer::commitLoad()
1512 {
1513     ASSERT(!m_isDelayingLoad);
1514     GST_DEBUG_OBJECT(pipeline(), "Committing load.");
1515
1516     // GStreamer needs to have the pipeline set to a paused state to
1517     // start providing anything useful.
1518     changePipelineState(GST_STATE_PAUSED);
1519
1520     updateDownloadBufferingFlag();
1521     updateStates();
1522 }
1523
1524 void MediaPlayerPrivateGStreamer::fillTimerFired()
1525 {
1526     if (m_didErrorOccur) {
1527         GST_DEBUG_OBJECT(pipeline(), "[Buffering] An error occurred, disabling the fill timer");
1528         m_fillTimer.stop();
1529         return;
1530     }
1531
1532     GRefPtr<GstQuery> query = adoptGRef(gst_query_new_buffering(GST_FORMAT_PERCENT));
1533     double fillStatus = 100.0;
1534     GstBufferingMode mode = GST_BUFFERING_DOWNLOAD;
1535
1536     if (gst_element_query(pipeline(), query.get())) {
1537         gst_query_parse_buffering_stats(query.get(), &mode, nullptr, nullptr, nullptr);
1538
1539         int percentage;
1540         gst_query_parse_buffering_percent(query.get(), nullptr, &percentage);
1541         fillStatus = percentage;
1542     } else if (m_httpResponseTotalSize) {
1543         GST_DEBUG_OBJECT(pipeline(), "[Buffering] Query failed, falling back to network read position estimation");
1544         fillStatus = 100.0 * (static_cast<double>(m_networkReadPosition) / static_cast<double>(m_httpResponseTotalSize));
1545     } else {
1546         GST_DEBUG_OBJECT(pipeline(), "[Buffering] Unable to determine on-disk buffering status");
1547         return;
1548     }
1549
1550     updateBufferingStatus(mode, fillStatus);
1551 }
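
// Worked example for the estimation fallback above (hypothetical numbers):
// with m_networkReadPosition = 25 MB and m_httpResponseTotalSize = 100 MB,
// fillStatus = 100.0 * (25 / 100) = 25.0, which updateBufferingStatus() then
// treats like a regular buffering percentage.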
1552
1553 void MediaPlayerPrivateGStreamer::loadStateChanged()
1554 {
1555     updateStates();
1556 }
1557
1558 void MediaPlayerPrivateGStreamer::timeChanged()
1559 {
1560     updateStates();
1561     m_player->timeChanged();
1562 }
1563
1564 void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState networkError, MediaPlayer::ReadyState readyState, bool forceNotifications)
1565 {
1566     GST_WARNING("Loading failed, error: %s", convertEnumerationToString(networkError).utf8().data());
1567
1568     m_didErrorOccur = true;
1569     if (forceNotifications || m_networkState != networkError) {
1570         m_networkState = networkError;
1571         m_player->networkStateChanged();
1572     }
1573     if (forceNotifications || m_readyState != readyState) {
1574         m_readyState = readyState;
1575         m_player->readyStateChanged();
1576     }
1577
1578     // Loading failed, remove ready timer.
1579     m_readyTimerHandler.stop();
1580 }
1581
1582 GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
1583 {
1584     m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr);
1585     if (!m_autoAudioSink) {
1586         GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation");
1587         return nullptr;
1588     }
1589
1590     g_signal_connect_swapped(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);
1591
1592 #if ENABLE(WEB_AUDIO)
1593     GstElement* audioSinkBin = gst_bin_new("audio-sink");
1594     ensureAudioSourceProvider();
1595     m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
1596     return audioSinkBin;
1597 #else
1598     return m_autoAudioSink.get();
1599 #endif
1600 }
1601
1602 GstElement* MediaPlayerPrivateGStreamer::audioSink() const
1603 {
1604     GstElement* sink;
1605     g_object_get(m_pipeline.get(), "audio-sink", &sink, nullptr);
1606     return sink;
1607 }
1608
1609 MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
1610 {
1611     GST_TRACE_OBJECT(pipeline(), "isEndReached: %s, seeking: %s, seekTime: %s", boolForPrinting(m_isEndReached), boolForPrinting(m_isSeeking), m_seekTime.toString().utf8().data());
1612     if (m_isEndReached && m_isSeeking)
1613         return m_seekTime;
1614
1615     // This constant should remain lower than HTMLMediaElement's maxTimeupdateEventFrequency.
1616     static const Seconds positionCacheThreshold = 200_ms;
1617     Seconds now = WTF::WallTime::now().secondsSinceEpoch();
1618     if (m_lastQueryTime && (now - m_lastQueryTime.value()) < positionCacheThreshold && m_cachedPosition.isValid()) {
1619         GST_TRACE_OBJECT(pipeline(), "Returning cached position: %s", m_cachedPosition.toString().utf8().data());
1620         return m_cachedPosition;
1621     }
1622
1623     m_lastQueryTime = now;
1624
1625     // Position is only available if no async state change is going on and the state is either paused or playing.
1626     gint64 position = GST_CLOCK_TIME_NONE;
1627     GstQuery* query = gst_query_new_position(GST_FORMAT_TIME);
1628     if (gst_element_query(m_pipeline.get(), query))
1629         gst_query_parse_position(query, 0, &position);
1630     gst_query_unref(query);
1631
1632     GstClockTime gstreamerPosition = static_cast<GstClockTime>(position);
1633     GST_TRACE_OBJECT(pipeline(), "Position %" GST_TIME_FORMAT ", canFallBackToLastFinishedSeekPosition: %s", GST_TIME_ARGS(gstreamerPosition), boolForPrinting(m_canFallBackToLastFinishedSeekPosition));
1634
1635     MediaTime playbackPosition = MediaTime::zeroTime();
1636
1637     if (GST_CLOCK_TIME_IS_VALID(gstreamerPosition))
1638         playbackPosition = MediaTime(gstreamerPosition, GST_SECOND);
1639     else if (m_canFallBackToLastFinishedSeekPosition)
1640         playbackPosition = m_seekTime;
1641
1642     m_cachedPosition = playbackPosition;
1643     return playbackPosition;
1644 }
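
// A minimal sketch of the caching decision above (names are illustrative):
//
//     Seconds now = WTF::WallTime::now().secondsSinceEpoch();
//     bool useCache = lastQueryTime && (now - *lastQueryTime) < positionCacheThreshold
//         && cachedPosition.isValid();
//
// Two position queries issued within 200 ms thus return the same value. The
// cache is invalidated elsewhere, for instance in asyncStateChangeDone() once
// a seek completes, by resetting m_cachedPosition to an invalid MediaTime.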
1645
1646 void MediaPlayerPrivateGStreamer::enableTrack(TrackPrivateBaseGStreamer::TrackType trackType, unsigned index)
1647 {
1648     // FIXME: Remove isMediaSource() test below when fixing https://bugs.webkit.org/show_bug.cgi?id=182531.
1649     if (isMediaSource()) {
1650         GST_FIXME_OBJECT(m_pipeline.get(), "Audio/Video/Text track switching is not yet supported by the MSE backend.");
1651         return;
1652     }
1653
1654     const char* propertyName;
1655     const char* trackTypeAsString;
1656     Vector<String> selectedStreams;
1657     String selectedStreamId;
1658
1659     GstStream* stream = nullptr;
1660
1661     if (!m_isLegacyPlaybin) {
1662         stream = gst_stream_collection_get_stream(m_streamCollection.get(), index);
1663         if (!stream) {
1664             GST_WARNING_OBJECT(pipeline(), "No stream to select at index %u", index);
1665             return;
1666         }
1667         selectedStreamId = String::fromUTF8(gst_stream_get_stream_id(stream));
1668         selectedStreams.append(selectedStreamId);
1669     }
1670
1671     switch (trackType) {
1672     case TrackPrivateBaseGStreamer::TrackType::Audio:
1673         propertyName = "current-audio";
1674         trackTypeAsString = "audio";
1675         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentAudioStreamId) {
1676             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
1677             return;
1678         }
1679
1680         if (!m_currentTextStreamId.isEmpty())
1681             selectedStreams.append(m_currentTextStreamId);
1682         if (!m_currentVideoStreamId.isEmpty())
1683             selectedStreams.append(m_currentVideoStreamId);
1684         break;
1685     case TrackPrivateBaseGStreamer::TrackType::Video:
1686         propertyName = "current-video";
1687         trackTypeAsString = "video";
1688         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentVideoStreamId) {
1689             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
1690             return;
1691         }
1692
1693         if (!m_currentAudioStreamId.isEmpty())
1694             selectedStreams.append(m_currentAudioStreamId);
1695         if (!m_currentTextStreamId.isEmpty())
1696             selectedStreams.append(m_currentTextStreamId);
1697         break;
1698     case TrackPrivateBaseGStreamer::TrackType::Text:
1699         propertyName = "current-text";
1700         trackTypeAsString = "text";
1701         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentTextStreamId) {
1702             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
1703             return;
1704         }
1705
1706         if (!m_currentAudioStreamId.isEmpty())
1707             selectedStreams.append(m_currentAudioStreamId);
1708         if (!m_currentVideoStreamId.isEmpty())
1709             selectedStreams.append(m_currentVideoStreamId);
1710         break;
1711     case TrackPrivateBaseGStreamer::TrackType::Unknown:
1712         FALLTHROUGH;
1713     default:
1714         ASSERT_NOT_REACHED();
1715     }
1716
1717     GST_INFO_OBJECT(pipeline(), "Enabling %s track with index: %u", trackTypeAsString, index);
1718     if (m_isLegacyPlaybin)
1719         g_object_set(m_pipeline.get(), propertyName, index, nullptr);
1720     else {
1721         GList* selectedStreamsList = nullptr;
1722
1723         for (const auto& streamId : selectedStreams)
1724             selectedStreamsList = g_list_append(selectedStreamsList, g_strdup(streamId.utf8().data()));
1725
1726         // TODO: MSE GstStream API support: https://bugs.webkit.org/show_bug.cgi?id=182531
1727         gst_element_send_event(m_pipeline.get(), gst_event_new_select_streams(selectedStreamsList));
1728         g_list_free_full(selectedStreamsList, reinterpret_cast<GDestroyNotify>(g_free));
1729     }
1730 }
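
// For reference, the playbin3 branch above relies on the upstream
// GST_EVENT_SELECT_STREAMS API. A standalone sketch (stream IDs are
// illustrative):
//
//     GList* streams = nullptr;
//     streams = g_list_append(streams, g_strdup("audio-stream-0"));
//     streams = g_list_append(streams, g_strdup("video-stream-0"));
//     gst_element_send_event(pipeline, gst_event_new_select_streams(streams));
//     g_list_free_full(streams, reinterpret_cast<GDestroyNotify>(g_free));
//
// Unlike the legacy current-audio/current-video/current-text properties, the
// event carries the full set of desired streams, which is why the IDs of the
// other currently-selected streams are re-appended above.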
1731
1732 void MediaPlayerPrivateGStreamer::updateTracks()
1733 {
1734     ASSERT(!m_isLegacyPlaybin);
1735
1736     bool useMediaSource = isMediaSource();
1737     unsigned length = gst_stream_collection_get_size(m_streamCollection.get());
1738
1739     bool oldHasAudio = m_hasAudio;
1740     bool oldHasVideo = m_hasVideo;
1741     // New stream collections override previous ones.
1742     clearTracks();
1743     unsigned textTrackIndex = 0;
1744     for (unsigned i = 0; i < length; i++) {
1745         GRefPtr<GstStream> stream = gst_stream_collection_get_stream(m_streamCollection.get(), i);
1746         String streamId(gst_stream_get_stream_id(stream.get()));
1747         GstStreamType type = gst_stream_get_stream_type(stream.get());
1748
1749         GST_DEBUG_OBJECT(pipeline(), "Inspecting %s track with ID %s", gst_stream_type_get_name(type), streamId.utf8().data());
1750         if (type & GST_STREAM_TYPE_AUDIO)
1751             CREATE_TRACK(audio, Audio);
1752         else if (type & GST_STREAM_TYPE_VIDEO)
1753             CREATE_TRACK(video, Video);
1754         else if ((type & GST_STREAM_TYPE_TEXT) && !useMediaSource) {
1755 #if ENABLE(VIDEO_TRACK)
1756             auto track = InbandTextTrackPrivateGStreamer::create(textTrackIndex++, stream);
1757             m_textTracks.add(streamId, track.copyRef());
1758             m_player->addTextTrack(track.get());
1759 #endif
1760         } else
1761             GST_WARNING("Unknown track type found for stream %s", streamId.utf8().data());
1762     }
1763
1764     if (oldHasVideo != m_hasVideo || oldHasAudio != m_hasAudio)
1765         m_player->characteristicChanged();
1766
1767     if (m_hasVideo)
1768         m_player->sizeChanged();
1769
1770     m_player->mediaEngineUpdated();
1771 }
1772
1773 void MediaPlayerPrivateGStreamer::clearTracks()
1774 {
1775 #if ENABLE(VIDEO_TRACK)
1776     CLEAR_TRACKS(m_audioTracks, m_player->removeAudioTrack);
1777     CLEAR_TRACKS(m_videoTracks, m_player->removeVideoTrack);
1778     CLEAR_TRACKS(m_textTracks, m_player->removeTextTrack);
1779 #endif // ENABLE(VIDEO_TRACK)
1780 }
1781
1782 void MediaPlayerPrivateGStreamer::videoChangedCallback(MediaPlayerPrivateGStreamer* player)
1783 {
1784     player->m_notifier->notify(MainThreadNotification::VideoChanged, [player] {
1785         player->notifyPlayerOfVideo();
1786     });
1787 }
1788
1789 void MediaPlayerPrivateGStreamer::setPipeline(GstElement* pipeline)
1790 {
1791     m_pipeline = pipeline;
1792
1793     GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
1794     gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
1795         auto& player = *static_cast<MediaPlayerPrivateGStreamer*>(userData);
1796
1797         if (player.handleSyncMessage(message)) {
1798             gst_message_unref(message);
1799             return GST_BUS_DROP;
1800         }
1801
1802         return GST_BUS_PASS;
1803     }, this, nullptr);
1804 }
1805
1806 bool MediaPlayerPrivateGStreamer::handleSyncMessage(GstMessage* message)
1807 {
1808     if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_STREAM_COLLECTION && !m_isLegacyPlaybin) {
1809         GRefPtr<GstStreamCollection> collection;
1810         gst_message_parse_stream_collection(message, &collection.outPtr());
1811
1812         if (collection) {
1813             m_streamCollection.swap(collection);
1814             m_notifier->notify(MainThreadNotification::StreamCollectionChanged, [this] {
1815                 this->updateTracks();
1816             });
1817         }
1818     }
1819
1820     if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_NEED_CONTEXT)
1821         return false;
1822
1823     const gchar* contextType;
1824     if (!gst_message_parse_context_type(message, &contextType))
1825         return false;
1826
1827     GST_DEBUG_OBJECT(pipeline(), "Handling %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message));
1828
1829     if (!g_strcmp0(contextType, WEBKIT_WEB_SRC_PLAYER_CONTEXT_TYPE_NAME)) {
1830         GRefPtr<GstContext> context = adoptGRef(gst_context_new(WEBKIT_WEB_SRC_PLAYER_CONTEXT_TYPE_NAME, FALSE));
1831         GstStructure* contextStructure = gst_context_writable_structure(context.get());
1832
1833         ASSERT(m_player);
1834         gst_structure_set(contextStructure, "player", G_TYPE_POINTER, m_player, nullptr);
1835         gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
1836         return true;
1837     }
1838
1839 #if ENABLE(ENCRYPTED_MEDIA)
1840     if (!g_strcmp0(contextType, "drm-preferred-decryption-system-id")) {
1841         initializationDataEncountered(parseInitDataFromProtectionMessage(message));
1842         bool isCDMAttached = waitForCDMAttachment();
1843         if (isCDMAttached && !isPlayerShuttingDown() && !m_cdmInstance->keySystem().isEmpty()) {
1844             const char* preferredKeySystemUuid = GStreamerEMEUtilities::keySystemToUuid(m_cdmInstance->keySystem());
1845             GST_INFO_OBJECT(pipeline(), "working with key system %s, continuing with key system %s on %s", m_cdmInstance->keySystem().utf8().data(), preferredKeySystemUuid, GST_MESSAGE_SRC_NAME(message));
1846
1847             GRefPtr<GstContext> context = adoptGRef(gst_context_new("drm-preferred-decryption-system-id", FALSE));
1848             GstStructure* contextStructure = gst_context_writable_structure(context.get());
1849             gst_structure_set(contextStructure, "decryption-system-id", G_TYPE_STRING, preferredKeySystemUuid, nullptr);
1850             gst_element_set_context(GST_ELEMENT(GST_MESSAGE_SRC(message)), context.get());
1851             return true;
1852         }
1853
1854         GST_WARNING_OBJECT(pipeline(), "waiting for a CDM failed, no CDM available");
1855         return false;
1856     }
1857 #endif // ENABLE(ENCRYPTED_MEDIA)
1858
1859     GST_DEBUG_OBJECT(pipeline(), "Unhandled %s need-context message for %s", contextType, GST_MESSAGE_SRC_NAME(message));
1860     return false;
1861 }
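
// For context: the messages handled above follow the standard GStreamer
// need-context pattern. A sketch from the posting element's side (hedged;
// |element| is illustrative):
//
//     gst_element_post_message(element,
//         gst_message_new_need_context(GST_OBJECT_CAST(element),
//             "drm-preferred-decryption-system-id"));
//
// When this handler returns true, the sync handler installed in setPipeline()
// drops the message from the bus, the context having been attached with
// gst_element_set_context().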
1862
1863 // Returns the size of the video.
1864 FloatSize MediaPlayerPrivateGStreamer::naturalSize() const
1865 {
1866 #if USE(GSTREAMER_HOLEPUNCH)
1867     // When using the holepunch we may not be able to get the video frame size, so we can't use
1868     // it. But we need to report some non-empty naturalSize for the player's GraphicsLayer
1869     // to be properly created.
1870     return s_holePunchDefaultFrameSize;
1871 #endif
1872
1873 #if ENABLE(MEDIA_STREAM)
1874     if (!m_isLegacyPlaybin && !m_currentVideoStreamId.isEmpty()) {
1875         RefPtr<VideoTrackPrivateGStreamer> videoTrack = m_videoTracks.get(m_currentVideoStreamId);
1876
1877         if (videoTrack) {
1878             auto tags = adoptGRef(gst_stream_get_tags(videoTrack->stream()));
1879             gint width, height;
1880
1881             if (tags && gst_tag_list_get_int(tags.get(), WEBKIT_MEDIA_TRACK_TAG_WIDTH, &width) && gst_tag_list_get_int(tags.get(), WEBKIT_MEDIA_TRACK_TAG_HEIGHT, &height))
1882                 return FloatSize(width, height);
1883         }
1884     }
1885 #endif // ENABLE(MEDIA_STREAM)
1886
1887     if (!hasVideo())
1888         return FloatSize();
1889
1890     if (!m_videoSize.isEmpty())
1891         return m_videoSize;
1892
1893     auto sampleLocker = holdLock(m_sampleMutex);
1894     if (!GST_IS_SAMPLE(m_sample.get()))
1895         return FloatSize();
1896
1897     GstCaps* caps = gst_sample_get_caps(m_sample.get());
1898     if (!caps)
1899         return FloatSize();
1900
1901     // TODO: handle possible clean aperture data. See https://bugzilla.gnome.org/show_bug.cgi?id=596571
1902     // TODO: handle possible transformation matrix. See https://bugzilla.gnome.org/show_bug.cgi?id=596326
1903
1904     // Get the video PAR and original size; if this fails the
1905     // video-sink has likely not yet negotiated its caps.
1906     int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
1907     IntSize originalSize;
1908     GstVideoFormat format;
1909     if (!getVideoSizeAndFormatFromCaps(caps, originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
1910         return FloatSize();
1911
1912 #if USE(TEXTURE_MAPPER_GL)
1913     // When using accelerated compositing, if the video is tagged as rotated 90 or 270 degrees, swap width and height.
1914     if (m_canRenderingBeAccelerated) {
1915         if (m_videoSourceOrientation.usesWidthAsHeight())
1916             originalSize = originalSize.transposedSize();
1917     }
1918 #endif
1919
1920     GST_DEBUG_OBJECT(pipeline(), "Original video size: %dx%d", originalSize.width(), originalSize.height());
1921     GST_DEBUG_OBJECT(pipeline(), "Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);
1922
1923     // Calculate DAR based on PAR and video size.
1924     int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
1925     int displayHeight = originalSize.height() * pixelAspectRatioDenominator;
1926
1927     // Divide display width and height by their GCD to avoid possible overflows.
1928     int displayAspectRatioGCD = gst_util_greatest_common_divisor(displayWidth, displayHeight);
1929     displayWidth /= displayAspectRatioGCD;
1930     displayHeight /= displayAspectRatioGCD;
1931
1932     // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
1933     uint64_t width = 0, height = 0;
1934     if (!(originalSize.height() % displayHeight)) {
1935         GST_DEBUG_OBJECT(pipeline(), "Keeping video original height");
1936         width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
1937         height = originalSize.height();
1938     } else if (!(originalSize.width() % displayWidth)) {
1939         GST_DEBUG_OBJECT(pipeline(), "Keeping video original width");
1940         height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
1941         width = originalSize.width();
1942     } else {
1943         GST_DEBUG_OBJECT(pipeline(), "Approximating while keeping original video height");
1944         width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
1945         height = originalSize.height();
1946     }
1947
1948     GST_DEBUG_OBJECT(pipeline(), "Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
1949     m_videoSize = FloatSize(static_cast<int>(width), static_cast<int>(height));
1950     return m_videoSize;
1951 }
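
// Worked example for the aspect-ratio math above (hypothetical caps): a
// 720x576 frame with a 16/15 pixel aspect ratio gives displayWidth = 720 * 16
// = 11520 and displayHeight = 576 * 15 = 8640; their GCD is 2880, reducing the
// display aspect ratio to 4/3. Since 576 % 3 == 0 the original height is kept,
// and the width becomes gst_util_uint64_scale_int(576, 4, 3) = 768, so
// naturalSize() reports 768x576.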
1952
1953 void MediaPlayerPrivateGStreamer::setVolume(float volume)
1954 {
1955     if (!m_volumeElement)
1956         return;
1957
1958     GST_DEBUG_OBJECT(pipeline(), "Setting volume: %f", volume);
1959     gst_stream_volume_set_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_LINEAR, static_cast<double>(volume));
1960 }
1961
1962 float MediaPlayerPrivateGStreamer::volume() const
1963 {
1964     if (!m_volumeElement)
1965         return 0;
1966
1967     return gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_LINEAR);
1968 }
1969
1970 void MediaPlayerPrivateGStreamer::notifyPlayerOfVolumeChange()
1971 {
1972     if (!m_player || !m_volumeElement)
1973         return;
1974     double volume = gst_stream_volume_get_volume(m_volumeElement.get(), GST_STREAM_VOLUME_FORMAT_LINEAR);
1976     // get_volume() can return values greater than 1.0 if the user
1977     // applies software gain via a third-party application (GNOME
1978     // volume control, for instance).
1979     volume = CLAMP(volume, 0.0, 1.0);
1980     m_player->volumeChanged(static_cast<float>(volume));
1981 }
1982
1983 void MediaPlayerPrivateGStreamer::volumeChangedCallback(MediaPlayerPrivateGStreamer* player)
1984 {
1985     // This is called when m_volumeElement receives the notify::volume signal.
1986     GST_DEBUG_OBJECT(player->pipeline(), "Volume changed to: %f", player->volume());
1987
1988     player->m_notifier->notify(MainThreadNotification::VolumeChanged, [player] {
1989         player->notifyPlayerOfVolumeChange();
1990     });
1991 }
1992
1993 MediaPlayer::NetworkState MediaPlayerPrivateGStreamer::networkState() const
1994 {
1995     return m_networkState;
1996 }
1997
1998 MediaPlayer::ReadyState MediaPlayerPrivateGStreamer::readyState() const
1999 {
2000     return m_readyState;
2001 }
2002
2003 void MediaPlayerPrivateGStreamer::setMuted(bool shouldMute)
2004 {
2005     if (!m_volumeElement || shouldMute == isMuted())
2006         return;
2007
2008     GST_INFO_OBJECT(pipeline(), "Muted? %s", boolForPrinting(shouldMute));
2009     g_object_set(m_volumeElement.get(), "mute", shouldMute, nullptr);
2010 }
2011
2012 void MediaPlayerPrivateGStreamer::notifyPlayerOfMute()
2013 {
2014     if (!m_player || !m_volumeElement)
2015         return;
2016
2017     gboolean muted;
2018     g_object_get(m_volumeElement.get(), "mute", &muted, nullptr);
2019     m_player->muteChanged(static_cast<bool>(muted));
2020 }
2021
2022 void MediaPlayerPrivateGStreamer::muteChangedCallback(MediaPlayerPrivateGStreamer* player)
2023 {
2024     // This is called when m_volumeElement receives the notify::mute signal.
2025     player->m_notifier->notify(MainThreadNotification::MuteChanged, [player] {
2026         player->notifyPlayerOfMute();
2027     });
2028 }
2029
2030 void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
2031 {
2032     GUniqueOutPtr<GError> err;
2033     GUniqueOutPtr<gchar> debug;
2034     MediaPlayer::NetworkState error;
2035     bool issueError = true;
2036     bool attemptNextLocation = false;
2037     const GstStructure* structure = gst_message_get_structure(message);
2038     GstState requestedState, currentState;
2039
2040     m_canFallBackToLastFinishedSeekPosition = false;
2041
2042     if (structure) {
2043         const gchar* messageTypeName = gst_structure_get_name(structure);
2044
2045         // Redirect messages are sent from elements, like qtdemux, to
2046         // notify of the new location(s) of the media.
2047         if (!g_strcmp0(messageTypeName, "redirect")) {
2048             mediaLocationChanged(message);
2049             return;
2050         }
2051     }
2052
2053     // We ignore state changes from internal elements. They are forwarded to playbin anyway.
2054     bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get());
2055
2056     GST_LOG_OBJECT(pipeline(), "Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
2057     switch (GST_MESSAGE_TYPE(message)) {
2058     case GST_MESSAGE_ERROR:
2059         if (m_shouldResetPipeline || !m_missingPluginCallbacks.isEmpty() || m_didErrorOccur)
2060             break;
2061         gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
2062         GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());
2063
2064         GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");
2065
2066         error = MediaPlayer::NetworkState::Empty;
2067         if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_CODEC_NOT_FOUND)
2068             || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE)
2069             || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED)
2070             || g_error_matches(err.get(), GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)
2071             || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
2072             error = MediaPlayer::NetworkState::FormatError;
2073         else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
2074             // Let the mediaPlayerClient handle the stream error; in this case the HTMLMediaElement will emit a stalled event.
2075             GST_ERROR("Decode error, let the Media element emit a stalled event.");
2076             m_loadingStalled = true;
2077             break;
2078         } else if (err->domain == GST_STREAM_ERROR) {
2079             error = MediaPlayer::NetworkState::DecodeError;
2080             attemptNextLocation = true;
2081         } else if (err->domain == GST_RESOURCE_ERROR)
2082             error = MediaPlayer::NetworkState::NetworkError;
2083
2084         if (attemptNextLocation)
2085             issueError = !loadNextLocation();
2086         if (issueError) {
2087             m_didErrorOccur = true;
2088             if (m_networkState != error) {
2089                 m_networkState = error;
2090                 m_player->networkStateChanged();
2091             }
2092         }
2093         break;
2094     case GST_MESSAGE_EOS:
2095         didEnd();
2096         break;
2097     case GST_MESSAGE_ASYNC_DONE:
2098         if (!messageSourceIsPlaybin || m_isDelayingLoad)
2099             break;
2100         asyncStateChangeDone();
2101         break;
2102     case GST_MESSAGE_STATE_CHANGED: {
2103         if (!messageSourceIsPlaybin || m_isDelayingLoad)
2104             break;
2105         updateStates();
2106
2107         // Construct a filename for the graphviz dot file output.
2108         GstState newState;
2109         gst_message_parse_state_changed(message, &currentState, &newState, nullptr);
2110         CString dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), '.',
2111             gst_element_state_get_name(currentState), '_', gst_element_state_get_name(newState)).utf8();
2112         GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());
2113
2114         break;
2115     }
2116     case GST_MESSAGE_BUFFERING:
2117         processBufferingStats(message);
2118         break;
2119     case GST_MESSAGE_DURATION_CHANGED:
2120         // Duration in MSE is managed by MediaSource, SourceBuffer and AppendPipeline.
2121         if (messageSourceIsPlaybin && !isMediaSource())
2122             durationChanged();
2123         break;
2124     case GST_MESSAGE_REQUEST_STATE:
2125         gst_message_parse_request_state(message, &requestedState);
2126         gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND);
2127         if (requestedState < currentState) {
2128             GST_INFO_OBJECT(pipeline(), "Element %s requested state change to %s", GST_MESSAGE_SRC_NAME(message),
2129                 gst_element_state_get_name(requestedState));
2130             m_requestedState = requestedState;
2131             if (!changePipelineState(requestedState))
2132                 loadingFailed(MediaPlayer::NetworkState::Empty);
2133         }
2134         break;
2135     case GST_MESSAGE_CLOCK_LOST:
2136         // This can only happen in PLAYING state and we should just
2137         // get a new clock by moving back to PAUSED and then to
2138         // PLAYING again.
2139         // This can happen if the stream that ends in a sink that
2140         // provides the current clock disappears, for example if
2141         // the audio sink provides the clock and the audio stream
2142         // is disabled. It also happens relatively often with
2143         // HTTP adaptive streams when switching between different
2144         // variants of a stream.
2145         gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
2146         gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
2147         break;
2148     case GST_MESSAGE_LATENCY:
2149         // Recalculate the latency; we don't need any special handling
2150         // here other than the GStreamer default.
2151         // This can happen if the latency of live elements changes, or
2152         // for one reason or another a new live element is added or
2153         // removed from the pipeline.
2154         gst_bin_recalculate_latency(GST_BIN(m_pipeline.get()));
2155         break;
2156     case GST_MESSAGE_ELEMENT:
2157         if (gst_is_missing_plugin_message(message)) {
2158             if (gst_install_plugins_supported()) {
2159                 auto missingPluginCallback = MediaPlayerRequestInstallMissingPluginsCallback::create([weakThis = makeWeakPtr(*this)](uint32_t result, MediaPlayerRequestInstallMissingPluginsCallback& missingPluginCallback) {
2160                     if (!weakThis) {
2161                         GST_INFO("got missing pluging installation callback in destroyed player with result %u", result);
2162                         return;
2163                     }
2164
2165                     GST_DEBUG("got missing plugin installation callback with result %u", result);
2166                     RefPtr<MediaPlayerRequestInstallMissingPluginsCallback> protectedMissingPluginCallback = &missingPluginCallback;
2167                     weakThis->m_missingPluginCallbacks.removeFirst(protectedMissingPluginCallback);
2168                     if (result != GST_INSTALL_PLUGINS_SUCCESS)
2169                         return;
2170
2171                     weakThis->changePipelineState(GST_STATE_READY);
2172                     weakThis->changePipelineState(GST_STATE_PAUSED);
2173                 });
2174                 m_missingPluginCallbacks.append(missingPluginCallback.copyRef());
2175                 GUniquePtr<char> detail(gst_missing_plugin_message_get_installer_detail(message));
2176                 GUniquePtr<char> description(gst_missing_plugin_message_get_description(message));
2177                 m_player->requestInstallMissingPlugins(String::fromUTF8(detail.get()), String::fromUTF8(description.get()), missingPluginCallback.get());
2178             }
2179         }
2180 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
2181         else if (GstMpegtsSection* section = gst_message_parse_mpegts_section(message)) {
2182             processMpegTsSection(section);
2183             gst_mpegts_section_unref(section);
2184         }
2185 #endif
2186         else if (gst_structure_has_name(structure, "http-headers")) {
2187             GST_DEBUG_OBJECT(pipeline(), "Processing HTTP headers: %" GST_PTR_FORMAT, structure);
2188             if (const char* uri = gst_structure_get_string(structure, "uri")) {
2189                 URL url(URL(), uri);
2190                 convertToInternalProtocol(url);
2191                 m_origins.add(SecurityOrigin::create(url));
2192
2193                 if (url != m_url) {
2194                     GST_DEBUG_OBJECT(pipeline(), "Ignoring HTTP response headers for non-main URI.");
2195                     break;
2196                 }
2197             }
2198
2199             bool isRangeRequest = false;
2200             GUniqueOutPtr<GstStructure> requestHeaders;
2201             if (gst_structure_get(structure, "request-headers", GST_TYPE_STRUCTURE, &requestHeaders.outPtr(), nullptr))
2202                 isRangeRequest = gst_structure_has_field(requestHeaders.get(), "Range");
2203
2204             GST_DEBUG_OBJECT(pipeline(), "Is range request: %s", boolForPrinting(isRangeRequest));
2205
2206             GUniqueOutPtr<GstStructure> responseHeaders;
2207             if (gst_structure_get(structure, "response-headers", GST_TYPE_STRUCTURE, &responseHeaders.outPtr(), nullptr)) {
2208                 CString contentLengthHeaderName = httpHeaderNameString(HTTPHeaderName::ContentLength).utf8();
2209                 uint64_t contentLength = 0;
2210                 if (!gst_structure_get_uint64(responseHeaders.get(), contentLengthHeaderName.data(), &contentLength)) {
2211                     // souphttpsrc sets a string for Content-Length, so
2212                     // handle it here, until we remove the webkit+ protocol
2213                     // prefix from webkitwebsrc.
2214                     if (const char* contentLengthAsString = gst_structure_get_string(responseHeaders.get(), contentLengthHeaderName.data())) {
2215                         contentLength = g_ascii_strtoull(contentLengthAsString, nullptr, 10);
2216                         if (contentLength == G_MAXUINT64)
2217                             contentLength = 0;
2218                     }
2219                 }
2220                 if (!isRangeRequest) {
2221                     m_isLiveStream = !contentLength;
2222                     GST_INFO_OBJECT(pipeline(), "%s stream detected", m_isLiveStream ? "Live" : "Non-live");
2223                     updateDownloadBufferingFlag();
2224                 }
2225             }
2226         } else if (gst_structure_has_name(structure, "webkit-network-statistics")) {
2227             if (gst_structure_get(structure, "read-position", G_TYPE_UINT64, &m_networkReadPosition, "size", G_TYPE_UINT64, &m_httpResponseTotalSize, nullptr))
2228                 GST_DEBUG_OBJECT(pipeline(), "Updated network read position %" G_GUINT64_FORMAT ", size: %" G_GUINT64_FORMAT, m_networkReadPosition, m_httpResponseTotalSize);
2229         } else if (gst_structure_has_name(structure, "adaptive-streaming-statistics")) {
2230             if (WEBKIT_IS_WEB_SRC(m_source.get()) && !webkitGstCheckVersion(1, 12, 0)) {
2231                 if (const char* uri = gst_structure_get_string(structure, "uri"))
2232                     m_hasTaintedOrigin = webKitSrcWouldTaintOrigin(WEBKIT_WEB_SRC_CAST(m_source.get()), SecurityOrigin::create(URL(URL(), uri)));
2233             }
2234         } else if (gst_structure_has_name(structure, "GstCacheDownloadComplete")) {
2235             GST_INFO_OBJECT(pipeline(), "Stream is fully downloaded, stopping monitoring downloading progress.");
2236             m_fillTimer.stop();
2237             m_bufferingPercentage = 100;
2238             updateStates();
2239         } else
2240             GST_DEBUG_OBJECT(pipeline(), "Unhandled element message: %" GST_PTR_FORMAT, structure);
2241         break;
2242 #if ENABLE(VIDEO_TRACK)
2243     case GST_MESSAGE_TOC:
2244         processTableOfContents(message);
2245         break;
2246 #endif
2247     case GST_MESSAGE_TAG: {
2248         GstTagList* tags = nullptr;
2249         GUniqueOutPtr<gchar> tag;
2250         gst_message_parse_tag(message, &tags);
2251         if (gst_tag_list_get_string(tags, GST_TAG_IMAGE_ORIENTATION, &tag.outPtr())) {
2252             if (!g_strcmp0(tag.get(), "rotate-90"))
2253                 setVideoSourceOrientation(ImageOrientation::OriginRightTop);
2254             else if (!g_strcmp0(tag.get(), "rotate-180"))
2255                 setVideoSourceOrientation(ImageOrientation::OriginBottomRight);
2256             else if (!g_strcmp0(tag.get(), "rotate-270"))
2257                 setVideoSourceOrientation(ImageOrientation::OriginLeftBottom);
2258         }
2259         gst_tag_list_unref(tags);
2260         break;
2261     }
2262     case GST_MESSAGE_STREAMS_SELECTED: {
2263         GRefPtr<GstStreamCollection> collection;
2264         gst_message_parse_streams_selected(message, &collection.outPtr());
2265
2266         if (!collection)
2267             break;
2268
2269         m_streamCollection.swap(collection);
2270         m_currentAudioStreamId = "";
2271         m_currentVideoStreamId = "";
2272         m_currentTextStreamId = "";
2273
2274         unsigned length = gst_message_streams_selected_get_size(message);
2275         for (unsigned i = 0; i < length; i++) {
2276             GRefPtr<GstStream> stream = gst_message_streams_selected_get_stream(message, i);
2277             if (!stream)
2278                 continue;
2279
2280             GstStreamType type = gst_stream_get_stream_type(stream.get());
2281             String streamId(gst_stream_get_stream_id(stream.get()));
2282
2283             GST_DEBUG_OBJECT(pipeline(), "Selecting %s track with ID: %s", gst_stream_type_get_name(type), streamId.utf8().data());
2284             // Playbin3 can send more than one selected stream of the same type
2285             // but there's no priority or ordering system in place, so we assume
2286             // the selected stream is the last one as reported by playbin3.
2287             if (type & GST_STREAM_TYPE_AUDIO) {
2288                 m_currentAudioStreamId = streamId;
2289                 auto track = m_audioTracks.get(m_currentAudioStreamId);
2290                 ASSERT(track);
2291                 track->markAsActive();
2292             } else if (type & GST_STREAM_TYPE_VIDEO) {
2293                 m_currentVideoStreamId = streamId;
2294                 auto track = m_videoTracks.get(m_currentVideoStreamId);
2295                 ASSERT(track);
2296                 track->markAsActive();
2297             } else if (type & GST_STREAM_TYPE_TEXT)
2298                 m_currentTextStreamId = streamId;
2299             else
2300                 GST_WARNING("Unknown stream type with stream-id %s", streamId.utf8().data());
2301         }
2302         break;
2303     }
2304     default:
2305         GST_DEBUG_OBJECT(pipeline(), "Unhandled GStreamer message type: %s", GST_MESSAGE_TYPE_NAME(message));
2306         break;
2307     }
2308 }
2309
2310 void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message)
2311 {
2312     GstBufferingMode mode;
2313     gst_message_parse_buffering_stats(message, &mode, nullptr, nullptr, nullptr);
2314
2315     int percentage;
2316     gst_message_parse_buffering(message, &percentage);
2317
2318     updateBufferingStatus(mode, percentage);
2319 }
2320
2321 void MediaPlayerPrivateGStreamer::updateMaxTimeLoaded(double percentage)
2322 {
2323     MediaTime mediaDuration = durationMediaTime();
2324     if (!mediaDuration)
2325         return;
2326
2327     m_maxTimeLoaded = MediaTime(percentage * static_cast<double>(toGstUnsigned64Time(mediaDuration)) / 100, GST_SECOND);
2328     GST_DEBUG_OBJECT(pipeline(), "[Buffering] Updated maxTimeLoaded: %s", toString(m_maxTimeLoaded).utf8().data());
2329 }
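
// Equivalently, m_maxTimeLoaded = (percentage / 100) * duration: for a
// hypothetical 60-second media buffered to 25%, the high-water mark becomes
// 15 seconds.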
2330
2331 void MediaPlayerPrivateGStreamer::updateBufferingStatus(GstBufferingMode mode, double percentage)
2332 {
2333     bool wasBuffering = m_isBuffering;
2334
2335     GST_DEBUG_OBJECT(pipeline(), "[Buffering] mode: %s, status: %f%%", enumToString(GST_TYPE_BUFFERING_MODE, mode).data(), percentage);
2336
2337     m_didDownloadFinish = percentage == 100;
2338     m_isBuffering = !m_didDownloadFinish;
2342
2343     m_bufferingPercentage = percentage;
2344     switch (mode) {
2345     case GST_BUFFERING_STREAM: {
2346         updateMaxTimeLoaded(percentage);
2347
2349         if (m_didDownloadFinish || (!wasBuffering && m_isBuffering))
2350             updateStates();
2351
2352         break;
2353     }
2354     case GST_BUFFERING_DOWNLOAD: {
2355         updateMaxTimeLoaded(percentage);
2356         updateStates();
2357         break;
2358     }
2359     default:
2360         GST_DEBUG_OBJECT(pipeline(), "Unhandled buffering mode: %s", enumToString(GST_TYPE_BUFFERING_MODE, mode).data());
2361         break;
2362     }
2363 }
2364
2365 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
2366 void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section)
2367 {
2368     ASSERT(section);
2369
2370     if (section->section_type == GST_MPEGTS_SECTION_PMT) {
2371         const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
2372         m_metadataTracks.clear();
2373         for (unsigned i = 0; i < pmt->streams->len; ++i) {
2374             const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
2375             if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
2376                 AtomString pid = String::number(stream->pid);
2377                 auto track = InbandMetadataTextTrackPrivateGStreamer::create(
2378                     InbandTextTrackPrivate::Kind::Metadata, InbandTextTrackPrivate::CueFormat::Data, pid);
2379
2380                 // 4.7.10.12.2 Sourcing in-band text tracks
2381                 // If the new text track's kind is metadata, then set the text track in-band metadata track dispatch
2382                 // type as follows, based on the type of the media resource:
2383                 // Let stream type be the value of the "stream_type" field describing the text track's type in the
2384                 // file's program map section, interpreted as an 8-bit unsigned integer. Let length be the value of
2385                 // the "ES_info_length" field for the track in the same part of the program map section, interpreted
2386                 // as an integer as defined by the MPEG-2 specification. Let descriptor bytes be the length bytes
2387                 // following the "ES_info_length" field. The text track in-band metadata track dispatch type must be
2388                 // set to the concatenation of the stream type byte and the zero or more descriptor bytes bytes,
2389                 // expressed in hexadecimal using uppercase ASCII hex digits.
2390                 StringBuilder inbandMetadataTrackDispatchType;
2391                 inbandMetadataTrackDispatchType.append(hex(stream->stream_type, 2));
2392                 for (unsigned j = 0; j < stream->descriptors->len; ++j) {
2393                     const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
2394                     for (unsigned k = 0; k < descriptor->length; ++k)
2395                         inbandMetadataTrackDispatchType.append(hex(descriptor->data[k], 2));
2396                 }
2397                 track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType.toString());
2398
2399                 m_metadataTracks.add(pid, track);
2400                 m_player->addTextTrack(*track);
2401             }
2402         }
2403     } else {
2404         AtomString pid = String::number(section->pid);
2405         RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = m_metadataTracks.get(pid);
2406         if (!track)
2407             return;
2408
2409         GRefPtr<GBytes> data = gst_mpegts_section_get_data(section);
2410         gsize size;
2411         const void* bytes = g_bytes_get_data(data.get(), &size);
2412
2413         track->addDataCue(currentMediaTime(), currentMediaTime(), bytes, size);
2414     }
2415 }
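
// Worked example for the dispatch type built above (hypothetical PMT data): a
// metadata stream with stream_type 0x05 and a single descriptor contributing
// the bytes { 0x26, 0x01, 0xFF } yields the in-band metadata track dispatch
// type "052601FF": the stream type byte followed by the descriptor bytes, in
// uppercase hex, as required by the HTML specification quoted above.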
2416 #endif
2417
2418 #if ENABLE(VIDEO_TRACK)
2419 void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
2420 {
2421     if (m_chaptersTrack)
2422         m_player->removeTextTrack(*m_chaptersTrack);
2423
2424     m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Kind::Chapters, InbandTextTrackPrivate::CueFormat::Generic);
2425     m_player->addTextTrack(*m_chaptersTrack);
2426
2427     GRefPtr<GstToc> toc;
2428     gboolean updated;
2429     gst_message_parse_toc(message, &toc.outPtr(), &updated);
2430     ASSERT(toc);
2431
2432     for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next)
2433         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
2434 }
2435
2436 void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry)
2437 {
2438     ASSERT(entry);
2439
2440     auto cue = InbandGenericCue::create();
2441
2442     gint64 start = -1, stop = -1;
2443     gst_toc_entry_get_start_stop_times(entry, &start, &stop);
2444
2445     uint32_t truncatedGstSecond = static_cast<uint32_t>(GST_SECOND);
2446     if (start != -1)
2447         cue->setStartTime(MediaTime(static_cast<int64_t>(start), truncatedGstSecond));
2448     if (stop != -1)
2449         cue->setEndTime(MediaTime(static_cast<int64_t>(stop), truncatedGstSecond));
2450
2451     GstTagList* tags = gst_toc_entry_get_tags(entry);
2452     if (tags) {
2453         gchar* title = nullptr;
2454         gst_tag_list_get_string(tags, GST_TAG_TITLE, &title);
2455         if (title) {
2456             cue->setContent(title);
2457             g_free(title);
2458         }
2459     }
2460
2461     m_chaptersTrack->addGenericCue(cue);
2462
2463     for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next)
2464         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
2465 }
2466
2467 void MediaPlayerPrivateGStreamer::purgeInvalidAudioTracks(Vector<String> validTrackIds)
2468 {
2469     m_audioTracks.removeIf([validTrackIds](auto& keyAndValue) {
2470         return !validTrackIds.contains(keyAndValue.key);
2471     });
2472 }
2473
2474 void MediaPlayerPrivateGStreamer::purgeInvalidVideoTracks(Vector<String> validTrackIds)
2475 {
2476     m_videoTracks.removeIf([validTrackIds](auto& keyAndValue) {
2477         return !validTrackIds.contains(keyAndValue.key);
2478     });
2479 }
2480
2481 void MediaPlayerPrivateGStreamer::purgeInvalidTextTracks(Vector<String> validTrackIds)
2482 {
2483     m_textTracks.removeIf([validTrackIds](auto& keyAndValue) {
2484         return !validTrackIds.contains(keyAndValue.key);
2485     });
2486 }
2487 #endif
2488
2489 void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GstElement* element, MediaPlayerPrivateGStreamer* player)
2490 {
2491     if (g_strcmp0(G_OBJECT_TYPE_NAME(element), "GstDownloadBuffer"))
2492         return;
2493
2494     player->m_downloadBuffer = element;
2495     g_signal_handlers_disconnect_by_func(bin, reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), player);
2496     g_signal_connect_swapped(element, "notify::temp-location", G_CALLBACK(downloadBufferFileCreatedCallback), player);
2497
2498     GUniqueOutPtr<char> oldDownloadTemplate;
2499     g_object_get(element, "temp-template", &oldDownloadTemplate.outPtr(), nullptr);
2500
2501     GUniquePtr<char> newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, "var", "tmp", "WebKit-Media-XXXXXX", nullptr));
2502     g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr);
2503     GST_DEBUG_OBJECT(player->pipeline(), "Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get());
2504
2505     player->purgeOldDownloadFiles(oldDownloadTemplate.get());
2506 }
2507
2508 void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer* player)
2509 {
2510     ASSERT(player->m_downloadBuffer);
2511
2512     g_signal_handlers_disconnect_by_func(player->m_downloadBuffer.get(), reinterpret_cast<gpointer>(downloadBufferFileCreatedCallback), player);
2513
2514     GUniqueOutPtr<char> downloadFile;
2515     g_object_get(player->m_downloadBuffer.get(), "temp-location", &downloadFile.outPtr(), nullptr);
2516     player->m_downloadBuffer = nullptr;
2517
2518     if (UNLIKELY(!FileSystem::deleteFile(downloadFile.get()))) {
2519         GST_WARNING("Couldn't unlink media temporary file %s after creation", downloadFile.get());
2520         return;
2521     }
2522
2523     GST_DEBUG_OBJECT(player->pipeline(), "Unlinked media temporary file %s after creation", downloadFile.get());
2524 }
2525
2526 void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate)
2527 {
2528     if (!downloadFileTemplate)
2529         return;
2530
2531     GUniquePtr<char> templatePath(g_path_get_dirname(downloadFileTemplate));
2532     GUniquePtr<char> templateFile(g_path_get_basename(downloadFileTemplate));
2533     String templatePattern = String(templateFile.get()).replace("X", "?");
2534
2535     for (auto& filePath : FileSystem::listDirectory(templatePath.get(), templatePattern)) {
2536         if (UNLIKELY(!FileSystem::deleteFile(filePath))) {
2537             GST_WARNING("Couldn't unlink legacy media temporary file: %s", filePath.utf8().data());
2538             continue;
2539         }
2540
2541         GST_TRACE("Unlinked legacy media temporary file: %s", filePath.utf8().data());
2542     }
2543 }
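
// Example (hypothetical file names): given a download template
// "/var/tmp/WebKit-Media-XXXXXX", templatePath is "/var/tmp" and
// templatePattern becomes "WebKit-Media-??????", so stale files such as
// /var/tmp/WebKit-Media-a1B2c3 left over by previous runs are unlinked.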
2544
2545 void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
2546 {
2547     if (!m_pipeline || m_didErrorOccur)
2548         return;
2549
2550     if (m_isSeeking) {
2551         if (m_isSeekPending)
2552             updateStates();
2553         else {
2554             GST_DEBUG_OBJECT(pipeline(), "[Seek] seeked to %s", toString(m_seekTime).utf8().data());
2555             m_isSeeking = false;
2556             m_cachedPosition = MediaTime::invalidTime();
2557             if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek.isValid()) {
2558                 seek(m_timeOfOverlappingSeek);
2559                 m_timeOfOverlappingSeek = MediaTime::invalidTime();
2560                 return;
2561             }
2562             m_timeOfOverlappingSeek = MediaTime::invalidTime();
2563
2564             // The pipeline can still have a pending state. In this case a position query will fail.
2565             // Right now we can use m_seekTime as a fallback.
2566             m_canFallBackToLastFinishedSeekPosition = true;
2567             timeChanged();
2568         }
2569     } else
2570         updateStates();
2571 }
2572
2573 void MediaPlayerPrivateGStreamer::updateStates()
2574 {
2575     if (!m_pipeline || m_didErrorOccur)
2576         return;
2577
2578     MediaPlayer::NetworkState oldNetworkState = m_networkState;
2579     MediaPlayer::ReadyState oldReadyState = m_readyState;
2580     GstState pending, state;
2581     bool stateReallyChanged = false;
2582
2583     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
2584     if (state != m_currentState) {
2585         m_oldState = m_currentState;
2586         m_currentState = state;
2587         stateReallyChanged = true;
2588     }
2589
2590     bool shouldUpdatePlaybackState = false;
2591     switch (getStateResult) {
2592     case GST_STATE_CHANGE_SUCCESS: {
2593         GST_DEBUG_OBJECT(pipeline(), "State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
2594
2595         // Do nothing if we reached EOS and the state changed to READY, to avoid recreating the
2596         // player in HTMLMediaElement and to let it properly emit the video 'ended' event.
2597         if (m_isEndReached && m_currentState == GST_STATE_READY)
2598             break;
2599
2600         m_shouldResetPipeline = m_currentState <= GST_STATE_READY;
2601
2602         bool didBuffering = m_isBuffering;
2603
2604         // Update ready and network states.
2605         switch (m_currentState) {
2606         case GST_STATE_NULL:
2607             m_readyState = MediaPlayer::ReadyState::HaveNothing;
2608             m_networkState = MediaPlayer::NetworkState::Empty;
2609             break;
2610         case GST_STATE_READY:
2611             m_readyState = MediaPlayer::ReadyState::HaveMetadata;
2612             m_networkState = MediaPlayer::NetworkState::Empty;
2613             break;
2614         case GST_STATE_PAUSED:
2615             FALLTHROUGH;
2616         case GST_STATE_PLAYING:
2617             if (m_isBuffering) {
2618                 GRefPtr<GstQuery> query = adoptGRef(gst_query_new_buffering(GST_FORMAT_PERCENT));
2619
2620                 m_isBuffering = m_bufferingPercentage < 100;
2621                 if (gst_element_query(m_pipeline.get(), query.get())) {
2622                     gboolean isBuffering = m_isBuffering;
2623                     gst_query_parse_buffering_percent(query.get(), &isBuffering, nullptr);
2624                     m_isBuffering = isBuffering;
2625                 }
2626
2627                 if (!m_isBuffering) {
2628                     GST_INFO_OBJECT(pipeline(), "[Buffering] Complete.");
2629                     m_readyState = MediaPlayer::ReadyState::HaveEnoughData;
2630                     m_networkState = m_didDownloadFinish ? MediaPlayer::NetworkState::Idle : MediaPlayer::NetworkState::Loading;
2631                 } else {
2632                     m_readyState = MediaPlayer::ReadyState::HaveCurrentData;
2633                     m_networkState = MediaPlayer::NetworkState::Loading;
2634                 }
2635             } else if (m_didDownloadFinish) {
2636                 m_readyState = MediaPlayer::ReadyState::HaveEnoughData;
2637                 m_networkState = MediaPlayer::NetworkState::Loaded;
2638             } else {
2639                 m_readyState = MediaPlayer::ReadyState::HaveFutureData;
2640                 m_networkState = MediaPlayer::NetworkState::Loading;
2641             }
2642
2643             break;
2644         default:
2645             ASSERT_NOT_REACHED();
2646             break;
2647         }
2648
2649         // Sync states where needed.
2650         if (m_currentState == GST_STATE_PAUSED) {
2651             if (!m_areVolumeAndMuteInitialized) {
2652                 notifyPlayerOfVolumeChange();
2653                 notifyPlayerOfMute();
2654                 m_areVolumeAndMuteInitialized = true;
2655             }
2656
2657             if (didBuffering && !m_isBuffering && !m_isPaused && m_playbackRate) {
2658                 GST_INFO_OBJECT(pipeline(), "[Buffering] Restarting playback.");
2659                 changePipelineState(GST_STATE_PLAYING);
2660             }
2661         } else if (m_currentState == GST_STATE_PLAYING) {
2662             m_isPaused = false;
2663
2664             if ((m_isBuffering && !m_isLiveStream) || !m_playbackRate) {
2665                 GST_INFO_OBJECT(pipeline(), "[Buffering] Pausing stream for buffering.");
2666                 changePipelineState(GST_STATE_PAUSED);
2667             }
2668         } else
2669             m_isPaused = true;
2670
2671         GST_DEBUG_OBJECT(pipeline(), "Old state: %s, new state: %s (requested: %s)", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState), gst_element_state_get_name(m_requestedState));
2672         if (m_requestedState == GST_STATE_PAUSED && m_currentState == GST_STATE_PAUSED) {
2673             shouldUpdatePlaybackState = true;
2674             GST_INFO_OBJECT(pipeline(), "Requested state change to %s was completed", gst_element_state_get_name(m_currentState));
2675         }
2676
2677         // Emit play state change notification only when going to PLAYING so that
2678         // the media element gets a chance to enable its page sleep disabler.
2679         // Emitting this notification in more cases triggers unwanted code paths
2680         // and test timeouts.
2681         if (stateReallyChanged && (m_oldState != m_currentState) && (m_oldState == GST_STATE_PAUSED && m_currentState == GST_STATE_PLAYING)) {
2682             GST_INFO_OBJECT(pipeline(), "Playback state changed from %s to %s. Notifying the media player client", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState));
2683             shouldUpdatePlaybackState = true;
2684         }
2685
2686         break;
2687     }
2688     case GST_STATE_CHANGE_ASYNC:
2689         GST_DEBUG_OBJECT(pipeline(), "Async: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
2690         // Change in progress.
2691         break;
2692     case GST_STATE_CHANGE_FAILURE:
2693         GST_DEBUG_OBJECT(pipeline(), "Failure: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
2694         // Change failed.
2695         return;
2696     case GST_STATE_CHANGE_NO_PREROLL:
2697         GST_DEBUG_OBJECT(pipeline(), "No preroll: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
2698
        // Live pipelines go to PAUSED without prerolling.
2700         m_isLiveStream = true;
2701         updateDownloadBufferingFlag();
2702
2703         if (m_currentState == GST_STATE_READY)
2704             m_readyState = MediaPlayer::ReadyState::HaveNothing;
2705         else if (m_currentState == GST_STATE_PAUSED) {
2706             m_readyState = MediaPlayer::ReadyState::HaveEnoughData;
2707             m_isPaused = true;
2708         } else if (m_currentState == GST_STATE_PLAYING)
2709             m_isPaused = false;
2710
2711         if (!m_isPaused && m_playbackRate)
2712             changePipelineState(GST_STATE_PLAYING);
2713
2714         m_networkState = MediaPlayer::NetworkState::Loading;
2715         break;
2716     default:
        GST_DEBUG_OBJECT(pipeline(), "Unhandled state change result: %d", getStateResult);
2718         break;
2719     }
2720
2721     m_requestedState = GST_STATE_VOID_PENDING;
2722
2723     if (shouldUpdatePlaybackState)
2724         m_player->playbackStateChanged();
2725
2726     if (m_networkState != oldNetworkState) {
2727         GST_DEBUG_OBJECT(pipeline(), "Network State Changed from %s to %s", convertEnumerationToString(oldNetworkState).utf8().data(), convertEnumerationToString(m_networkState).utf8().data());
2728         m_player->networkStateChanged();
2729     }
2730     if (m_readyState != oldReadyState) {
2731         GST_DEBUG_OBJECT(pipeline(), "Ready State Changed from %s to %s", convertEnumerationToString(oldReadyState).utf8().data(), convertEnumerationToString(m_readyState).utf8().data());
2732         m_player->readyStateChanged();
2733     }
2734
2735     if (getStateResult == GST_STATE_CHANGE_SUCCESS && m_currentState >= GST_STATE_PAUSED) {
2736         updatePlaybackRate();
2737         if (m_isSeekPending) {
2738             GST_DEBUG_OBJECT(pipeline(), "[Seek] committing pending seek to %s", toString(m_seekTime).utf8().data());
2739             m_isSeekPending = false;
2740             m_isSeeking = doSeek(m_seekTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
2741             if (!m_isSeeking) {
2742                 m_cachedPosition = MediaTime::invalidTime();
2743                 GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(m_seekTime).utf8().data());
2744             }
2745         }
2746     }
2747 }
2748
2749 void MediaPlayerPrivateGStreamer::mediaLocationChanged(GstMessage* message)
2750 {
    if (m_mediaLocations) {
        gst_structure_free(m_mediaLocations);
        m_mediaLocations = nullptr;
    }
2753
2754     const GstStructure* structure = gst_message_get_structure(message);
2755     if (structure) {
2756         // This structure can contain:
2757         // - both a new-location string and embedded locations structure
2758         // - or only a new-location string.
2759         m_mediaLocations = gst_structure_copy(structure);
2760         const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2761
2762         if (locations)
            m_mediaLocationCurrentIndex = static_cast<int>(gst_value_list_get_size(locations)) - 1;
2764
2765         loadNextLocation();
2766     }
2767 }
2768
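// Handle GStreamer URI redirection: the element message (e.g. "redirect" from
// demuxers) carries either a single new-location string or a "locations" list,
// which is walked from the last entry to the first, moving on to the next
// candidate whenever one is rejected.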
2769 bool MediaPlayerPrivateGStreamer::loadNextLocation()
2770 {
2771     if (!m_mediaLocations)
2772         return false;
2773
2774     const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2775     const char* newLocation = nullptr;
2776
2777     if (!locations) {
2778         // Fallback on new-location string.
2779         newLocation = gst_structure_get_string(m_mediaLocations, "new-location");
2780         if (!newLocation)
2781             return false;
2782     }
2783
2784     if (!newLocation) {
        if (m_mediaLocationCurrentIndex < 0) {
            gst_structure_free(m_mediaLocations);
            m_mediaLocations = nullptr;
            return false;
        }
2789
2790         const GValue* location = gst_value_list_get_value(locations, m_mediaLocationCurrentIndex);
2791         const GstStructure* structure = gst_value_get_structure(location);
2792
2793         if (!structure) {
2794             m_mediaLocationCurrentIndex--;
2795             return false;
2796         }
2797
2798         newLocation = gst_structure_get_string(structure, "new-location");
2799     }
2800
2801     if (newLocation) {
        // Found a candidate. new-location is not always an absolute URL,
        // though, so we take the base of the current URL and resolve the
        // new-location value against it.
2805         URL baseUrl = gst_uri_is_valid(newLocation) ? URL() : m_url;
2806         URL newUrl = URL(baseUrl, newLocation);
2807
2808         GUniqueOutPtr<gchar> playbinUrlStr;
2809         g_object_get(m_pipeline.get(), "current-uri", &playbinUrlStr.outPtr(), nullptr);
2810         URL playbinUrl(URL(), playbinUrlStr.get());
2811
2812         if (playbinUrl == newUrl) {
2813             GST_DEBUG_OBJECT(pipeline(), "Playbin already handled redirection.");
2814
2815             m_url = playbinUrl;
2816
2817             return true;
2818         }
2819
2820         changePipelineState(GST_STATE_READY);
2821         auto securityOrigin = SecurityOrigin::create(m_url);
2822         if (securityOrigin->canRequest(newUrl)) {
2823             GST_INFO_OBJECT(pipeline(), "New media url: %s", newUrl.string().utf8().data());
2824
2825             // Reset player states.
2826             m_networkState = MediaPlayer::NetworkState::Loading;
2827             m_player->networkStateChanged();
2828             m_readyState = MediaPlayer::ReadyState::HaveNothing;
2829             m_player->readyStateChanged();
2830
2831             // Reset pipeline state.
2832             m_shouldResetPipeline = true;
2833
2834             GstState state;
2835             gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
2836             if (state <= GST_STATE_READY) {
2837                 // Set the new uri and start playing.
2838                 setPlaybinURL(newUrl);
2839                 changePipelineState(GST_STATE_PLAYING);
2840                 return true;
2841             }
2842         } else
2843             GST_INFO_OBJECT(pipeline(), "Not allowed to load new media location: %s", newUrl.string().utf8().data());
2844     }
2845     m_mediaLocationCurrentIndex--;
2846     return false;
2847 }
2848
2849 void MediaPlayerPrivateGStreamer::didEnd()
2850 {
2851     GST_INFO_OBJECT(pipeline(), "Playback ended");
2852
    // Synchronize position and duration values to avoid confusing the
    // HTMLMediaElement. In some cases, such as reverse playback, the
    // position is not always reported as 0.
2856     m_cachedPosition = MediaTime::invalidTime();
2857     MediaTime now = currentMediaTime();
2858     if (now > MediaTime::zeroTime() && !m_isSeeking) {
2859         m_cachedDuration = now;
2860         m_player->durationChanged();
2861     }
2862
2863     m_isEndReached = true;
2864
2865     if (!m_player->isLooping()) {
2866         m_isPaused = true;
2867         changePipelineState(GST_STATE_READY);
2868         m_didDownloadFinish = false;
2869
2870 #if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
2871         wpe_video_plane_display_dmabuf_source_end_of_stream(m_wpeVideoPlaneDisplayDmaBuf.get());
2872 #endif
2873     }
2874     timeChanged();
2875 }
2876
2877 void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
2878 {
2879     auto& gstRegistryScanner = GStreamerRegistryScanner::singleton();
2880     types = gstRegistryScanner.mimeTypeSet();
2881 }
2882
2883 MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::supportsType(const MediaEngineSupportParameters& parameters)
2884 {
2885     MediaPlayer::SupportsType result = MediaPlayer::SupportsType::IsNotSupported;
2886 #if ENABLE(MEDIA_SOURCE)
2887     // MediaPlayerPrivateGStreamerMSE is in charge of mediasource playback, not us.
2888     if (parameters.isMediaSource)
2889         return result;
2890 #endif
2891
2892 #if !ENABLE(MEDIA_STREAM)
2893     if (parameters.isMediaStream)
2894         return result;
2895 #endif
2896
2897     if (parameters.type.isEmpty())
2898         return result;
2899
2900     GST_DEBUG("Checking mime-type \"%s\"", parameters.type.raw().utf8().data());
2901     auto containerType = parameters.type.containerType();
2902     auto& gstRegistryScanner = GStreamerRegistryScanner::singleton();
2903     if (gstRegistryScanner.isContainerTypeSupported(containerType)) {
2904         // Spec says we should not return "probably" if the codecs string is empty.
2905         Vector<String> codecs = parameters.type.codecs();
2906         result = codecs.isEmpty() ? MediaPlayer::SupportsType::MayBeSupported : (gstRegistryScanner.areAllCodecsSupported(codecs) ? MediaPlayer::SupportsType::IsSupported : MediaPlayer::SupportsType::IsNotSupported);
2907     }
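    // For illustration (hypothetical inputs): a bare "video/mp4" without a codecs
    // parameter yields MayBeSupported, "video/mp4; codecs=\"avc1.42E01E\"" yields
    // IsSupported when a matching H.264 decoder is registered, and an unknown
    // codecs string yields IsNotSupported.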
2908
2909     auto finalResult = extendedSupportsType(parameters, result);
2910     GST_DEBUG("Supported: %s", convertEnumerationToString(finalResult).utf8().data());
2911     return finalResult;
2912 }
2913
2914 void MediaPlayerPrivateGStreamer::updateDownloadBufferingFlag()
2915 {
2916     if (!m_pipeline)
2917         return;
2918
2919     unsigned flags;
2920     g_object_get(m_pipeline.get(), "flags", &flags, nullptr);
2921
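    // getGstPlayFlag() resolves a flag nick against the GstPlayFlags enum that
    // playbin registers; the "download" flag enables progressive caching of the
    // stream to a local temporary file.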
2922     unsigned flagDownload = getGstPlayFlag("download");
2923
2924     // We don't want to stop downloading if we already started it.
2925     if (flags & flagDownload && m_readyState > MediaPlayer::ReadyState::HaveNothing && !m_shouldResetPipeline) {
2926         GST_DEBUG_OBJECT(pipeline(), "Download already started, not starting again");
2927         return;
2928     }
2929
2930     bool shouldDownload = !m_isLiveStream && m_preload == MediaPlayer::Preload::Auto;
2931     if (shouldDownload) {
2932         GST_INFO_OBJECT(pipeline(), "Enabling on-disk buffering");
2933         g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
2934         m_fillTimer.startRepeating(200_ms);
2935     } else {
2936         GST_INFO_OBJECT(pipeline(), "Disabling on-disk buffering");
2937         g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr);
2938         m_fillTimer.stop();
2939     }
2940 }
2941
2942 void MediaPlayerPrivateGStreamer::createGSTPlayBin(const URL& url, const String& pipelineName)
2943 {
2944     const char* playbinName = "playbin";
2945
2946     // MSE doesn't support playbin3. Mediastream requires playbin3. Regular
2947     // playback can use playbin3 on-demand with the WEBKIT_GST_USE_PLAYBIN3
2948     // environment variable.
2949     if ((!isMediaSource() && g_getenv("WEBKIT_GST_USE_PLAYBIN3")) || url.protocolIs("mediastream"))
2950         playbinName = "playbin3";
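
    // For example (assuming a shell environment; any non-empty value enables it):
    //   $ WEBKIT_GST_USE_PLAYBIN3=1 MiniBrowser https://example.com/video.html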
2951
2952     if (m_pipeline) {
2953         if (!g_strcmp0(GST_OBJECT_NAME(gst_element_get_factory(m_pipeline.get())), playbinName)) {
2954             GST_INFO_OBJECT(pipeline(), "Already using %s", playbinName);
2955             return;
2956         }
2957
2958         GST_INFO_OBJECT(pipeline(), "Tearing down as we need to use %s now.", playbinName);
2959         changePipelineState(GST_STATE_NULL);
2960         m_pipeline = nullptr;
2961     }
2962
2963     ASSERT(!m_pipeline);
2964
2965     m_isLegacyPlaybin = !g_strcmp0(playbinName, "playbin");
2966
2967     static Atomic<uint32_t> pipelineId;
2968     setPipeline(gst_element_factory_make(playbinName,
2969         (pipelineName.isEmpty() ? makeString("media-player-", pipelineId.exchangeAdd(1)) : pipelineName).utf8().data()));
2970     setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get()));
2971
2972     GST_INFO_OBJECT(pipeline(), "Using legacy playbin element: %s", boolForPrinting(m_isLegacyPlaybin));
2973
    // Also let other listeners subscribe to (application) messages on this bus.
2975     GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
2976     gst_bus_add_signal_watch_full(bus.get(), RunLoopSourcePriority::RunLoopDispatcher);
2977     g_signal_connect(bus.get(), "message", G_CALLBACK(busMessageCallback), this);
2978
2979     g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);
2980
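    // Watch elements added deep inside the pipeline to detect which video decoder
    // decodebin instantiated; hardware decoders (V4L2, i.MX VPU) need dedicated
    // texture-mapping flags later on.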
2981     g_signal_connect(GST_BIN_CAST(m_pipeline.get()), "deep-element-added", G_CALLBACK(+[](GstBin*, GstBin* subBin, GstElement* element, MediaPlayerPrivateGStreamer* player) {
2982         GUniquePtr<char> binName(gst_element_get_name(GST_ELEMENT_CAST(subBin)));
2983         if (!g_str_has_prefix(binName.get(), "decodebin"))
2984             return;
2985
2986         GUniquePtr<char> elementName(gst_element_get_name(element));
2987         if (g_str_has_prefix(elementName.get(), "v4l2"))
2988             player->m_videoDecoderPlatform = GstVideoDecoderPlatform::Video4Linux;
2989         else if (g_str_has_prefix(elementName.get(), "imxvpudec"))
2990             player->m_videoDecoderPlatform = GstVideoDecoderPlatform::ImxVPU;
2991
2992 #if USE(TEXTURE_MAPPER_GL)
2993         player->updateTextureMapperFlags();
2994 #endif
2995     }), this);
2996
2997     g_signal_connect_swapped(m_pipeline.get(), "source-setup", G_CALLBACK(sourceSetupCallback), this);
2998     if (m_isLegacyPlaybin) {
2999         g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
3000         g_signal_connect_swapped(m_pipeline.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
3001     }
3002
3003 #if ENABLE(VIDEO_TRACK)
3004     if (m_isLegacyPlaybin)
3005         g_signal_connect_swapped(m_pipeline.get(), "text-changed", G_CALLBACK(textChangedCallback), this);
3006
3007     GstElement* textCombiner = webkitTextCombinerNew();
3008     ASSERT(textCombiner);
3009     g_object_set(m_pipeline.get(), "text-stream-combiner", textCombiner, nullptr);
3010
3011     m_textAppSink = webkitTextSinkNew();
3012     ASSERT(m_textAppSink);
3013
3014     m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
3015     ASSERT(m_textAppSinkPad);
3016
3017     GRefPtr<GstCaps> textCaps;
3018     if (webkitGstCheckVersion(1, 14, 0))
3019         textCaps = adoptGRef(gst_caps_new_empty_simple("application/x-subtitle-vtt"));
3020     else
3021         textCaps = adoptGRef(gst_caps_new_empty_simple("text/vtt"));
3022     g_object_set(m_textAppSink.get(), "emit-signals", TRUE, "enable-last-sample", FALSE, "caps", textCaps.get(), nullptr);
3023     g_signal_connect_swapped(m_textAppSink.get(), "new-sample", G_CALLBACK(newTextSampleCallback), this);
3024
3025     g_object_set(m_pipeline.get(), "text-sink", m_textAppSink.get(), nullptr);
3026 #endif
3027
3028     g_object_set(m_pipeline.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);
3029
3030     configurePlaySink();
3031
3032     if (m_shouldPreservePitch) {
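        // scaletempo time-stretches audio without altering its pitch, so speech
        // stays intelligible at playback rates other than 1.0.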
3033         GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
3034
3035         if (!scale)
3036             GST_WARNING("Failed to create scaletempo");
3037         else
3038             g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
3039     }
3040
3041     if (!m_canRenderingBeAccelerated) {
3042         // If not using accelerated compositing, let GStreamer handle
3043         // the image-orientation tag.
3044         GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
3045         if (videoFlip) {
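            // Method 8 is "automatic": videoflip rotates each frame according to
            // the stream's image-orientation tag.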
3046             g_object_set(videoFlip, "method", 8, nullptr);
3047             g_object_set(m_pipeline.get(), "video-filter", videoFlip, nullptr);
3048         } else
3049             GST_WARNING("The videoflip element is missing, video rotation support is now disabled. Please check your gst-plugins-good installation.");
3050     }
3051
3052     GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
3053     if (videoSinkPad)
3054         g_signal_connect_swapped(videoSinkPad.get(), "notify::caps", G_CALLBACK(videoSinkCapsChangedCallback), this);
3055 }
3056
3057 bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
3058 {
3059     if (WEBKIT_IS_WEB_SRC(m_source.get()))
3060         return webKitSrcPassedCORSAccessCheck(WEBKIT_WEB_SRC_CAST(m_source.get()));
3061     return false;
3062 }
3063
3064 bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
3065 {
3066     if (m_isLiveStream)
3067         return false;
3068
3069     if (m_url.isLocalFile())
3070         return true;
3071
3072     if (m_url.protocolIsInHTTPFamily())
3073         return true;
3074
3075     return false;
3076 }
3077
3078 void MediaPlayerPrivateGStreamer::readyTimerFired()
3079 {
3080     GST_DEBUG_OBJECT(pipeline(), "In READY for too long. Releasing pipeline resources.");
3081     changePipelineState(GST_STATE_NULL);
3082 }
3083
3084 void MediaPlayerPrivateGStreamer::acceleratedRenderingStateChanged()
3085 {
3086     m_canRenderingBeAccelerated = m_player && m_player->acceleratedCompositingEnabled();
3087 }
3088
3089 #if USE(TEXTURE_MAPPER_GL)
3090 PlatformLayer* MediaPlayerPrivateGStreamer::platformLayer() const
3091 {
3092 #if USE(NICOSIA)
3093     return m_nicosiaLayer.ptr();
3094 #else
3095     return const_cast<MediaPlayerPrivateGStreamer*>(this);
3096 #endif
3097 }
3098
3099 #if USE(NICOSIA)
3100 void MediaPlayerPrivateGStreamer::swapBuffersIfNeeded()
3101 {
3102 #if USE(GSTREAMER_HOLEPUNCH)
3103     pushNextHolePunchBuffer();
3104 #endif
3105 }
3106 #else
3107 RefPtr<TextureMapperPlatformLayerProxy> MediaPlayerPrivateGStreamer::proxy() const
3108 {
3109     return m_platformLayerProxy.copyRef();
3110 }
3111
3112 void MediaPlayerPrivateGStreamer::swapBuffersIfNeeded()
3113 {
3114 #if USE(GSTREAMER_HOLEPUNCH)
3115     pushNextHolePunchBuffer();
3116 #endif
3117 }
3118 #endif
3119
3120 #if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
3121 class GStreamerDMABufHolePunchClient : public TextureMapperPlatformLayerBuffer::HolePunchClient {
3122 public:
3123     GStreamerDMABufHolePunchClient(std::unique_ptr<GstVideoFrameHolder>&& frameHolder, struct wpe_video_plane_display_dmabuf_source* videoPlaneDisplayDmaBufSource)
3124         : m_frameHolder(WTFMove(frameHolder))
3125         , m_wpeVideoPlaneDisplayDmaBuf(videoPlaneDisplayDmaBufSource) { };
3126     void setVideoRectangle(const IntRect& rect) final
3127     {
3128         if (m_wpeVideoPlaneDisplayDmaBuf)
3129             m_frameHolder->handoffVideoDmaBuf(m_wpeVideoPlaneDisplayDmaBuf, rect);
3130     }
3131 private:
3132     std::unique_ptr<GstVideoFrameHolder> m_frameHolder;
3133     struct wpe_video_plane_display_dmabuf_source* m_wpeVideoPlaneDisplayDmaBuf;
3134 };
3135 #endif // USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
3136
3137 void MediaPlayerPrivateGStreamer::pushTextureToCompositor()
3138 {
3139     auto sampleLocker = holdLock(m_sampleMutex);
3140     if (!GST_IS_SAMPLE(m_sample.get()))
3141         return;
3142
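    // Two upload strategies: if the frame already lives in GL textures it is pushed
    // zero-copy; otherwise a pooled BitmapTextureGL is fetched (or created) and the
    // frame data is copied into it before the buffer is handed to the compositor.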
3143     auto internalCompositingOperation = [this](TextureMapperPlatformLayerProxy& proxy, std::unique_ptr<GstVideoFrameHolder>&& frameHolder) {
3144         std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer;
3145         if (frameHolder->hasMappedTextures()) {
3146             layerBuffer = frameHolder->platformLayerBuffer();
3147             if (!layerBuffer)
3148                 return;
3149             layerBuffer->setUnmanagedBufferDataHolder(WTFMove(frameHolder));
3150         } else {
3151             layerBuffer = proxy.getAvailableBuffer(frameHolder->size(), GL_DONT_CARE);
3152             if (UNLIKELY(!layerBuffer)) {
3153                 auto texture = BitmapTextureGL::create(TextureMapperContextAttributes::get());
3154                 texture->reset(frameHolder->size(), frameHolder->hasAlphaChannel() ? BitmapTexture::SupportsAlpha : BitmapTexture::NoFlag);
3155                 layerBuffer = makeUnique<TextureMapperPlatformLayerBuffer>(WTFMove(texture));
3156             }
3157             frameHolder->updateTexture(layerBuffer->textureGL());
3158             layerBuffer->setExtraFlags(m_textureMapperFlags | (frameHolder->hasAlphaChannel() ? TextureMapperGL::ShouldBlend : 0));
3159         }
3160         proxy.pushNextBuffer(WTFMove(layerBuffer));
3161     };
3162
3163 #if USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
3164     auto proxyOperation =
3165         [this, internalCompositingOperation](TextureMapperPlatformLayerProxy& proxy)
3166         {
3167             LockHolder holder(proxy.lock());
3168
3169             if (!proxy.isActive())
3170                 return;
3171
3172             auto frameHolder = makeUnique<GstVideoFrameHolder>(m_sample.get(), m_videoDecoderPlatform, m_textureMapperFlags, !m_isUsingFallbackVideoSink);
3173             if (frameHolder->hasDMABuf()) {
3174                 auto layerBuffer = makeUnique<TextureMapperPlatformLayerBuffer>(0, m_size, TextureMapperGL::ShouldNotBlend, GL_DONT_CARE);
3175                 auto holePunchClient = makeUnique<GStreamerDMABufHolePunchClient>(WTFMove(frameHolder), m_wpeVideoPlaneDisplayDmaBuf.get());
3176                 layerBuffer->setHolePunchClient(WTFMove(holePunchClient));
3177                 proxy.pushNextBuffer(WTFMove(layerBuffer));
3178             } else
3179                 internalCompositingOperation(proxy, WTFMove(frameHolder));
3180         };
3181 #else
3182     auto proxyOperation =
3183         [this, internalCompositingOperation](TextureMapperPlatformLayerProxy& proxy)
3184         {
3185             LockHolder holder(proxy.lock());
3186
3187             if (!proxy.isActive())
3188                 return;
3189
3190             auto frameHolder = makeUnique<GstVideoFrameHolder>(m_sample.get(), m_videoDecoderPlatform, m_textureMapperFlags, !m_isUsingFallbackVideoSink);
3191             internalCompositingOperation(proxy, WTFMove(frameHolder));
3192         };
3193 #endif // USE(WPE_VIDEO_PLANE_DISPLAY_DMABUF)
3194
3195 #if USE(NICOSIA)
3196     proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
3197 #else
3198     proxyOperation(*m_platformLayerProxy);
3199 #endif
3200 }
3201 #endif // USE(TEXTURE_MAPPER_GL)
3202
3203 void MediaPlayerPrivateGStreamer::repaint()
3204 {
3205     ASSERT(m_sample);
3206     ASSERT(isMainThread());
3207
3208     m_player->repaint();
3209
3210     LockHolder lock(m_drawMutex);
3211     m_drawCondition.notifyOne();
3212 }
3213
3214 void MediaPlayerPrivateGStreamer::triggerRepaint(GstSample* sample)
3215 {
3216     bool shouldTriggerResize;
3217     {
3218         auto sampleLocker = holdLock(m_sampleMutex);
3219         shouldTriggerResize = !m_sample;
3220         m_sample = sample;
3221     }
3222
3223     if (shouldTriggerResize) {
3224         GST_DEBUG_OBJECT(pipeline(), "First sample reached the sink, triggering video dimensions update");
3225         m_notifier->notify(MainThreadNotification::SizeChanged, [this] {
3226             m_player->sizeChanged();
3227         });
3228     }
3229
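    // Non-accelerated path: wake the main thread to paint and block this streaming
    // thread on m_drawCondition until the draw completes or cancelRepaint() releases it.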
3230     if (!m_canRenderingBeAccelerated) {
3231         LockHolder locker(m_drawMutex);
3232         if (m_isBeingDestroyed)
3233             return;
3234         m_drawTimer.startOneShot(0_s);
3235         m_drawCondition.wait(m_drawMutex);
3236         return;
3237     }
3238
3239 #if USE(TEXTURE_MAPPER_GL)
3240     if (m_isUsingFallbackVideoSink) {
3241         LockHolder lock(m_drawMutex);
3242         auto proxyOperation =
3243             [this](TextureMapperPlatformLayerProxy& proxy)
3244             {
3245                 return proxy.scheduleUpdateOnCompositorThread([this] { this->pushTextureToCompositor(); });
3246             };
3247 #if USE(NICOSIA)
3248         if (!proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy()))
3249             return;
3250 #else
3251         if (!proxyOperation(*m_platformLayerProxy))
3252             return;
3253 #endif
3254         m_drawTimer.startOneShot(0_s);
3255         m_drawCondition.wait(m_drawMutex);
3256     } else
3257         pushTextureToCompositor();
3258 #endif // USE(TEXTURE_MAPPER_GL)
3259 }
3260
3261 void MediaPlayerPrivateGStreamer::repaintCallback(MediaPlayerPrivateGStreamer* player, GstSample* sample)
3262 {
3263     player->triggerRepaint(sample);
3264 }
3265
3266 void MediaPlayerPrivateGStreamer::cancelRepaint(bool destroying)
3267 {
    // The goal of this function is to release the GStreamer thread from m_drawCondition in triggerRepaint() in the non-AC case,
3269     // to avoid a deadlock if the player gets paused while waiting for drawing (see https://bugs.webkit.org/show_bug.cgi?id=170003):
3270     // the main thread is waiting for the GStreamer thread to pause, but the GStreamer thread is locked waiting for the
3271     // main thread to draw. This deadlock doesn't happen when using AC because the sample is processed (not painted) in the compositor
3272     // thread, so the main thread can request the pause and wait if the GStreamer thread is waiting for the compositor thread.
3273     //
    // This function is also used when destroying the player (destroying parameter is true), to release the GStreamer thread from
    // m_drawCondition and to ensure that new triggerRepaint calls won't wait on m_drawCondition.
3276     if (!m_canRenderingBeAccelerated) {
3277         LockHolder locker(m_drawMutex);
3278         m_drawTimer.stop();
3279         m_isBeingDestroyed = destroying;
3280         m_drawCondition.notifyOne();
3281     }
3282 }
3283
3284 void MediaPlayerPrivateGStreamer::repaintCancelledCallback(MediaPlayerPrivateGStreamer* player)
3285 {
3286     player->cancelRepaint();
3287 }
3288
3289 #if USE(GSTREAMER_GL)
3290 void MediaPlayerPrivateGStreamer::flushCurrentBuffer()
3291 {
3292     auto sampleLocker = holdLock(m_sampleMutex);
3293
3294     if (m_sample) {
        // Replace the sample with a new one containing only the caps, so this dummy sample is still useful for getting the dimensions.
        // This prevents resizing problems when the video changes its quality and a DRAIN is performed.
3297         const GstStructure* info = gst_sample_get_info(m_sample.get());
3298         m_sample = adoptGRef(gst_sample_new(nullptr, gst_sample_get_caps(m_sample.get()),
3299             gst_sample_get_segment(m_sample.get()), info ? gst_structure_copy(info) : nullptr));
3300     }
3301
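    // V4L2 decoder buffers belong to the decoder's buffer pool, so the compositor
    // must drop them synchronously before the decoder can drain; other decoders
    // can drop the buffer asynchronously under the proxy lock.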
3302     bool shouldWait = m_videoDecoderPlatform == GstVideoDecoderPlatform::Video4Linux;
3303     auto proxyOperation = [shouldWait, pipeline = pipeline()](TextureMapperPlatformLayerProxy& proxy) {
3304         GST_DEBUG_OBJECT(pipeline, "Flushing video sample %s", shouldWait ? "synchronously" : "");
3305         LockHolder locker(!shouldWait ? &proxy.lock() : nullptr);
3306
3307         if (proxy.isActive())
3308             proxy.dropCurrentBufferWhilePreservingTexture(shouldWait);
3309     };
3310
3311 #if USE(NICOSIA)
3312     proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
3313 #else
3314     proxyOperation(*m_platformLayerProxy);
3315 #endif
3316 }
3317 #endif
3318
3319 void MediaPlayerPrivateGStreamer::setSize(const IntSize& size)
3320 {
3321     m_size = size;
3322 }
3323
3324 void MediaPlayerPrivateGStreamer::paint(GraphicsContext& context, const FloatRect& rect)
3325 {
3326     if (context.paintingDisabled())
3327         return;
3328
3329     if (!m_player->visible())
3330         return;
3331
3332     auto sampleLocker = holdLock(m_sampleMutex);
3333     if (!GST_IS_SAMPLE(m_sample.get()))
3334         return;
3335
3336 #if USE(GSTREAMER_GL)
    // Ensure the input is RGBA. The GL rendering path handles YUV video
    // natively, so the sample may not be RGB and we need to do this
    // conversion on-demand here.
3339     GstBuffer* buffer = gst_sample_get_buffer(m_sample.get());
3340     if (UNLIKELY(!GST_IS_BUFFER(buffer)))
3341         return;
3342
3343     GstCaps* caps = gst_sample_get_caps(m_sample.get());
3344
3345     GstVideoInfo videoInfo;
3346     gst_video_info_init(&videoInfo);
3347     if (!gst_video_info_from_caps(&videoInfo, caps))
3348         return;
3349
3350     GstMemory* memory = gst_buffer_peek_memory(buffer, 0);
3351     bool hasExternalOESTexture = false;
3352     if (gst_is_gl_memory(memory))
3353         hasExternalOESTexture = gst_gl_memory_get_texture_target(GST_GL_MEMORY_CAST(memory)) == GST_GL_TEXTURE_TARGET_EXTERNAL_OES;
3354
3355     if (!GST_VIDEO_INFO_IS_RGB(&videoInfo) || hasExternalOESTexture) {
3356         if (!m_colorConvert)
3357             m_colorConvert = adoptGRef(gst_gl_color_convert_new(GST_GL_BASE_MEMORY_CAST(memory)->context));
3358
3359         if (!m_colorConvertInputCaps || !gst_caps_is_equal(m_colorConvertInputCaps.get(), caps)) {
3360             m_colorConvertInputCaps = caps;
3361             m_colorConvertOutputCaps = adoptGRef(gst_caps_copy(caps));
3362 #if G_BYTE_ORDER == G_LITTLE_ENDIAN
3363             const char* formatString = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? "RGBA" : "BGRx";
3364 #else
3365             const char* formatString = GST_VIDEO_INFO_HAS_ALPHA(&videoInfo) ? "RGBA" : "RGBx";
3366 #endif
3367             gst_caps_set_simple(m_colorConvertOutputCaps.get(), "format", G_TYPE_STRING, formatString,
3368                 "texture-target", G_TYPE_STRING, GST_GL_TEXTURE_TARGET_2D_STR, nullptr);
3369             if (!gst_gl_color_convert_set_caps(m_colorConvert.get(), caps, m_colorConvertOutputCaps.get()))
3370                 return;
3371         }
3372
3373         GRefPtr<GstBuffer> rgbBuffer = adoptGRef(gst_gl_color_convert_perform(m_colorConvert.get(), buffer));
3374         if (UNLIKELY(!GST_IS_BUFFER(rgbBuffer.get())))
3375             return;
3376
3377         const GstStructure* info = gst_sample_get_info(m_sample.get());
3378         m_sample = adoptGRef(gst_sample_new(rgbBuffer.get(), m_colorConvertOutputCaps.get(),
3379             gst_sample_get_segment(m_sample.get()), info ? gst_structure_copy(info) : nullptr));
3380     }
3381 #endif
3382
3383     auto gstImage = ImageGStreamer::createImage(m_sample.get());
3384     if (!gstImage)
3385         return;
3386
3387     context.drawImage(gstImage->image(), rect, gstImage->rect(), { CompositeOperator::Copy, m_canRenderingBeAccelerated ? m_videoSourceOrientation : ImageOrientation() });
3388 }
3389
3390 #if USE(GSTREAMER_GL)
3391 bool MediaPlayerPrivateGStreamer::copyVideoTextureToPlatformTexture(GraphicsContextGLOpenGL* context, PlatformGLObject outputTexture, GCGLenum outputTarget, GCGLint level, GCGLenum internalFormat, GCGLenum format, GCGLenum type, bool premultiplyAlpha, bool flipY)
3392 {
3393     UNUSED_PARAM(context);
3394
3395     if (m_isUsingFallbackVideoSink)
3396         return false;
3397
3398     if (premultiplyAlpha)
3399         return false;
3400
3401     auto sampleLocker = holdLock(m_sampleMutex);
3402
3403     if (!GST_IS_SAMPLE(m_sample.get()))
3404         return false;
3405
3406     std::unique_ptr<GstVideoFrameHolder> frameHolder = makeUnique<GstVideoFrameHolder>(m_sample.get(), m_videoDecoderPlatform, m_textureMapperFlags, true);
3407
3408     std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = frameHolder->platformLayerBuffer();
3409     if (!layerBuffer)
3410         return false;
3411
3412     auto size = frameHolder->size();
3413     if (m_videoSourceOrientation.usesWidthAsHeight())
3414         size = size.transposedSize();
3415
3416     if (!m_videoTextureCopier)
3417         m_videoTextureCopier = makeUnique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);
3418
3419     frameHolder->waitForCPUSync();
3420
3421     return m_videoTextureCopier->copyVideoTextureToPlatformTexture(*layerBuffer.get(), size, outputTexture, outputTarget, level, internalFormat, format, type, flipY, m_videoSourceOrientation);
3422 }
3423
3424 NativeImagePtr MediaPlayerPrivateGStreamer::nativeImageForCurrentTime()
3425 {
3426 #if USE(CAIRO) && ENABLE(ACCELERATED_2D_CANVAS)
3427     if (m_isUsingFallbackVideoSink)
3428         return nullptr;
3429
3430     auto sampleLocker = holdLock(m_sampleMutex);
3431
3432     if (!GST_IS_SAMPLE(m_sample.get()))
3433         return nullptr;
3434
3435     std::unique_ptr<GstVideoFrameHolder> frameHolder = makeUnique<GstVideoFrameHolder>(m_sample.get(), m_videoDecoderPlatform, m_textureMapperFlags, true);
3436
3437     std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = frameHolder->platformLayerBuffer();
3438     if (!layerBuffer)
3439         return nullptr;
3440
3441     auto size = frameHolder->size();
3442     if (m_videoSourceOrientation.usesWidthAsHeight())
3443         size = size.transposedSize();
3444
3445     GLContext* context = PlatformDisplay::sharedDisplayForCompositing().sharingGLContext();
3446     context->makeContextCurrent();
3447
3448     if (!m_videoTextureCopier)
3449         m_videoTextureCopier = makeUnique<VideoTextureCopierGStreamer>(TEXTURE_COPIER_COLOR_CONVERT_FLAG);
3450
3451     frameHolder->waitForCPUSync();
3452
3453     if (!m_videoTextureCopier->copyVideoTextureToPlatformTexture(*layerBuffer.get(), size, 0, GL_TEXTURE_2D, 0, GL_RGBA, GL_RGBA, GL_UNSIGNED_BYTE, false, m_videoSourceOrientation))
3454         return nullptr;
3455
3456     return adoptRef(cairo_gl_surface_create_for_texture(context->cairoDevice(), CAIRO_CONTENT_COLOR_ALPHA, m_videoTextureCopier->resultTexture(), size.width(), size.height()));
3457 #else
3458     return nullptr;
3459 #endif
3460 }
3461 #endif // USE(GSTREAMER_GL)
3462
3463 void MediaPlayerPrivateGStreamer::setVideoSourceOrientation(ImageOrientation orientation)
3464 {
3465     if (m_videoSourceOrientation == orientation)
3466         return;
3467
3468     m_videoSourceOrientation = orientation;
3469 #if USE(TEXTURE_MAPPER_GL)
3470     updateTextureMapperFlags();
3471 #endif
3472 }
3473
3474 #if USE(TEXTURE_MAPPER_GL)
3475 void MediaPlayerPrivateGStreamer::updateTextureMapperFlags()
3476 {
3477     switch (m_videoSourceOrientation) {
3478     case ImageOrientation::OriginTopLeft:
3479         m_textureMapperFlags = 0;
3480         break;
3481     case ImageOrientation::OriginRightTop:
3482         m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture90;
3483         break;
3484     case ImageOrientation::OriginBottomRight:
3485         m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture180;
3486         break;
3487     case ImageOrientation::OriginLeftBottom:
3488         m_textureMapperFlags = TextureMapperGL::ShouldRotateTexture270;
3489         break;
3490     default:
3491         // FIXME: Handle OriginTopRight, OriginBottomLeft, OriginLeftTop and OriginRightBottom?
3492         m_textureMapperFlags = 0;
3493         break;
3494     }
3495 }
3496 #endif
3497
3498 bool MediaPlayerPrivateGStreamer::supportsFullscreen() const
3499 {
3500     return true;
3501 }
3502
3503 MediaPlayer::MovieLoadType MediaPlayerPrivateGStreamer::movieLoadType() const
3504 {
3505     if (m_readyState == MediaPlayer::ReadyState::HaveNothing)
3506         return MediaPlayer::MovieLoadType::Unknown;
3507
3508     if (m_isLiveStream)
3509         return MediaPlayer::MovieLoadType::LiveStream;
3510
3511     return MediaPlayer::MovieLoadType::Download;
3512 }
3513
3514 #if USE(GSTREAMER_GL)
3515 GstElement* MediaPlayerPrivateGStreamer::createVideoSinkGL()
3516 {
3517     if (!webKitGLVideoSinkProbePlatform()) {
3518         g_warning("WebKit wasn't able to find the GL video sink dependencies. Hardware-accelerated zero-copy video rendering can't be enabled without this plugin.");
3519         return nullptr;
3520     }
3521
3522     GstElement* sink = gst_element_factory_make("webkitglvideosink", nullptr);
3523     ASSERT(sink);
3524     webKitGLVideoSinkSetMediaPlayerPrivate(WEBKIT_GL_VIDEO_SINK(sink), this);
3525     return sink;
3526 }
3527 #endif // USE(GSTREAMER_GL)
3528
3529 #if USE(GSTREAMER_HOLEPUNCH)
3530 static void setRectangleToVideoSink(GstElement* videoSink, const IntRect& rect)
3531 {
    // Here goes the platform-dependent code to set the size and position of the
    // video rendering window on the videoSink. Mark the parameters unused by default.
3534     UNUSED_PARAM(videoSink);
3535     UNUSED_PARAM(rect);
3536 }
3537
3538 class GStreamerHolePunchClient : public TextureMapperPlatformLayerBuffer::HolePunchClient {
3539 public:
3540     GStreamerHolePunchClient(GRefPtr<GstElement>&& videoSink) : m_videoSink(WTFMove(videoSink)) { };
3541     void setVideoRectangle(const IntRect& rect) final { setRectangleToVideoSink(m_videoSink.get(), rect); }
3542 private:
3543     GRefPtr<GstElement> m_videoSink;
3544 };
3545
3546 GstElement* MediaPlayerPrivateGStreamer::createHolePunchVideoSink()
3547 {
    // Here goes the platform-dependent code to create the videoSink. By default
    // we use a fakevideosink so nothing is drawn to the page.
    GstElement* videoSink = gst_element_factory_make("fakevideosink", nullptr);
3551
3552     return videoSink;
3553 }
3554
3555 void MediaPlayerPrivateGStreamer::pushNextHolePunchBuffer()
3556 {
3557     auto proxyOperation =
3558         [this](TextureMapperPlatformLayerProxy& proxy)
3559         {
3560             LockHolder holder(proxy.lock());
3561             std::unique_ptr<TextureMapperPlatformLayerBuffer> layerBuffer = makeUnique<TextureMapperPlatformLayerBuffer>(0, m_size, TextureMapperGL::ShouldNotBlend, GL_DONT_CARE);
3562             std::unique_ptr<GStreamerHolePunchClient> holePunchClient = makeUnique<GStreamerHolePunchClient>(m_videoSink.get());
3563             layerBuffer->setHolePunchClient(WTFMove(holePunchClient));
3564             proxy.pushNextBuffer(WTFMove(layerBuffer));
3565         };
3566
3567 #if USE(NICOSIA)
3568     proxyOperation(downcast<Nicosia::ContentLayerTextureMapperImpl>(m_nicosiaLayer->impl()).proxy());
3569 #else
3570     proxyOperation(*m_platformLayerProxy);
3571 #endif
3572 }
3573 #endif
3574
3575 GstElement* MediaPlayerPrivateGStreamer::createVideoSink()
3576 {
3577     acceleratedRenderingStateChanged();
3578
3579 #if USE(GSTREAMER_HOLEPUNCH)
3580     m_videoSink = createHolePunchVideoSink();
3581     pushNextHolePunchBuffer();
3582     return m_videoSink.get();
3583 #endif
3584
3585 #if USE(GSTREAMER_GL)
3586     if (m_canRenderingBeAccelerated)
3587         m_videoSink = createVideoSinkGL();
3588 #endif
3589
3590     if (!m_videoSink) {
3591         m_isUsingFallbackVideoSink = true;
3592         m_videoSink = webkitVideoSinkNew();
3593         g_signal_connect_swapped(m_videoSink.get(), "repaint-requested", G_CALLBACK(repaintCallback), this);
3594         g_signal_connect_swapped(m_videoSink.get(), "repaint-cancelled", G_CALLBACK(repaintCancelledCallback), this);
3595     }
3596
3597     GstElement* videoSink = nullptr;
3598     if (!webkitGstCheckVersion(1, 17, 0)) {
3599         m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");