Unreviewed, rolling out r234489.
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamer.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015, 2016 Igalia S.L
7  * Copyright (C) 2014 Cable Television Laboratories, Inc.
8  * Copyright (C) 2015, 2016 Metrological Group B.V.
9  *
10  * This library is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Library General Public
12  * License as published by the Free Software Foundation; either
13  * version 2 of the License, or (at your option) any later version.
14  *
15  * This library is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18  * Library General Public License for more details.
19  *
20  * You should have received a copy of the GNU Library General Public License
21  * along with this library; see the file COPYING.LIB.  If not, write to
22  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
23  * Boston, MA 02110-1301, USA.
24  */
25
26 #include "config.h"
27 #include "MediaPlayerPrivateGStreamer.h"
28
29 #if ENABLE(VIDEO) && USE(GSTREAMER)
30
31 #include "FileSystem.h"
32 #include "GStreamerCommon.h"
33 #include "HTTPHeaderNames.h"
34 #include "MIMETypeRegistry.h"
35 #include "MediaPlayer.h"
36 #include "MediaPlayerRequestInstallMissingPluginsCallback.h"
37 #include "NotImplemented.h"
38 #include "SecurityOrigin.h"
39 #include "TimeRanges.h"
40 #include "URL.h"
41 #include "WebKitWebSourceGStreamer.h"
42 #include <glib.h>
43 #include <gst/gst.h>
44 #include <gst/pbutils/missing-plugins.h>
45 #include <limits>
46 #include <wtf/HexNumber.h>
47 #include <wtf/MediaTime.h>
48 #include <wtf/NeverDestroyed.h>
49 #include <wtf/StringPrintStream.h>
50 #include <wtf/glib/GUniquePtr.h>
51 #include <wtf/glib/RunLoopSourcePriority.h>
52 #include <wtf/text/CString.h>
53
54 #if ENABLE(MEDIA_STREAM) && GST_CHECK_VERSION(1, 10, 0)
55 #include "GStreamerMediaStreamSource.h"
56 #endif
57
58 #if ENABLE(VIDEO_TRACK)
59 #include "AudioTrackPrivateGStreamer.h"
60 #include "InbandMetadataTextTrackPrivateGStreamer.h"
61 #include "InbandTextTrackPrivateGStreamer.h"
62 #include "TextCombinerGStreamer.h"
63 #include "TextSinkGStreamer.h"
64 #include "VideoTrackPrivateGStreamer.h"
65 #endif
66
67 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
68 #define GST_USE_UNSTABLE_API
69 #include <gst/mpegts/mpegts.h>
70 #undef GST_USE_UNSTABLE_API
71 #endif
72 #include <gst/audio/streamvolume.h>
73
74 #if ENABLE(MEDIA_SOURCE)
75 #include "MediaSource.h"
76 #include "WebKitMediaSourceGStreamer.h"
77 #endif
78
79 #if ENABLE(WEB_AUDIO)
80 #include "AudioSourceProviderGStreamer.h"
81 #endif
82
83 GST_DEBUG_CATEGORY_EXTERN(webkit_media_player_debug);
84 #define GST_CAT_DEFAULT webkit_media_player_debug
85
86
87 namespace WebCore {
88 using namespace std;
89
// Trampoline for the GstBus "message" signal: forwards every bus message to
// the owning player. Disconnected again in the destructor before the bus
// signal watch is removed.
static void busMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
{
    player->handleMessage(message);
}
94
// C-style trampoline used when connecting to the audio sink; forwards to the
// member function that actually configures the stream properties.
void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
{
    player->setAudioStreamProperties(object);
}
99
100 void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
101 {
102     if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
103         return;
104
105     const char* role = m_player->client().mediaPlayerIsVideo() ? "video" : "music";
106     GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr);
107     g_object_set(object, "stream-properties", structure, nullptr);
108     gst_structure_free(structure);
109     GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
110     GST_DEBUG("Set media.role as %s at %s", role, elementName.get());
111 }
112
113 void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar)
114 {
115     if (isAvailable()) {
116         registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamer>(player); },
117             getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
118     }
119 }
120
121 bool MediaPlayerPrivateGStreamer::isAvailable()
122 {
123     if (!MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements())
124         return false;
125
126     GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
127     return factory;
128 }
129
// Constructor: gives every member an explicit initial value. The pipeline
// itself is created lazily on the first load (see loadFull()).
MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
    : MediaPlayerPrivateGStreamerBase(player)
    , m_buffering(false)
    , m_bufferingPercentage(0)
    , m_canFallBackToLastFinishedSeekPosition(false)
    , m_changingRate(false)
    , m_downloadFinished(false)
    , m_errorOccured(false)
    , m_isEndReached(false)
    , m_isStreaming(false)
    , m_durationAtEOS(MediaTime::invalidTime())
    , m_paused(true)
    , m_playbackRate(1)
    , m_requestedState(GST_STATE_VOID_PENDING)
    , m_resetPipeline(false)
    , m_seeking(false)
    , m_seekIsPending(false)
    , m_seekTime(MediaTime::invalidTime())
    , m_source(nullptr)
    , m_volumeAndMuteInitialized(false)
    , m_mediaLocations(nullptr)
    , m_mediaLocationCurrentIndex(0)
    , m_playbackRatePause(false)
    , m_timeOfOverlappingSeek(MediaTime::invalidTime())
    , m_lastPlaybackRate(1)
    , m_fillTimer(*this, &MediaPlayerPrivateGStreamer::fillTimerFired)
    , m_maxTimeLoaded(MediaTime::zeroTime())
    , m_preload(player->preload())
    , m_delayingLoad(false)
    , m_maxTimeLoadedAtLastDidLoadingProgress(MediaTime::zeroTime())
    , m_hasVideo(false)
    , m_hasAudio(false)
    , m_readyTimerHandler(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::readyTimerFired)
    , m_totalBytes(0)
    , m_preservesPitch(false)
{
#if USE(GLIB)
    // Run the READY-state cleanup timer at idle priority so it doesn't compete
    // with more important main-loop work.
    m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
#endif
}
170
// Destructor: disconnects every signal handler registered against |this| and
// stops all timers so no callback can fire into a destroyed object.
MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
{
    GST_DEBUG("Disposing player");

#if ENABLE(VIDEO_TRACK)
    // Detach track wrappers first; they hold references back into the player.
    for (auto& track : m_audioTracks.values())
        track->disconnect();

    for (auto& track : m_textTracks.values())
        track->disconnect();

    for (auto& track : m_videoTracks.values())
        track->disconnect();
#endif
    if (m_fillTimer.isActive())
        m_fillTimer.stop();

    // m_mediaLocations is a plain GstStructure owned by us (see the
    // constructor's nullptr init); release it manually.
    if (m_mediaLocations) {
        gst_structure_free(m_mediaLocations);
        m_mediaLocations = nullptr;
    }

    // The element-added handler was connected on the source's parent bin, so
    // only disconnect when the source is our WebKitWebSrc and still parented.
    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    if (m_autoAudioSink) {
        g_signal_handlers_disconnect_by_func(G_OBJECT(m_autoAudioSink.get()),
            reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);
    }

    m_readyTimerHandler.stop();
    // Invalidate outstanding missing-plugin installer callbacks so their
    // completion handlers become no-ops.
    for (auto& missingPluginCallback : m_missingPluginCallbacks) {
        if (missingPluginCallback)
            missingPluginCallback->invalidate();
    }
    m_missingPluginCallbacks.clear();

    if (m_videoSink) {
        GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
        g_signal_handlers_disconnect_matched(videoSinkPad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
    }

    // Finally tear down bus watches and any remaining pipeline-level handlers.
    if (m_pipeline) {
        GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
        ASSERT(bus);
        g_signal_handlers_disconnect_by_func(bus.get(), gpointer(busMessageCallback), this);
        gst_bus_remove_signal_watch(bus.get());
        gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
        g_signal_handlers_disconnect_matched(m_pipeline.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
    }
}
222
223 static void convertToInternalProtocol(URL& url)
224 {
225     if (url.protocolIsInHTTPFamily() || url.protocolIsBlob())
226         url.setProtocol("webkit+" + url.protocol());
227 }
228
229 void MediaPlayerPrivateGStreamer::setPlaybinURL(const URL& url)
230 {
231     // Clean out everything after file:// url path.
232     String cleanURLString(url.string());
233     if (url.isLocalFile())
234         cleanURLString = cleanURLString.substring(0, url.pathEnd());
235
236     m_url = URL(URL(), cleanURLString);
237     convertToInternalProtocol(m_url);
238
239     GST_INFO("Load %s", m_url.string().utf8().data());
240     g_object_set(m_pipeline.get(), "uri", m_url.string().utf8().data(), nullptr);
241 }
242
// Plain URL load entry point: defers to loadFull() with the default playbin
// variant and an unnamed pipeline.
void MediaPlayerPrivateGStreamer::load(const String& urlString)
{
    loadFull(urlString, nullptr, String());
}
247
// Full load entry point. Creates the pipeline on first use (with the requested
// playbin variant and name), sets the URI, resets player state and either
// starts loading immediately or defers it until prepareToPlay() when preload
// is disabled.
void MediaPlayerPrivateGStreamer::loadFull(const String& urlString, const gchar* playbinName,
    const String& pipelineName)
{
    // FIXME: This method is still called even if supportsType() returned
    // IsNotSupported. This would deserve more investigation but meanwhile make
    // sure we don't ever try to play animated gif assets.
    if (m_player->contentMIMEType() == "image/gif") {
        loadingFailed(MediaPlayer::FormatError);
        return;
    }

    if (!MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements())
        return;

    URL url(URL(), urlString);
    if (url.isBlankURL())
        return;

    // Media source playback always forces legacy playbin, regardless of the
    // requested playbinName.
    if (!m_pipeline)
        createGSTPlayBin(isMediaSource() ? "playbin" : playbinName, pipelineName);

    if (m_fillTimer.isActive())
        m_fillTimer.stop();

    ASSERT(m_pipeline);

    setPlaybinURL(url);

    GST_DEBUG("preload: %s", convertEnumerationToString(m_preload).utf8().data());
    if (m_preload == MediaPlayer::None) {
        GST_INFO("Delaying load.");
        m_delayingLoad = true;
    }

    // Reset network and ready states. Those will be set properly once
    // the pipeline pre-rolled.
    m_networkState = MediaPlayer::Loading;
    m_player->networkStateChanged();
    m_readyState = MediaPlayer::HaveNothing;
    m_player->readyStateChanged();
    m_volumeAndMuteInitialized = false;
    m_durationAtEOS = MediaTime::invalidTime();

    if (!m_delayingLoad)
        commitLoad();
}
294
#if ENABLE(MEDIA_SOURCE)
// This engine does not implement MSE playback itself; report a format error
// so the global MediaPlayer falls back to the next candidate engine.
void MediaPlayerPrivateGStreamer::load(const String&, MediaSourcePrivateClient*)
{
    // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
}
#endif
303
#if ENABLE(MEDIA_STREAM)
// Loads a MediaStream through playbin3 via a mediastream:// URI (requires
// GStreamer >= 1.10); on older GStreamer the load fails so another engine can
// be tried.
void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate& stream)
{
#if GST_CHECK_VERSION(1, 10, 0)
    m_streamPrivate = &stream;
    // Name the pipeline after whether any track is locally captured, for logs.
    auto pipelineName = String::format("mediastream_%s_%p",
        (stream.hasCaptureVideoSource() || stream.hasCaptureAudioSource()) ? "Local" : "Remote", this);

    loadFull(String("mediastream://") + stream.id(), "playbin3", pipelineName);
#if USE(GSTREAMER_GL)
    ensureGLVideoSinkContext();
#endif
    // Streams autoplay: kick off playback right away.
    m_player->play();
#else
    // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
    notImplemented();
#endif
}
#endif
325
// Actually starts loading: pre-rolls the pipeline in PAUSED, configures
// on-disk buffering according to preload, and refreshes the cached states.
void MediaPlayerPrivateGStreamer::commitLoad()
{
    ASSERT(!m_delayingLoad);
    GST_DEBUG("Committing load.");

    // GStreamer needs to have the pipeline set to a paused state to
    // start providing anything useful.
    changePipelineState(GST_STATE_PAUSED);

    setDownloadBuffering();
    updateStates();
}
338
339 MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
340 {
341     if (m_isEndReached && m_seeking)
342         return m_seekTime;
343
344     // Position is only available if no async state change is going on and the state is either paused or playing.
345     gint64 position = GST_CLOCK_TIME_NONE;
346     GstQuery* query = gst_query_new_position(GST_FORMAT_TIME);
347     if (gst_element_query(m_pipeline.get(), query))
348         gst_query_parse_position(query, 0, &position);
349     gst_query_unref(query);
350
351     GST_LOG("Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));
352
353     MediaTime playbackPosition = MediaTime::zeroTime();
354     GstClockTime gstreamerPosition = static_cast<GstClockTime>(position);
355     if (GST_CLOCK_TIME_IS_VALID(gstreamerPosition))
356         playbackPosition = MediaTime(gstreamerPosition, GST_SECOND);
357     else if (m_canFallBackToLastFinishedSeekPosition)
358         playbackPosition = m_seekTime;
359
360     return playbackPosition;
361 }
362
// Fired when the pipeline has lingered in READY for the timeout configured in
// changePipelineState(); drops to NULL to release decoder/sink resources.
void MediaPlayerPrivateGStreamer::readyTimerFired()
{
    GST_DEBUG("In READY for too long. Releasing pipeline resources.");
    changePipelineState(GST_STATE_NULL);
}
368
// Requests a pipeline state change. Returns true when the request was accepted
// (including the no-op case where the state is already current or pending) and
// false when GStreamer reported an immediate failure.
bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
{
    ASSERT(m_pipeline);

    GstState currentState;
    GstState pending;

    gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
    // Requesting the current or already-pending state is treated as success.
    if (currentState == newState || pending == newState) {
        GST_DEBUG("Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
            gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
        return true;
    }

    GST_DEBUG("Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState),
        gst_element_state_get_name(currentState), gst_element_state_get_name(pending));

#if USE(GSTREAMER_GL)
    // The GL video sink needs its GL context before pre-rolling.
    if (currentState == GST_STATE_READY && newState == GST_STATE_PAUSED)
        ensureGLVideoSinkContext();
#endif

    GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState);
    GstState pausedOrPlaying = newState == GST_STATE_PLAYING ? GST_STATE_PAUSED : GST_STATE_PLAYING;
    // A failure while already in the "other" active state is tolerated here;
    // otherwise report it to the caller.
    if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE)
        return false;

    // Create a timer when entering the READY state so that we can free resources
    // if we stay for too long on READY.
    // Also lets remove the timer if we request a state change for any state other than READY.
    // See also https://bugs.webkit.org/show_bug.cgi?id=117354
    if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
        // Max interval in seconds to stay in the READY state on manual
        // state change requests.
        static const Seconds readyStateTimerDelay { 1_min };
        m_readyTimerHandler.startOneShot(readyStateTimerDelay);
    } else if (newState != GST_STATE_READY)
        m_readyTimerHandler.stop();

    return true;
}
410
411 void MediaPlayerPrivateGStreamer::prepareToPlay()
412 {
413     GST_DEBUG("Prepare to play");
414     m_preload = MediaPlayer::Auto;
415     if (m_delayingLoad) {
416         m_delayingLoad = false;
417         commitLoad();
418     }
419 }
420
421 void MediaPlayerPrivateGStreamer::play()
422 {
423     if (!m_playbackRate) {
424         m_playbackRatePause = true;
425         return;
426     }
427
428     if (changePipelineState(GST_STATE_PLAYING)) {
429         m_isEndReached = false;
430         m_delayingLoad = false;
431         m_preload = MediaPlayer::Auto;
432         setDownloadBuffering();
433         GST_INFO("Play");
434     } else
435         loadingFailed(MediaPlayer::Empty);
436 }
437
438 void MediaPlayerPrivateGStreamer::pause()
439 {
440     m_playbackRatePause = false;
441     GstState currentState, pendingState;
442     gst_element_get_state(m_pipeline.get(), &currentState, &pendingState, 0);
443     if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
444         return;
445
446     if (changePipelineState(GST_STATE_PAUSED))
447         GST_INFO("Pause");
448     else
449         loadingFailed(MediaPlayer::Empty);
450 }
451
// Returns the media duration: the value cached at EOS when available,
// otherwise the result of a duration query on the (pre-rolled) pipeline.
// Reports positive infinity while the duration cannot be determined yet.
MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
{
    if (!m_pipeline || m_errorOccured)
        return MediaTime::invalidTime();

    // Prefer the duration recorded when end-of-stream was reached.
    if (m_durationAtEOS.isValid())
        return m_durationAtEOS;

    // The duration query would fail on a not-prerolled pipeline.
    if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
        return MediaTime::positiveInfiniteTime();

    gint64 timeLength = 0;

    if (!gst_element_query_duration(m_pipeline.get(), GST_FORMAT_TIME, &timeLength) || !GST_CLOCK_TIME_IS_VALID(timeLength)) {
        GST_DEBUG("Time duration query failed for %s", m_url.string().utf8().data());
        return MediaTime::positiveInfiniteTime();
    }

    GST_LOG("Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(timeLength));

    // timeLength is in nanoseconds; GST_SECOND is the matching time scale.
    return MediaTime(timeLength, GST_SECOND);
    // FIXME: handle 3.14.9.5 properly
}
476
477 MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
478 {
479     if (!m_pipeline || m_errorOccured)
480         return MediaTime::invalidTime();
481
482     if (m_seeking)
483         return m_seekTime;
484
485     return playbackPosition();
486 }
487
// Seeks to |mediaTime| (clamped to the duration). Seeks arriving while one is
// already in flight are recorded as "overlapping" and replayed later; seeks
// that cannot be executed yet (async state change, not pre-rolled, at EOS)
// are marked pending instead of being performed immediately.
void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime)
{
    if (!m_pipeline)
        return;

    if (m_errorOccured)
        return;

    GST_INFO("[Seek] seek attempt to %s", toString(mediaTime).utf8().data());

    // Avoid useless seeking.
    if (mediaTime == currentMediaTime())
        return;

    MediaTime time = std::min(mediaTime, durationMediaTime());

    // Live streams have no seekable range.
    if (isLiveStream())
        return;

    GST_INFO("[Seek] seeking to %s", toString(time).utf8().data());

    if (m_seeking) {
        // Remember the newest target so it can be replayed once the in-flight
        // seek completes; if that seek hasn't even started, just retarget it.
        m_timeOfOverlappingSeek = time;
        if (m_seekIsPending) {
            m_seekTime = time;
            return;
        }
    }

    GstState state;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
    if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
        GST_DEBUG("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
        return;
    }
    if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
        // Defer the seek; at EOS the pipeline additionally needs a re-preroll.
        m_seekIsPending = true;
        if (m_isEndReached) {
            GST_DEBUG("[Seek] reset pipeline");
            m_resetPipeline = true;
            if (!changePipelineState(GST_STATE_PAUSED))
                loadingFailed(MediaPlayer::Empty);
        }
    } else {
        // We can seek now.
        if (!doSeek(time, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
            GST_DEBUG("[Seek] seeking to %s failed", toString(time).utf8().data());
            return;
        }
    }

    m_seeking = true;
    m_seekTime = time;
    m_isEndReached = false;
}
543
// Issues the actual gst_element_seek. For reverse playback (rate < 0) the
// playable segment is [0, position]; a zero rate is mapped to 1.0 because
// GStreamer rejects zero-rate seeks. Returns gst_element_seek's result.
bool MediaPlayerPrivateGStreamer::doSeek(const MediaTime& position, float rate, GstSeekFlags seekType)
{
    // Default values for rate >= 0.
    MediaTime startTime = position, endTime = MediaTime::invalidTime();

    // TODO: Should do more than that, need to notify the media source
    // and probably flush the pipeline at least.
    if (isMediaSource())
        return true;

    if (rate < 0) {
        startTime = MediaTime::zeroTime();
        // If we are at beginning of media, start from the end to
        // avoid immediate EOS.
        if (position < MediaTime::zeroTime())
            endTime = durationMediaTime();
        else
            endTime = position;
    }

    if (!rate)
        rate = 1.0;

    return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType,
        GST_SEEK_TYPE_SET, toGstClockTime(startTime), GST_SEEK_TYPE_SET, toGstClockTime(endTime));
}
570
// Applies m_playbackRate to the pipeline with a flushing seek at the current
// position. Rolls the rate back to the last working value on failure, and
// resumes playback if it was only paused because the rate had been zero.
void MediaPlayerPrivateGStreamer::updatePlaybackRate()
{
    if (!m_changingRate)
        return;

    GST_INFO("Set Rate to %f", m_playbackRate);

    // Mute the sound if the playback rate is negative or too extreme and audio pitch is not adjusted.
    bool mute = m_playbackRate <= 0 || (!m_preservesPitch && (m_playbackRate < 0.8 || m_playbackRate > 2));

    GST_INFO(mute ? "Need to mute audio" : "Do not need to mute audio");

    if (doSeek(playbackPosition(), m_playbackRate, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH))) {
        g_object_set(m_pipeline.get(), "mute", mute, nullptr);
        m_lastPlaybackRate = m_playbackRate;
    } else {
        // Seek failed: restore the previous rate so state stays consistent.
        m_playbackRate = m_lastPlaybackRate;
        GST_ERROR("Set rate to %f failed", m_playbackRate);
    }

    // play() was called while the rate was 0; transition to PLAYING now that
    // a usable rate is in effect.
    if (m_playbackRatePause) {
        GstState state;
        GstState pending;

        gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
        if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
            changePipelineState(GST_STATE_PLAYING);
        m_playbackRatePause = false;
    }

    m_changingRate = false;
    m_player->rateChanged();
}
604
// Reports whether playback is logically paused. Note the two special cases:
// EOS always counts as paused, and a zero playback rate counts as PLAYING
// (the pipeline is held paused only as a stand-in for rate 0 — see play()).
bool MediaPlayerPrivateGStreamer::paused() const
{
    if (m_isEndReached) {
        GST_DEBUG("Ignoring pause at EOS");
        return true;
    }

    if (m_playbackRatePause) {
        GST_DEBUG("Playback rate is 0, simulating PAUSED state");
        // Logically playing, so report not-paused despite the pipeline state.
        return false;
    }

    GstState state;
    gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
    bool paused = state <= GST_STATE_PAUSED;
    GST_DEBUG("Paused: %s", toString(paused).utf8().data());
    return paused;
}
623
// True while a seek (including a pending/overlapping one) is in progress.
bool MediaPlayerPrivateGStreamer::seeking() const
{
    return m_seeking;
}
628
#if GST_CHECK_VERSION(1, 10, 0)
// Removes every track in |tracks| from the MediaPlayer via |method| and then
// empties the map. A macro because the three maps hold different track types.
#define CLEAR_TRACKS(tracks, method) \
    for (auto& track : tracks.values())\
        method(*track);\
    tracks.clear();

// Drops all known audio/video/text tracks, notifying the MediaPlayer of each
// removal. Used when a new stream collection overrides the previous one (see
// updateTracks()).
void MediaPlayerPrivateGStreamer::clearTracks()
{
#if ENABLE(VIDEO_TRACK)
    CLEAR_TRACKS(m_audioTracks, m_player->removeAudioTrack);
    CLEAR_TRACKS(m_videoTracks, m_player->removeVideoTrack);
    CLEAR_TRACKS(m_textTracks, m_player->removeTextTrack);
#endif // ENABLE(VIDEO_TRACK)
}
#undef CLEAR_TRACKS
644
#if ENABLE(VIDEO_TRACK)
// Expanded inside updateTracks()'s loop (relies on its locals |i|, |stream|
// and |useMediaSource|): records that the media has a track of this kind,
// creates and registers the track wrapper unless the media source manages
// tracks itself, and remembers the stream id of the stream GStreamer flagged
// as currently selected.
#define CREATE_TRACK(type, Type) \
    m_has##Type = true; \
    if (!useMediaSource) {\
        RefPtr<Type##TrackPrivateGStreamer> track = Type##TrackPrivateGStreamer::create(makeWeakPtr(*this), i, stream); \
        m_##type##Tracks.add(track->id(), track); \
        m_player->add##Type##Track(*track);\
        if (gst_stream_get_stream_flags(stream.get()) & GST_STREAM_FLAG_SELECT) {                                    \
            m_current##Type##StreamId = String(gst_stream_get_stream_id(stream.get()));                              \
        }                                                                                                            \
    }
656
657 FloatSize MediaPlayerPrivateGStreamer::naturalSize() const
658 {
659 #if ENABLE(MEDIA_STREAM)
660     if (!m_isLegacyPlaybin && !m_currentVideoStreamId.isEmpty()) {
661         RefPtr<VideoTrackPrivateGStreamer> videoTrack = m_videoTracks.get(m_currentVideoStreamId);
662
663         if (videoTrack) {
664             auto tags = gst_stream_get_tags(videoTrack->stream());
665             gint width, height;
666
667             if (gst_tag_list_get_int(tags, WEBKIT_MEDIA_TRACK_TAG_WIDTH, &width) && gst_tag_list_get_int(tags, WEBKIT_MEDIA_TRACK_TAG_HEIGHT, &height))
668                 return FloatSize(width, height);
669         }
670     }
671 #endif // ENABLE(MEDIA_STREAM)
672
673     return MediaPlayerPrivateGStreamerBase::naturalSize();
674 }
675 #else
676 #define CREATE_TRACK(type, _id, tracks, method, stream) m_has##Type## = true;
677 #endif // ENABLE(VIDEO_TRACK)
678
// Rebuilds the track lists from m_streamCollection (playbin3 path only):
// clears the previous tracks, creates wrappers for each stream in the new
// collection, and notifies the player about characteristic/size changes.
void MediaPlayerPrivateGStreamer::updateTracks()
{
    ASSERT(!m_isLegacyPlaybin);

    bool useMediaSource = isMediaSource();
    unsigned length = gst_stream_collection_get_size(m_streamCollection.get());

    bool oldHasAudio = m_hasAudio;
    bool oldHasVideo = m_hasVideo;
    // New stream collections override previous ones.
    clearTracks();
    unsigned textTrackIndex = 0;
    for (unsigned i = 0; i < length; i++) {
        GRefPtr<GstStream> stream = gst_stream_collection_get_stream(m_streamCollection.get(), i);
        String streamId(gst_stream_get_stream_id(stream.get()));
        GstStreamType type = gst_stream_get_stream_type(stream.get());

        GST_DEBUG_OBJECT(pipeline(), "Inspecting %s track with ID %s", gst_stream_type_get_name(type), streamId.utf8().data());
        // CREATE_TRACK uses the loop locals i/stream/useMediaSource; see the
        // macro definition above.
        if (type & GST_STREAM_TYPE_AUDIO) {
            CREATE_TRACK(audio, Audio)
        } else if (type & GST_STREAM_TYPE_VIDEO) {
            CREATE_TRACK(video, Video)
        } else if (type & GST_STREAM_TYPE_TEXT && !useMediaSource) {
#if ENABLE(VIDEO_TRACK)
            RefPtr<InbandTextTrackPrivateGStreamer> track = InbandTextTrackPrivateGStreamer::create(textTrackIndex++, stream);
            m_textTracks.add(streamId, track);
            m_player->addTextTrack(*track);
#endif
        } else
            GST_WARNING("Unknown track type found for stream %s", streamId.utf8().data());
    }

    if ((oldHasVideo != m_hasVideo) || (oldHasAudio != m_hasAudio))
        m_player->characteristicChanged();

    if (m_hasVideo)
        m_player->sizeChanged();

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        m_player->client().mediaPlayerEngineUpdated(m_player);
        return;
    }

    m_player->client().mediaPlayerEngineUpdated(m_player);
}
725 #endif // GST_CHECK_VERSION(1, 10, 0)
726
727 void MediaPlayerPrivateGStreamer::enableTrack(TrackPrivateBaseGStreamer::TrackType trackType, unsigned index)
728 {
729     const char* propertyName;
730     const char* trackTypeAsString;
731     Vector<String> selectedStreams;
732     String selectedStreamId;
733
734 #if GST_CHECK_VERSION(1, 10, 0)
735     GstStream* stream = nullptr;
736
737     if (!m_isLegacyPlaybin) {
738         stream = gst_stream_collection_get_stream(m_streamCollection.get(), index);
739         if (!stream) {
740             GST_WARNING_OBJECT(pipeline(), "No stream to select at index %u", index);
741             return;
742         }
743         selectedStreamId = String::fromUTF8(gst_stream_get_stream_id(stream));
744         selectedStreams.append(selectedStreamId);
745     }
746 #endif // GST_CHECK_VERSION(1,0,0)
747
748     switch (trackType) {
749     case TrackPrivateBaseGStreamer::TrackType::Audio:
750         propertyName = "current-audio";
751         trackTypeAsString = "audio";
752         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentAudioStreamId) {
753             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
754             return;
755         }
756
757         if (!m_currentTextStreamId.isEmpty())
758             selectedStreams.append(m_currentTextStreamId);
759         if (!m_currentVideoStreamId.isEmpty())
760             selectedStreams.append(m_currentVideoStreamId);
761         break;
762     case TrackPrivateBaseGStreamer::TrackType::Video:
763         propertyName = "current-video";
764         trackTypeAsString = "video";
765         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentVideoStreamId) {
766             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
767             return;
768         }
769
770         if (!m_currentAudioStreamId.isEmpty())
771             selectedStreams.append(m_currentAudioStreamId);
772         if (!m_currentTextStreamId.isEmpty())
773             selectedStreams.append(m_currentTextStreamId);
774         break;
775     case TrackPrivateBaseGStreamer::TrackType::Text:
776         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentTextStreamId) {
777             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
778             return;
779         }
780
781         propertyName = "current-text";
782         trackTypeAsString = "text";
783         if (!m_currentAudioStreamId.isEmpty())
784             selectedStreams.append(m_currentAudioStreamId);
785         if (!m_currentVideoStreamId.isEmpty())
786             selectedStreams.append(m_currentVideoStreamId);
787         break;
788     case TrackPrivateBaseGStreamer::TrackType::Unknown:
789     default:
790         ASSERT_NOT_REACHED();
791     }
792
793     GST_INFO("Enabling %s track with index: %u", trackTypeAsString, index);
794     // FIXME: Remove isMediaSource() test below when fixing https://bugs.webkit.org/show_bug.cgi?id=182531
795     if (m_isLegacyPlaybin || isMediaSource()) {
796         GstElement* element = isMediaSource() ? m_source.get() : m_pipeline.get();
797         g_object_set(element, propertyName, index, nullptr);
798     }
799 #if GST_CHECK_VERSION(1, 10, 0)
800     else {
801         GList* selectedStreamsList = nullptr;
802
803         for (const auto& streamId : selectedStreams)
804             selectedStreamsList = g_list_append(selectedStreamsList, g_strdup(streamId.utf8().data()));
805
806         // TODO: MSE GstStream API support: https://bugs.webkit.org/show_bug.cgi?id=182531
807         gst_element_send_event(m_pipeline.get(), gst_event_new_select_streams(selectedStreamsList));
808         g_list_free_full(selectedStreamsList, reinterpret_cast<GDestroyNotify>(g_free));
809     }
810 #endif
811 }
812
// Signal trampoline: hops to the main thread via the notifier before touching
// player state in notifyPlayerOfVideo().
void MediaPlayerPrivateGStreamer::videoChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier->notify(MainThreadNotification::VideoChanged, [player] {
        player->notifyPlayerOfVideo();
    });
}
819
// Legacy-playbin path: re-reads "n-video" from the pipeline (or source for
// MSE) and synchronizes m_videoTracks with the pads currently exposed,
// creating wrappers for new pads and purging stale ones.
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    ASSERT(m_isLegacyPlaybin || isMediaSource());

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-video", &numTracks, nullptr);

    GST_INFO("Media has %d video tracks", numTracks);

    bool oldHasVideo = m_hasVideo;
    m_hasVideo = numTracks > 0;
    if (oldHasVideo != m_hasVideo)
        m_player->characteristicChanged();

    if (m_hasVideo)
        m_player->sizeChanged();

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        m_player->client().mediaPlayerEngineUpdated(m_player);
        return;
    }

#if ENABLE(VIDEO_TRACK)
    Vector<String> validVideoStreams;
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        // Legacy playbin has no stream ids, so synthesize one from the index.
        String streamId = "V" + String::number(i);
        validVideoStreams.append(streamId);
        if (i < static_cast<gint>(m_videoTracks.size())) {
            RefPtr<VideoTrackPrivateGStreamer> existingTrack = m_videoTracks.get(streamId);
            if (existingTrack) {
                existingTrack->setIndex(i);
                // Same pad as before: the track is unchanged, skip re-adding.
                if (existingTrack->pad() == pad)
                    continue;
            }
        }

        RefPtr<VideoTrackPrivateGStreamer> track = VideoTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
        ASSERT(streamId == track->id());
        m_videoTracks.add(streamId, track);
        m_player->addVideoTrack(*track);
    }

    // Drop tracks whose pads disappeared from the pipeline.
    purgeInvalidVideoTracks(validVideoStreams);
#endif

    m_player->client().mediaPlayerEngineUpdated(m_player);
}
877
// Trampoline for a video-sink caps-change notification; bounces the update
// to the main thread before invalidating cached video geometry.
void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier->notify(MainThreadNotification::VideoCapsChanged, [player] {
        player->notifyPlayerOfVideoCaps();
    });
}
884
// The sink caps changed, so the cached natural size is stale. Reset it and
// let the client re-query; the size is presumably recomputed lazily from the
// new caps elsewhere (recomputation site not in view).
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideoCaps()
{
    m_videoSize = IntSize();
    m_player->client().mediaPlayerEngineUpdated(m_player);
}
890
// Trampoline for the playbin "audio-changed" signal; defers the track-list
// update to the main thread via the notifier.
void MediaPlayerPrivateGStreamer::audioChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier->notify(MainThreadNotification::AudioChanged, [player] {
        player->notifyPlayerOfAudio();
    });
}
897
// Main-thread handler for an audio-tracks change. Mirrors
// notifyPlayerOfVideo(): refreshes m_hasAudio and rebuilds the audio track
// list via the legacy playbin "n-audio"/"get-audio-pad" interface.
void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    // Only valid for playbin2 or MSE; playbin3 uses GstStreamCollection.
    ASSERT(m_isLegacyPlaybin || isMediaSource());

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-audio", &numTracks, nullptr);

    GST_INFO("Media has %d audio tracks", numTracks);
    // Tell the media element when audio appears or disappears.
    bool oldHasAudio = m_hasAudio;
    m_hasAudio = numTracks > 0;
    if (oldHasAudio != m_hasAudio)
        m_player->characteristicChanged();

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        m_player->client().mediaPlayerEngineUpdated(m_player);
        return;
    }

#if ENABLE(VIDEO_TRACK)
    // Stable per-index stream IDs ("A0", "A1", ...); reuse existing track
    // objects whose pad is unchanged, replace the rest.
    Vector<String> validAudioStreams;
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        String streamId = "A" + String::number(i);
        validAudioStreams.append(streamId);
        if (i < static_cast<gint>(m_audioTracks.size())) {
            RefPtr<AudioTrackPrivateGStreamer> existingTrack = m_audioTracks.get(streamId);
            if (existingTrack) {
                existingTrack->setIndex(i);
                // Same pad as before: track is still valid, nothing to do.
                if (existingTrack->pad() == pad)
                    continue;
            }
        }

        RefPtr<AudioTrackPrivateGStreamer> track = AudioTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
        ASSERT(streamId == track->id());
        m_audioTracks.add(streamId, track);
        m_player->addAudioTrack(*track);
    }

    // Drop cached tracks whose stream IDs are no longer reported.
    purgeInvalidAudioTracks(validAudioStreams);
#endif

    m_player->client().mediaPlayerEngineUpdated(m_player);
}
951
952 #if ENABLE(VIDEO_TRACK)
// Trampoline for the playbin "text-changed" signal; defers the text-track
// list update to the main thread via the notifier.
void MediaPlayerPrivateGStreamer::textChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier->notify(MainThreadNotification::TextChanged, [player] {
        player->notifyPlayerOfText();
    });
}
959
// Main-thread handler for a text-tracks change. Same pattern as the audio
// and video handlers, using the legacy "n-text"/"get-text-pad" interface.
void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    // Only valid for playbin2 or MSE; playbin3 uses GstStreamCollection.
    ASSERT(m_isLegacyPlaybin || isMediaSource());

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-text", &numTracks, nullptr);

    GST_INFO("Media has %d text tracks", numTracks);

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        return;
    }

    Vector<String> validTextStreams;
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        // We can't assume the pad has a sticky event here like implemented in
        // InbandTextTrackPrivateGStreamer because it might be emitted after the
        // track was created. So fallback to a dummy stream ID like in the Audio
        // and Video tracks.
        String streamId = "T" + String::number(i);

        validTextStreams.append(streamId);
        if (i < static_cast<gint>(m_textTracks.size())) {
            RefPtr<InbandTextTrackPrivateGStreamer> existingTrack = m_textTracks.get(streamId);
            if (existingTrack) {
                existingTrack->setIndex(i);
                // Same pad as before: track is still valid, nothing to do.
                if (existingTrack->pad() == pad)
                    continue;
            }
        }

        RefPtr<InbandTextTrackPrivateGStreamer> track = InbandTextTrackPrivateGStreamer::create(i, pad);
        m_textTracks.add(streamId, track);
        m_player->addTextTrack(*track);
    }

    // Drop cached tracks whose stream IDs are no longer reported.
    purgeInvalidTextTracks(validTextStreams);
}
1008
// appsink-style "new-sample" callback for the text sink: forwards to the
// member handler and always reports success to the streaming thread.
GstFlowReturn MediaPlayerPrivateGStreamer::newTextSampleCallback(MediaPlayerPrivateGStreamer* player)
{
    player->newTextSample();
    return GST_FLOW_OK;
}
1014
1015 void MediaPlayerPrivateGStreamer::newTextSample()
1016 {
1017     if (!m_textAppSink)
1018         return;
1019
1020     GRefPtr<GstEvent> streamStartEvent = adoptGRef(
1021         gst_pad_get_sticky_event(m_textAppSinkPad.get(), GST_EVENT_STREAM_START, 0));
1022
1023     GRefPtr<GstSample> sample;
1024     g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr);
1025     ASSERT(sample);
1026
1027     if (streamStartEvent) {
1028         bool found = FALSE;
1029         const gchar* id;
1030         gst_event_parse_stream_start(streamStartEvent.get(), &id);
1031         for (auto& track : m_textTracks.values()) {
1032             if (!strcmp(track->streamId().utf8().data(), id)) {
1033                 track->handleSample(sample);
1034                 found = true;
1035                 break;
1036             }
1037         }
1038         if (!found)
1039             GST_WARNING("Got sample with unknown stream ID %s.", id);
1040     } else
1041         GST_WARNING("Unable to handle sample with no stream start event.");
1042 }
1043 #endif
1044
// Requests a new playback rate. Rate 0 is modeled as "paused by rate"
// (m_playbackRatePause) rather than an actual pipeline rate change; live
// streams reject rate changes outright. The actual seek-based rate update
// happens in updatePlaybackRate() (defined elsewhere).
void MediaPlayerPrivateGStreamer::setRate(float rate)
{
    // Higher rate causes crash.
    rate = clampTo(rate, -20.0, 20.0);

    // Avoid useless playback rate update.
    if (m_playbackRate == rate) {
        // and make sure that upper layers were notified if rate was set

        if (!m_changingRate && m_player->rate() != m_playbackRate)
            m_player->rateChanged();
        return;
    }

    if (isLiveStream()) {
        // notify upper layers that we cannot handle passed rate.
        m_changingRate = false;
        m_player->rateChanged();
        return;
    }

    GstState state;
    GstState pending;

    // Record the new target rate before querying the pipeline state; the
    // pending flag is cleared once the rate change is actually applied.
    m_playbackRate = rate;
    m_changingRate = true;

    gst_element_get_state(m_pipeline.get(), &state, &pending, 0);

    if (!rate) {
        // Rate 0 means "effectively paused": pause the pipeline instead of
        // performing a zero-rate seek, and remember why we paused.
        m_changingRate = false;
        m_playbackRatePause = true;
        if (state != GST_STATE_PAUSED && pending != GST_STATE_PAUSED)
            changePipelineState(GST_STATE_PAUSED);
        return;
    }

    // Defer the update while the pipeline is not in a steady PLAYING/PAUSED
    // state (m_playbackRate already holds the target for later).
    if ((state != GST_STATE_PLAYING && state != GST_STATE_PAUSED)
        || (pending == GST_STATE_PAUSED))
        return;

    updatePlaybackRate();
}
1088
// Returns the current (possibly not-yet-applied) target playback rate.
double MediaPlayerPrivateGStreamer::rate() const
{
    return m_playbackRate;
}
1093
// Records whether pitch should be preserved on rate changes; consumed when
// the playback rate is (re)applied elsewhere.
void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
{
    m_preservesPitch = preservesPitch;
}
1098
// Reports the buffered ranges by converting the pipeline's percent-based
// buffering query into media times. Returns an empty range set on error,
// for live streams, or when the duration is unknown/infinite (the percent
// scale cannot be converted without a finite duration).
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
{
    auto timeRanges = std::make_unique<PlatformTimeRanges>();
    if (m_errorOccured || isLiveStream())
        return timeRanges;

    MediaTime mediaDuration = durationMediaTime();
    if (!mediaDuration || mediaDuration.isPositiveInfinite())
        return timeRanges;

    GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);

    // Raw GstQuery: must be unreffed on every return path below.
    if (!gst_element_query(m_pipeline.get(), query)) {
        gst_query_unref(query);
        return timeRanges;
    }

    // Each range comes back in [0, GST_FORMAT_PERCENT_MAX]; scale it to
    // nanoseconds of the media duration.
    guint numBufferingRanges = gst_query_get_n_buffering_ranges(query);
    for (guint index = 0; index < numBufferingRanges; index++) {
        gint64 rangeStart = 0, rangeStop = 0;
        if (gst_query_parse_nth_buffering_range(query, index, &rangeStart, &rangeStop)) {
            timeRanges->add(MediaTime(rangeStart * toGstUnsigned64Time(mediaDuration) / GST_FORMAT_PERCENT_MAX, GST_SECOND),
                MediaTime(rangeStop * toGstUnsigned64Time(mediaDuration) / GST_FORMAT_PERCENT_MAX, GST_SECOND));
        }
    }

    // Fallback to the more general maxTimeLoaded() if no range has
    // been found.
    if (!timeRanges->length()) {
        MediaTime loaded = maxTimeLoaded();
        if (loaded.isValid() && loaded)
            timeRanges->add(MediaTime::zeroTime(), loaded);
    }

    gst_query_unref(query);

    return timeRanges;
}
1137
// Central GStreamer bus-message dispatcher: maps pipeline errors to
// MediaPlayer network states, drives state/buffering/duration updates,
// handles missing-plugin installation, MPEG-TS sections, TOCs, tags, and
// (playbin3) stream-collection selection.
void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
{
    GUniqueOutPtr<GError> err;
    GUniqueOutPtr<gchar> debug;
    MediaPlayer::NetworkState error;
    bool issueError = true;
    bool attemptNextLocation = false;
    const GstStructure* structure = gst_message_get_structure(message);
    GstState requestedState, currentState;

    m_canFallBackToLastFinishedSeekPosition = false;

    if (structure) {
        const gchar* messageTypeName = gst_structure_get_name(structure);

        // Redirect messages are sent from elements, like qtdemux, to
        // notify of the new location(s) of the media.
        if (!g_strcmp0(messageTypeName, "redirect")) {
            mediaLocationChanged(message);
            return;
        }
    }

    // We ignore state changes from internal elements. They are forwarded to playbin2 anyway.
    bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get());

    GST_LOG("Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_ERROR:
        // Ignore errors while resetting, while a missing-plugin install is
        // pending, or after an error was already reported.
        if (m_resetPipeline || !m_missingPluginCallbacks.isEmpty() || m_errorOccured)
            break;
        gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
        GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());

        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");

        // Map the GError domain/code onto a MediaPlayer network state.
        error = MediaPlayer::Empty;
        if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_CODEC_NOT_FOUND)
            || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE)
            || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED)
            || g_error_matches(err.get(), GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)
            || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
            error = MediaPlayer::FormatError;
        else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
            // Let the mediaPlayerClient handle the stream error, in
            // this case the HTMLMediaElement will emit a stalled
            // event.
            GST_ERROR("Decode error, let the Media element emit a stalled event.");
            m_loadingStalled = true;
            break;
        } else if (err->domain == GST_STREAM_ERROR) {
            error = MediaPlayer::DecodeError;
            attemptNextLocation = true;
        } else if (err->domain == GST_RESOURCE_ERROR)
            error = MediaPlayer::NetworkError;

        // For decode errors, first try any alternate media location; only
        // surface the error if that fails (or no alternate exists).
        if (attemptNextLocation)
            issueError = !loadNextLocation();
        if (issueError) {
            m_errorOccured = true;
            if (m_networkState != error) {
                m_networkState = error;
                m_player->networkStateChanged();
            }
        }
        break;
    case GST_MESSAGE_EOS:
        didEnd();
        break;
    case GST_MESSAGE_ASYNC_DONE:
        // Only pipeline-level async completion matters, and not while a
        // deferred load is pending.
        if (!messageSourceIsPlaybin || m_delayingLoad)
            break;
        asyncStateChangeDone();
        break;
    case GST_MESSAGE_STATE_CHANGED: {
        if (!messageSourceIsPlaybin || m_delayingLoad)
            break;
        updateStates();

        // Construct a filename for the graphviz dot file output.
        GstState newState;
        gst_message_parse_state_changed(message, &currentState, &newState, nullptr);
        CString dotFileName = String::format("%s.%s_%s", GST_OBJECT_NAME(m_pipeline.get()),
            gst_element_state_get_name(currentState), gst_element_state_get_name(newState)).utf8();
        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());

        break;
    }
    case GST_MESSAGE_BUFFERING:
        processBufferingStats(message);
        break;
    case GST_MESSAGE_DURATION_CHANGED:
        // Duration in MSE is managed by MediaSource, SourceBuffer and AppendPipeline.
        if (messageSourceIsPlaybin && !isMediaSource())
            durationChanged();
        break;
    case GST_MESSAGE_REQUEST_STATE:
        gst_message_parse_request_state(message, &requestedState);
        gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND);
        // Only honor requests to move to a lower (less active) state.
        if (requestedState < currentState) {
            GST_INFO("Element %s requested state change to %s", GST_MESSAGE_SRC_NAME(message),
                gst_element_state_get_name(requestedState));
            m_requestedState = requestedState;
            if (!changePipelineState(requestedState))
                loadingFailed(MediaPlayer::Empty);
        }
        break;
    case GST_MESSAGE_CLOCK_LOST:
        // This can only happen in PLAYING state and we should just
        // get a new clock by moving back to PAUSED and then to
        // PLAYING again.
        // This can happen if the stream that ends in a sink that
        // provides the current clock disappears, for example if
        // the audio sink provides the clock and the audio stream
        // is disabled. It also happens relatively often with
        // HTTP adaptive streams when switching between different
        // variants of a stream.
        gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
        gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
        break;
    case GST_MESSAGE_LATENCY:
        // Recalculate the latency, we don't need any special handling
        // here other than the GStreamer default.
        // This can happen if the latency of live elements changes, or
        // for one reason or another a new live element is added or
        // removed from the pipeline.
        gst_bin_recalculate_latency(GST_BIN(m_pipeline.get()));
        break;
    case GST_MESSAGE_ELEMENT:
        if (gst_is_missing_plugin_message(message)) {
            if (gst_install_plugins_supported()) {
                // Ask the embedder to install the missing plugin; on success
                // bounce the pipeline READY -> PAUSED to retry decoding.
                // The weak pointer guards against the player being destroyed
                // before the installer calls back.
                RefPtr<MediaPlayerRequestInstallMissingPluginsCallback> missingPluginCallback = MediaPlayerRequestInstallMissingPluginsCallback::create([weakThis = makeWeakPtr(*this)](uint32_t result, MediaPlayerRequestInstallMissingPluginsCallback& missingPluginCallback) {
                    if (!weakThis) {
                        GST_INFO("got missing pluging installation callback in destroyed player with result %u", result);
                        return;
                    }

                    GST_DEBUG("got missing plugin installation callback with result %u", result);
                    RefPtr<MediaPlayerRequestInstallMissingPluginsCallback> protectedMissingPluginCallback = &missingPluginCallback;
                    weakThis->m_missingPluginCallbacks.removeFirst(protectedMissingPluginCallback);
                    if (result != GST_INSTALL_PLUGINS_SUCCESS)
                        return;

                    weakThis->changePipelineState(GST_STATE_READY);
                    weakThis->changePipelineState(GST_STATE_PAUSED);
                });
                m_missingPluginCallbacks.append(missingPluginCallback);
                GUniquePtr<char> detail(gst_missing_plugin_message_get_installer_detail(message));
                GUniquePtr<char> description(gst_missing_plugin_message_get_description(message));
                m_player->client().requestInstallMissingPlugins(String::fromUTF8(detail.get()), String::fromUTF8(description.get()), *missingPluginCallback);
            }
        }
#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
        else if (GstMpegtsSection* section = gst_message_parse_mpegts_section(message)) {
            processMpegTsSection(section);
            gst_mpegts_section_unref(section);
        }
#endif
#if ENABLE(ENCRYPTED_MEDIA)
        else if (gst_structure_has_name(structure, "drm-key-needed")) {
            GST_DEBUG("drm-key-needed message from %s", GST_MESSAGE_SRC_NAME(message));
            GRefPtr<GstEvent> event;
            gst_structure_get(structure, "event", GST_TYPE_EVENT, &event.outPtr(), nullptr);
            handleProtectionEvent(event.get());
        } else if (gst_structure_has_name(structure, "decrypt-key-needed")) {
            GST_DEBUG("decrypt-key-needed message from %s", GST_MESSAGE_SRC_NAME(message));
            MediaPlayerPrivateGStreamerBase::dispatchCDMInstance();
        }
#endif
        else if (gst_structure_has_name(structure, "http-headers")) {
            // A response without Content-Length is treated as a live stream,
            // which disables on-disk download buffering.
            GstStructure* responseHeaders;
            if (gst_structure_get(structure, "response-headers", GST_TYPE_STRUCTURE, &responseHeaders, nullptr)) {
                if (!gst_structure_has_field(responseHeaders, httpHeaderNameString(HTTPHeaderName::ContentLength).utf8().data())) {
                    GST_INFO("Live stream detected. Disabling on-disk buffering");
                    m_isStreaming = true;
                    setDownloadBuffering();
                }
                gst_structure_free(responseHeaders);
            }
        } else
            GST_DEBUG("Unhandled element message: %" GST_PTR_FORMAT, structure);
        break;
#if ENABLE(VIDEO_TRACK)
    case GST_MESSAGE_TOC:
        processTableOfContents(message);
        break;
#endif
    case GST_MESSAGE_TAG: {
        // Only the image-orientation tag is consumed here, to rotate the
        // video source accordingly.
        GstTagList* tags = nullptr;
        GUniqueOutPtr<gchar> tag;
        gst_message_parse_tag(message, &tags);
        if (gst_tag_list_get_string(tags, GST_TAG_IMAGE_ORIENTATION, &tag.outPtr())) {
            if (!g_strcmp0(tag.get(), "rotate-90"))
                setVideoSourceOrientation(ImageOrientation(OriginRightTop));
            else if (!g_strcmp0(tag.get(), "rotate-180"))
                setVideoSourceOrientation(ImageOrientation(OriginBottomRight));
            else if (!g_strcmp0(tag.get(), "rotate-270"))
                setVideoSourceOrientation(ImageOrientation(OriginLeftBottom));
        }
        gst_tag_list_unref(tags);
        break;
    }
#if GST_CHECK_VERSION(1, 10, 0)
    case GST_MESSAGE_STREAMS_SELECTED: {
        // playbin3 path: record the new stream collection and remember which
        // stream of each type is currently selected.
        GRefPtr<GstStreamCollection> collection;
        gst_message_parse_streams_selected(message, &collection.outPtr());

        if (!collection)
            break;

        m_streamCollection.swap(collection);
        m_currentAudioStreamId = "";
        m_currentVideoStreamId = "";
        m_currentTextStreamId = "";

        unsigned length = gst_message_streams_selected_get_size(message);
        for (unsigned i = 0; i < length; i++) {
            GRefPtr<GstStream> stream = gst_message_streams_selected_get_stream(message, i);
            if (!stream)
                continue;

            GstStreamType type = gst_stream_get_stream_type(stream.get());
            String streamId(gst_stream_get_stream_id(stream.get()));

            GST_DEBUG("Selecting %s track with ID: %s", gst_stream_type_get_name(type), streamId.utf8().data());
            // Playbin3 can send more than one selected stream of the same type
            // but there's no priority or ordering system in place, so we assume
            // the selected stream is the last one as reported by playbin3.
            if (type & GST_STREAM_TYPE_AUDIO) {
                m_currentAudioStreamId = streamId;
                auto track = m_audioTracks.get(m_currentAudioStreamId);
                ASSERT(track);
                track->markAsActive();
            } else if (type & GST_STREAM_TYPE_VIDEO) {
                m_currentVideoStreamId = streamId;
                auto track = m_videoTracks.get(m_currentVideoStreamId);
                ASSERT(track);
                track->markAsActive();
            } else if (type & GST_STREAM_TYPE_TEXT)
                m_currentTextStreamId = streamId;
            else
                GST_WARNING("Unknown stream type with stream-id %s", streamId.utf8().data());
        }
        break;
    }
#endif
    default:
        GST_DEBUG("Unhandled GStreamer message type: %s", GST_MESSAGE_TYPE_NAME(message));
        break;
    }
}
1389
// Records the buffering percentage from a GST_MESSAGE_BUFFERING message.
// State is only re-evaluated at 100%; intermediate values just update the
// cached percentage for updateStates() to consume later.
void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message)
{
    m_buffering = true;
    gst_message_parse_buffering(message, &m_bufferingPercentage);

    GST_DEBUG("[Buffering] Buffering: %d%%.", m_bufferingPercentage);

    if (m_bufferingPercentage == 100)
        updateStates();
}
1400
1401 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
1402 void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section)
1403 {
1404     ASSERT(section);
1405
1406     if (section->section_type == GST_MPEGTS_SECTION_PMT) {
1407         const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
1408         m_metadataTracks.clear();
1409         for (guint i = 0; i < pmt->streams->len; ++i) {
1410             const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
1411             if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
1412                 AtomicString pid = String::number(stream->pid);
1413                 RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = InbandMetadataTextTrackPrivateGStreamer::create(
1414                     InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data, pid);
1415
1416                 // 4.7.10.12.2 Sourcing in-band text tracks
1417                 // If the new text track's kind is metadata, then set the text track in-band metadata track dispatch
1418                 // type as follows, based on the type of the media resource:
1419                 // Let stream type be the value of the "stream_type" field describing the text track's type in the
1420                 // file's program map section, interpreted as an 8-bit unsigned integer. Let length be the value of
1421                 // the "ES_info_length" field for the track in the same part of the program map section, interpreted
1422                 // as an integer as defined by the MPEG-2 specification. Let descriptor bytes be the length bytes
1423                 // following the "ES_info_length" field. The text track in-band metadata track dispatch type must be
1424                 // set to the concatenation of the stream type byte and the zero or more descriptor bytes bytes,
1425                 // expressed in hexadecimal using uppercase ASCII hex digits.
1426                 String inbandMetadataTrackDispatchType;
1427                 appendUnsignedAsHexFixedSize(stream->stream_type, inbandMetadataTrackDispatchType, 2);
1428                 for (guint j = 0; j < stream->descriptors->len; ++j) {
1429                     const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
1430                     for (guint k = 0; k < descriptor->length; ++k)
1431                         appendByteAsHex(descriptor->data[k], inbandMetadataTrackDispatchType);
1432                 }
1433                 track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType);
1434
1435                 m_metadataTracks.add(pid, track);
1436                 m_player->addTextTrack(*track);
1437             }
1438         }
1439     } else {
1440         AtomicString pid = String::number(section->pid);
1441         RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = m_metadataTracks.get(pid);
1442         if (!track)
1443             return;
1444
1445         GRefPtr<GBytes> data = gst_mpegts_section_get_data(section);
1446         gsize size;
1447         const void* bytes = g_bytes_get_data(data.get(), &size);
1448
1449         track->addDataCue(currentMediaTime(), currentMediaTime(), bytes, size);
1450     }
1451 }
1452 #endif
1453
1454 #if ENABLE(VIDEO_TRACK)
// Handles a GST_MESSAGE_TOC: replaces any previous chapters track with a
// fresh one and fills it with cues from the top-level TOC entries (child
// entries are handled recursively by processTableOfContentsEntry()).
void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
{
    if (m_chaptersTrack)
        m_player->removeTextTrack(*m_chaptersTrack);

    m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Chapters, InbandTextTrackPrivate::Generic);
    m_player->addTextTrack(*m_chaptersTrack);

    GRefPtr<GstToc> toc;
    gboolean updated;
    gst_message_parse_toc(message, &toc.outPtr(), &updated);
    ASSERT(toc);

    for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next)
        processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
}
1471
1472 void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry)
1473 {
1474     ASSERT(entry);
1475
1476     auto cue = GenericCueData::create();
1477
1478     gint64 start = -1, stop = -1;
1479     gst_toc_entry_get_start_stop_times(entry, &start, &stop);
1480     if (start != -1)
1481         cue->setStartTime(MediaTime(start, GST_SECOND));
1482     if (stop != -1)
1483         cue->setEndTime(MediaTime(stop, GST_SECOND));
1484
1485     GstTagList* tags = gst_toc_entry_get_tags(entry);
1486     if (tags) {
1487         gchar* title = nullptr;
1488         gst_tag_list_get_string(tags, GST_TAG_TITLE, &title);
1489         if (title) {
1490             cue->setContent(title);
1491             g_free(title);
1492         }
1493     }
1494
1495     m_chaptersTrack->addGenericCue(cue);
1496
1497     for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next)
1498         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1499 }
1500
1501 void MediaPlayerPrivateGStreamer::purgeInvalidAudioTracks(Vector<String> validTrackIds)
1502 {
1503     m_audioTracks.removeIf([validTrackIds](auto& keyAndValue) {
1504         return !validTrackIds.contains(keyAndValue.key);
1505     });
1506 }
1507
1508 void MediaPlayerPrivateGStreamer::purgeInvalidVideoTracks(Vector<String> validTrackIds)
1509 {
1510     m_videoTracks.removeIf([validTrackIds](auto& keyAndValue) {
1511         return !validTrackIds.contains(keyAndValue.key);
1512     });
1513 }
1514
1515 void MediaPlayerPrivateGStreamer::purgeInvalidTextTracks(Vector<String> validTrackIds)
1516 {
1517     m_textTracks.removeIf([validTrackIds](auto& keyAndValue) {
1518         return !validTrackIds.contains(keyAndValue.key);
1519     });
1520 }
1521 #endif
1522
1523 static gint findHLSQueue(gconstpointer a, gconstpointer)
1524 {
1525     GValue* item = static_cast<GValue*>(const_cast<gpointer>(a));
1526     GstElement* element = GST_ELEMENT(g_value_get_object(item));
1527     if (g_str_has_prefix(GST_ELEMENT_NAME(element), "queue")) {
1528         GstElement* parent = GST_ELEMENT(GST_ELEMENT_PARENT(element));
1529         if (!GST_IS_OBJECT(parent))
1530             return 1;
1531
1532         if (g_str_has_prefix(GST_ELEMENT_NAME(GST_ELEMENT_PARENT(parent)), "hlsdemux"))
1533             return 0;
1534     }
1535
1536     return 1;
1537 }
1538
// For live-ish HLS pipelines where the pipeline-level buffering query fails:
// locates the queue inside the hlsdemux bin and runs the query against it
// instead. Returns whether that element answered the query.
static bool isHLSProgressing(GstElement* playbin, GstQuery* query)
{
    GValue item = { };
    GstIterator* binIterator = gst_bin_iterate_recurse(GST_BIN(playbin));
    bool foundHLSQueue = gst_iterator_find_custom(binIterator, reinterpret_cast<GCompareFunc>(findHLSQueue), &item, nullptr);
    gst_iterator_free(binIterator);

    if (!foundHLSQueue)
        return false;

    // The found GValue holds a ref on the element; g_value_unset releases it.
    GstElement* queueElement = GST_ELEMENT(g_value_get_object(&item));
    bool queryResult = gst_element_query(queueElement, query);
    g_value_unset(&item);

    return queryResult;
}
1555
// Periodic timer that tracks download-buffer progress via a percent
// buffering query, updating m_maxTimeLoaded. Stops itself once the media is
// fully downloaded.
void MediaPlayerPrivateGStreamer::fillTimerFired()
{
    GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);

    if (G_UNLIKELY(!gst_element_query(m_pipeline.get(), query))) {
        // This query always fails for live pipelines. In the case of HLS, try and find
        // the queue inside the HLS element to get a proxy measure of progress. Note
        // that the percentage value is rather meaningless as used below.
        // This is a hack, see https://bugs.webkit.org/show_bug.cgi?id=141469.
        if (!isHLSProgressing(m_pipeline.get(), query)) {
            gst_query_unref(query);
            return;
        }
    }

    gint64 start, stop;
    gdouble fillStatus = 100.0;

    gst_query_parse_buffering_range(query, nullptr, &start, &stop, nullptr);
    gst_query_unref(query);

    // stop == -1 means no known stop position; assume fully filled.
    if (stop != -1)
        fillStatus = 100.0 * stop / GST_FORMAT_PERCENT_MAX;

    GST_DEBUG("[Buffering] Download buffer filled up to %f%%", fillStatus);

    MediaTime mediaDuration = durationMediaTime();

    // Update maxTimeLoaded only if the media duration is
    // available. Otherwise we can't compute it.
    if (mediaDuration) {
        if (fillStatus == 100.0)
            m_maxTimeLoaded = mediaDuration;
        else
            m_maxTimeLoaded = MediaTime(fillStatus * static_cast<double>(toGstUnsigned64Time(mediaDuration)) / 100, GST_SECOND);
        GST_DEBUG("[Buffering] Updated maxTimeLoaded: %s", toString(m_maxTimeLoaded).utf8().data());
    }

    m_downloadFinished = fillStatus == 100.0;
    if (!m_downloadFinished) {
        updateStates();
        return;
    }

    // Media is now fully loaded. It will play even if network
    // connection is cut. Buffering is done, remove the fill source
    // from the main loop.
    m_fillTimer.stop();
    updateStates();
}
1606
1607 MediaTime MediaPlayerPrivateGStreamer::maxMediaTimeSeekable() const
1608 {
1609     if (m_errorOccured)
1610         return MediaTime::zeroTime();
1611
1612     MediaTime duration = durationMediaTime();
1613     GST_DEBUG("maxMediaTimeSeekable, duration: %s", toString(duration).utf8().data());
1614     // infinite duration means live stream
1615     if (duration.isPositiveInfinite())
1616         return MediaTime::zeroTime();
1617
1618     return duration;
1619 }
1620
1621 MediaTime MediaPlayerPrivateGStreamer::maxTimeLoaded() const
1622 {
1623     if (m_errorOccured)
1624         return MediaTime::zeroTime();
1625
1626     MediaTime loaded = m_maxTimeLoaded;
1627     if (m_isEndReached)
1628         loaded = durationMediaTime();
1629     GST_LOG("maxTimeLoaded: %s", toString(loaded).utf8().data());
1630     return loaded;
1631 }
1632
1633 bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
1634 {
1635     if (m_errorOccured || m_loadingStalled)
1636         return false;
1637
1638     if (isLiveStream())
1639         return true;
1640
1641     if (UNLIKELY(!m_pipeline || !durationMediaTime() || (!isMediaSource() && !totalBytes())))
1642         return false;
1643
1644     MediaTime currentMaxTimeLoaded = maxTimeLoaded();
1645     bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
1646     m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
1647     GST_LOG("didLoadingProgress: %s", toString(didLoadingProgress).utf8().data());
1648     return didLoadingProgress;
1649 }
1650
// Returns the total media size in bytes, caching it in m_totalBytes.
// Returns 0 in error states, before a source element exists, or for live
// streams (which have no fixed size). Also updates m_isStreaming as a side
// effect: a zero-length result is taken to indicate streaming.
unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
{
    if (m_errorOccured)
        return 0;

    // A previous successful query is cached; return it directly.
    if (m_totalBytes)
        return m_totalBytes;

    if (!m_source)
        return 0;

    if (isLiveStream())
        return 0;

    GstFormat fmt = GST_FORMAT_BYTES;
    gint64 length = 0;
    if (gst_element_query_duration(m_source.get(), fmt, &length)) {
        GST_INFO("totalBytes %" G_GINT64_FORMAT, length);
        m_totalBytes = static_cast<unsigned long long>(length);
        m_isStreaming = !length;
        return m_totalBytes;
    }

    // Fall back to querying the source pads manually.
    // See also https://bugzilla.gnome.org/show_bug.cgi?id=638749
    GstIterator* iter = gst_element_iterate_src_pads(m_source.get());
    bool done = false;
    while (!done) {
        GValue item = G_VALUE_INIT;
        switch (gst_iterator_next(iter, &item)) {
        case GST_ITERATOR_OK: {
            GstPad* pad = static_cast<GstPad*>(g_value_get_object(&item));
            gint64 padLength = 0;
            // Keep the largest duration reported across all source pads.
            if (gst_pad_query_duration(pad, fmt, &padLength) && padLength > length)
                length = padLength;
            break;
        }
        case GST_ITERATOR_RESYNC:
            // Underlying element set changed; restart the iteration.
            gst_iterator_resync(iter);
            break;
        case GST_ITERATOR_ERROR:
            FALLTHROUGH;
        case GST_ITERATOR_DONE:
            done = true;
            break;
        }

        g_value_unset(&item);
    }

    gst_iterator_free(iter);

    GST_INFO("totalBytes %" G_GINT64_FORMAT, length);
    m_totalBytes = static_cast<unsigned long long>(length);
    m_isStreaming = !length;
    return m_totalBytes;
}
1708
// C-style signal thunk (presumably connected to playbin's "source-setup"
// signal — the connection site is elsewhere in this file); forwards to the
// player's sourceSetup() member.
void MediaPlayerPrivateGStreamer::sourceSetupCallback(MediaPlayerPrivateGStreamer* player, GstElement* sourceElement)
{
    player->sourceSetup(sourceElement);
}
1713
// "element-added" handler installed on the uridecodebin (see sourceSetup()).
// Waits for the GstDownloadBuffer element, points its temp-file template at
// /var/tmp, and purges files left behind under the previous template.
void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GstElement* element, MediaPlayerPrivateGStreamer* player)
{
    // Only the download buffer element is of interest; ignore everything else.
    if (g_strcmp0(G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(G_OBJECT(element))), "GstDownloadBuffer"))
        return;

    player->m_downloadBuffer = element;
    // Found what we were waiting for: stop watching the bin.
    g_signal_handlers_disconnect_by_func(bin, reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), player);
    // Be notified as soon as the element creates its backing file.
    g_signal_connect_swapped(element, "notify::temp-location", G_CALLBACK(downloadBufferFileCreatedCallback), player);

    GUniqueOutPtr<char> oldDownloadTemplate;
    g_object_get(element, "temp-template", &oldDownloadTemplate.outPtr(), nullptr);

    // Redirect media downloads to /var/tmp/WebKit-Media-XXXXXX.
    GUniquePtr<char> newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, "var", "tmp", "WebKit-Media-XXXXXX", nullptr));
    g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr);
    GST_DEBUG("Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get());

    // Clean up any stale files matching the template we just replaced.
    player->purgeOldDownloadFiles(oldDownloadTemplate.get());
}
1732
// Fired (once) when the download buffer reports notify::temp-location, i.e.
// its temporary file has been created. The file is unlinked immediately.
// NOTE(review): presumably the element keeps using its already-open file
// descriptor (the usual unlink-while-open pattern so the file never outlives
// the process) — confirm against GstDownloadBuffer behavior.
void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer* player)
{
    ASSERT(player->m_downloadBuffer);

    // One-shot: disconnect ourselves before doing anything else.
    g_signal_handlers_disconnect_by_func(player->m_downloadBuffer.get(), reinterpret_cast<gpointer>(downloadBufferFileCreatedCallback), player);

    GUniqueOutPtr<char> downloadFile;
    g_object_get(player->m_downloadBuffer.get(), "temp-location", &downloadFile.outPtr(), nullptr);
    player->m_downloadBuffer = nullptr;

    if (UNLIKELY(!FileSystem::deleteFile(downloadFile.get()))) {
        GST_WARNING("Couldn't unlink media temporary file %s after creation", downloadFile.get());
        return;
    }

    GST_DEBUG("Unlinked media temporary file %s after creation", downloadFile.get());
}
1750
1751 void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate)
1752 {
1753     if (!downloadFileTemplate)
1754         return;
1755
1756     GUniquePtr<char> templatePath(g_path_get_dirname(downloadFileTemplate));
1757     GUniquePtr<char> templateFile(g_path_get_basename(downloadFileTemplate));
1758     String templatePattern = String(templateFile.get()).replace("X", "?");
1759
1760     for (auto& filePath : FileSystem::listDirectory(templatePath.get(), templatePattern)) {
1761         if (UNLIKELY(!FileSystem::deleteFile(filePath))) {
1762             GST_WARNING("Couldn't unlink legacy media temporary file: %s", filePath.utf8().data());
1763             continue;
1764         }
1765
1766         GST_TRACE("Unlinked legacy media temporary file: %s", filePath.utf8().data());
1767     }
1768 }
1769
// Stores the pipeline's freshly created source element and applies
// WebKit-specific configuration to it (media player hookup for the WebKit
// source, stream hookup for mediastreamsrc).
void MediaPlayerPrivateGStreamer::sourceSetup(GstElement* sourceElement)
{
    GST_DEBUG("Source element set-up for %s", GST_ELEMENT_NAME(sourceElement));

    // If a previous WebKit source was already parented, detach the
    // element-added watch we registered on that parent (see below).
    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    m_source = sourceElement;

    if (WEBKIT_IS_WEB_SRC(m_source.get())) {
        // Give the WebKit source access to the MediaPlayer and watch its
        // parent bin so the download buffer can be intercepted when added.
        webKitWebSrcSetMediaPlayer(WEBKIT_WEB_SRC(m_source.get()), m_player);
        g_signal_connect(GST_ELEMENT_PARENT(m_source.get()), "element-added", G_CALLBACK(uriDecodeBinElementAddedCallback), this);
#if ENABLE(MEDIA_STREAM) && GST_CHECK_VERSION(1, 10, 0)
    } else if (WEBKIT_IS_MEDIA_STREAM_SRC(sourceElement)) {
        auto stream = m_streamPrivate.get();
        ASSERT(stream);
        webkitMediaStreamSrcSetStream(WEBKIT_MEDIA_STREAM_SRC(sourceElement), stream);
#endif
    }
}
1790
1791 bool MediaPlayerPrivateGStreamer::hasSingleSecurityOrigin() const
1792 {
1793     if (!m_source)
1794         return false;
1795
1796     if (!WEBKIT_IS_WEB_SRC(m_source.get()))
1797         return true;
1798
1799     GUniqueOutPtr<char> originalURI, resolvedURI;
1800     g_object_get(m_source.get(), "location", &originalURI.outPtr(), "resolved-location", &resolvedURI.outPtr(), nullptr);
1801     if (!originalURI || !resolvedURI)
1802         return false;
1803     if (!g_strcmp0(originalURI.get(), resolvedURI.get()))
1804         return true;
1805
1806     Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::createFromString(String::fromUTF8(resolvedURI.get())));
1807     Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(String::fromUTF8(originalURI.get())));
1808     return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
1809 }
1810
1811 void MediaPlayerPrivateGStreamer::cancelLoad()
1812 {
1813     if (m_networkState < MediaPlayer::Loading || m_networkState == MediaPlayer::Loaded)
1814         return;
1815
1816     if (m_pipeline)
1817         changePipelineState(GST_STATE_READY);
1818 }
1819
// Called when the pipeline finishes an asynchronous state change. Completes
// an in-flight seek (replaying an overlapping one if needed) or simply
// refreshes the derived player states.
void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
{
    if (!m_pipeline || m_errorOccured)
        return;

    if (m_seeking) {
        if (m_seekIsPending)
            updateStates();
        else {
            GST_DEBUG("[Seek] seeked to %s", toString(m_seekTime).utf8().data());
            m_seeking = false;
            // A seek issued while the previous one was still in flight is
            // replayed now, unless it targets the position we just reached.
            if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek.isValid()) {
                seek(m_timeOfOverlappingSeek);
                m_timeOfOverlappingSeek = MediaTime::invalidTime();
                return;
            }
            m_timeOfOverlappingSeek = MediaTime::invalidTime();

            // The pipeline can still have a pending state. In this case a position query will fail.
            // Right now we can use m_seekTime as a fallback.
            m_canFallBackToLastFinishedSeekPosition = true;
            timeChanged();
        }
    } else
        updateStates();
}
1846
// Central state synchronizer: derives the MediaPlayer network/ready states
// from the pipeline's current GStreamer state, drives buffering-related
// PAUSED<->PLAYING transitions, notifies the client of state changes, and
// commits any pending seek once the pipeline can accept it.
void MediaPlayerPrivateGStreamer::updateStates()
{
    if (!m_pipeline)
        return;

    if (m_errorOccured)
        return;

    MediaPlayer::NetworkState oldNetworkState = m_networkState;
    MediaPlayer::ReadyState oldReadyState = m_readyState;
    GstState pending;
    GstState state;
    bool stateReallyChanged = false;

    // Near-non-blocking state query (250 ns timeout).
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
    if (state != m_currentState) {
        m_oldState = m_currentState;
        m_currentState = state;
        stateReallyChanged = true;
    }

    bool shouldUpdatePlaybackState = false;
    switch (getStateResult) {
    case GST_STATE_CHANGE_SUCCESS: {
        GST_DEBUG("State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));

        // Do nothing if on EOS and state changed to READY to avoid recreating the player
        // on HTMLMediaElement and properly generate the video 'ended' event.
        if (m_isEndReached && m_currentState == GST_STATE_READY)
            break;

        m_resetPipeline = m_currentState <= GST_STATE_READY;

        // Remember whether we were buffering before this update; used below
        // to detect the buffering-just-completed transition.
        bool didBuffering = m_buffering;

        // Update ready and network states.
        switch (m_currentState) {
        case GST_STATE_NULL:
            m_readyState = MediaPlayer::HaveNothing;
            m_networkState = MediaPlayer::Empty;
            break;
        case GST_STATE_READY:
            m_readyState = MediaPlayer::HaveMetadata;
            m_networkState = MediaPlayer::Empty;
            break;
        case GST_STATE_PAUSED:
        case GST_STATE_PLAYING:
            if (m_buffering) {
                if (m_bufferingPercentage == 100) {
                    GST_DEBUG("[Buffering] Complete.");
                    m_buffering = false;
                    m_readyState = MediaPlayer::HaveEnoughData;
                    m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
                } else {
                    m_readyState = MediaPlayer::HaveCurrentData;
                    m_networkState = MediaPlayer::Loading;
                }
            } else if (m_downloadFinished) {
                m_readyState = MediaPlayer::HaveEnoughData;
                m_networkState = MediaPlayer::Loaded;
            } else {
                m_readyState = MediaPlayer::HaveFutureData;
                m_networkState = MediaPlayer::Loading;
            }

            break;
        default:
            ASSERT_NOT_REACHED();
            break;
        }

        // Sync states where needed.
        if (m_currentState == GST_STATE_PAUSED) {
            // First time reaching PAUSED: propagate initial volume/mute.
            if (!m_volumeAndMuteInitialized) {
                notifyPlayerOfVolumeChange();
                notifyPlayerOfMute();
                m_volumeAndMuteInitialized = true;
            }

            // Buffering just finished while playback was expected: resume.
            if (didBuffering && !m_buffering && !m_paused && m_playbackRate) {
                GST_DEBUG("[Buffering] Restarting playback.");
                changePipelineState(GST_STATE_PLAYING);
            }
        } else if (m_currentState == GST_STATE_PLAYING) {
            m_paused = false;

            // Pause for (non-live) buffering or a zero playback rate.
            if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
                GST_DEBUG("[Buffering] Pausing stream for buffering.");
                changePipelineState(GST_STATE_PAUSED);
            }
        } else
            m_paused = true;

        GST_DEBUG("Old state: %s, new state: %s (requested: %s)", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState), gst_element_state_get_name(m_requestedState));
        if (m_requestedState == GST_STATE_PAUSED && m_currentState == GST_STATE_PAUSED) {
            shouldUpdatePlaybackState = true;
            GST_INFO("Requested state change to %s was completed", gst_element_state_get_name(m_currentState));
        }

        // Emit play state change notification only when going to PLAYING so that
        // the media element gets a chance to enable its page sleep disabler.
        // Emitting this notification in more cases triggers unwanted code paths
        // and test timeouts.
        if (stateReallyChanged && (m_oldState != m_currentState) && (m_oldState == GST_STATE_PAUSED && m_currentState == GST_STATE_PLAYING)) {
            GST_INFO("Playback state changed from %s to %s. Notifying the media player client", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState));
            shouldUpdatePlaybackState = true;
        }

        break;
    }
    case GST_STATE_CHANGE_ASYNC:
        GST_DEBUG("Async: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
        // Change in progress.
        break;
    case GST_STATE_CHANGE_FAILURE:
        GST_DEBUG("Failure: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
        // Change failed
        return;
    case GST_STATE_CHANGE_NO_PREROLL:
        GST_DEBUG("No preroll: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));

        // Live pipelines go in PAUSED without prerolling.
        m_isStreaming = true;
        setDownloadBuffering();

        if (m_currentState == GST_STATE_READY)
            m_readyState = MediaPlayer::HaveNothing;
        else if (m_currentState == GST_STATE_PAUSED) {
            m_readyState = MediaPlayer::HaveEnoughData;
            m_paused = true;
        } else if (m_currentState == GST_STATE_PLAYING)
            m_paused = false;

        if (!m_paused && m_playbackRate)
            changePipelineState(GST_STATE_PLAYING);

        m_networkState = MediaPlayer::Loading;
        break;
    default:
        GST_DEBUG("Else : %d", getStateResult);
        break;
    }

    m_requestedState = GST_STATE_VOID_PENDING;

    if (shouldUpdatePlaybackState)
        m_player->playbackStateChanged();

    if (m_networkState != oldNetworkState) {
        GST_DEBUG("Network State Changed from %s to %s", convertEnumerationToString(oldNetworkState).utf8().data(), convertEnumerationToString(m_networkState).utf8().data());
        m_player->networkStateChanged();
    }
    if (m_readyState != oldReadyState) {
        GST_DEBUG("Ready State Changed from %s to %s", convertEnumerationToString(oldReadyState).utf8().data(), convertEnumerationToString(m_readyState).utf8().data());
        m_player->readyStateChanged();
    }

    // A seek requested while the pipeline wasn't ready can be committed now.
    if (getStateResult == GST_STATE_CHANGE_SUCCESS && m_currentState >= GST_STATE_PAUSED) {
        updatePlaybackRate();
        if (m_seekIsPending) {
            GST_DEBUG("[Seek] committing pending seek to %s", toString(m_seekTime).utf8().data());
            m_seekIsPending = false;
            m_seeking = doSeek(m_seekTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
            if (!m_seeking)
                GST_DEBUG("[Seek] seeking to %s failed", toString(m_seekTime).utf8().data());
        }
    }
}
2015
// Synchronous bus message hook. For the modern (non-legacy) playbin, stream
// collection messages are captured here and the track rebuild is deferred to
// the main thread via the notifier; everything else goes to the base class.
bool MediaPlayerPrivateGStreamer::handleSyncMessage(GstMessage* message)
{
#if GST_CHECK_VERSION(1, 10, 0)
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_STREAM_COLLECTION && !m_isLegacyPlaybin) {
        GRefPtr<GstStreamCollection> collection;
        gst_message_parse_stream_collection(message, &collection.outPtr());

        if (collection) {
            // Keep only the most recent collection; updateTracks() runs on
            // the main thread and reads m_streamCollection.
            m_streamCollection.swap(collection);
            m_notifier->notify(MainThreadNotification::StreamCollectionChanged, [this] {
                this->updateTracks();
            });
        }
    }
#endif

    return MediaPlayerPrivateGStreamerBase::handleSyncMessage(message);
}
2034
2035 void MediaPlayerPrivateGStreamer::mediaLocationChanged(GstMessage* message)
2036 {
2037     if (m_mediaLocations)
2038         gst_structure_free(m_mediaLocations);
2039
2040     const GstStructure* structure = gst_message_get_structure(message);
2041     if (structure) {
2042         // This structure can contain:
2043         // - both a new-location string and embedded locations structure
2044         // - or only a new-location string.
2045         m_mediaLocations = gst_structure_copy(structure);
2046         const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2047
2048         if (locations)
2049             m_mediaLocationCurrentIndex = static_cast<int>(gst_value_list_get_size(locations)) -1;
2050
2051         loadNextLocation();
2052     }
2053 }
2054
2055 bool MediaPlayerPrivateGStreamer::loadNextLocation()
2056 {
2057     if (!m_mediaLocations)
2058         return false;
2059
2060     const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2061     const gchar* newLocation = nullptr;
2062
2063     if (!locations) {
2064         // Fallback on new-location string.
2065         newLocation = gst_structure_get_string(m_mediaLocations, "new-location");
2066         if (!newLocation)
2067             return false;
2068     }
2069
2070     if (!newLocation) {
2071         if (m_mediaLocationCurrentIndex < 0) {
2072             m_mediaLocations = nullptr;
2073             return false;
2074         }
2075
2076         const GValue* location = gst_value_list_get_value(locations, m_mediaLocationCurrentIndex);
2077         const GstStructure* structure = gst_value_get_structure(location);
2078
2079         if (!structure) {
2080             m_mediaLocationCurrentIndex--;
2081             return false;
2082         }
2083
2084         newLocation = gst_structure_get_string(structure, "new-location");
2085     }
2086
2087     if (newLocation) {
2088         // Found a candidate. new-location is not always an absolute url
2089         // though. We need to take the base of the current url and
2090         // append the value of new-location to it.
2091         URL baseUrl = gst_uri_is_valid(newLocation) ? URL() : m_url;
2092         URL newUrl = URL(baseUrl, newLocation);
2093         convertToInternalProtocol(newUrl);
2094
2095         RefPtr<SecurityOrigin> securityOrigin = SecurityOrigin::create(m_url);
2096         if (securityOrigin->canRequest(newUrl)) {
2097             GST_INFO("New media url: %s", newUrl.string().utf8().data());
2098
2099             // Reset player states.
2100             m_networkState = MediaPlayer::Loading;
2101             m_player->networkStateChanged();
2102             m_readyState = MediaPlayer::HaveNothing;
2103             m_player->readyStateChanged();
2104
2105             // Reset pipeline state.
2106             m_resetPipeline = true;
2107             changePipelineState(GST_STATE_READY);
2108
2109             GstState state;
2110             gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
2111             if (state <= GST_STATE_READY) {
2112                 // Set the new uri and start playing.
2113                 setPlaybinURL(newUrl);
2114                 changePipelineState(GST_STATE_PLAYING);
2115                 return true;
2116             }
2117         } else
2118             GST_INFO("Not allowed to load new media location: %s", newUrl.string().utf8().data());
2119     }
2120     m_mediaLocationCurrentIndex--;
2121     return false;
2122 }
2123
// Bus notification that load-related pipeline state changed; recompute the
// derived network/ready states.
void MediaPlayerPrivateGStreamer::loadStateChanged()
{
    updateStates();
}
2128
// Refreshes internal states, then tells the client that the current playback
// position changed (used e.g. after seek completion and at EOS).
void MediaPlayerPrivateGStreamer::timeChanged()
{
    updateStates();
    m_player->timeChanged();
}
2134
// Handles end-of-stream: synchronizes position/duration, flags EOS, and —
// when not looping — parks the pipeline in READY.
void MediaPlayerPrivateGStreamer::didEnd()
{
    GST_INFO("Playback ended");

    // Synchronize position and duration values to not confuse the
    // HTMLMediaElement. In some cases like reverse playback the
    // position is not always reported as 0 for instance.
    MediaTime now = currentMediaTime();
    if (now > MediaTime { } && now <= durationMediaTime())
        m_player->durationChanged();

    m_isEndReached = true;
    timeChanged();

    if (!m_player->client().mediaPlayerIsLooping()) {
        m_paused = true;
        // Cache the duration at EOS — presumably because duration queries
        // can fail once the pipeline is dropped back to READY; confirm.
        m_durationAtEOS = durationMediaTime();
        changePipelineState(GST_STATE_READY);
        m_downloadFinished = false;
    }
}
2156
2157 void MediaPlayerPrivateGStreamer::durationChanged()
2158 {
2159     MediaTime previousDuration = durationMediaTime();
2160
2161     // FIXME: Check if this method is still useful, because it's not doing its work at all
2162     // since bug #159458 removed a cacheDuration() call here.
2163
2164     // Avoid emiting durationchanged in the case where the previous
2165     // duration was 0 because that case is already handled by the
2166     // HTMLMediaElement.
2167     if (previousDuration && durationMediaTime() != previousDuration)
2168         m_player->durationChanged();
2169 }
2170
2171 void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState error)
2172 {
2173     GST_WARNING("Loading failed, error: %d", error);
2174
2175     m_errorOccured = true;
2176     if (m_networkState != error) {
2177         m_networkState = error;
2178         m_player->networkStateChanged();
2179     }
2180     if (m_readyState != MediaPlayer::HaveNothing) {
2181         m_readyState = MediaPlayer::HaveNothing;
2182         m_player->readyStateChanged();
2183     }
2184
2185     // Loading failed, remove ready timer.
2186     m_readyTimerHandler.stop();
2187 }
2188
// Builds (once, lazily) the set of MIME types this engine can play, by
// probing the GStreamer registry for decoders/demuxers handling each caps
// string and mapping them to the corresponding web MIME types.
static HashSet<String, ASCIICaseInsensitiveHash>& mimeTypeSet()
{
    static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> mimeTypes = []()
    {
        MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements();
        HashSet<String, ASCIICaseInsensitiveHash> set;

        // Factories at or above MARGINAL rank, per element category.
        GList* audioDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO, GST_RANK_MARGINAL);
        GList* videoDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, GST_RANK_MARGINAL);
        GList* demuxerFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DEMUXER, GST_RANK_MARGINAL);

        enum ElementType {
            AudioDecoder = 0,
            VideoDecoder,
            Demuxer
        };
        // One registry caps string mapped to the MIME types it enables; an
        // empty list means the caps string doubles as the MIME type.
        struct GstCapsWebKitMapping {
            ElementType elementType;
            const char* capsString;
            Vector<AtomicString> webkitMimeTypes;
        };

        Vector<GstCapsWebKitMapping> mapping = {
            {AudioDecoder, "audio/midi", {"audio/midi", "audio/riff-midi"}},
            {AudioDecoder, "audio/x-sbc", { }},
            {AudioDecoder, "audio/x-sid", { }},
            {AudioDecoder, "audio/x-flac", {"audio/x-flac", "audio/flac"}},
            {AudioDecoder, "audio/x-wav", {"audio/x-wav", "audio/wav", "audio/vnd.wave"}},
            {AudioDecoder, "audio/x-wavpack", {"audio/x-wavpack"}},
            {AudioDecoder, "audio/x-speex", {"audio/speex", "audio/x-speex"}},
            {AudioDecoder, "audio/x-ac3", { }},
            {AudioDecoder, "audio/x-eac3", {"audio/x-ac3"}},
            {AudioDecoder, "audio/x-dts", { }},
            {VideoDecoder, "video/x-h264, profile=(string)high", {"video/mp4", "video/x-m4v"}},
            {VideoDecoder, "video/x-msvideocodec", {"video/x-msvideo"}},
            {VideoDecoder, "video/x-h263", { }},
            {VideoDecoder, "video/mpegts", { }},
            {VideoDecoder, "video/mpeg, mpegversion=(int){1,2}, systemstream=(boolean)false", {"video/mpeg"}},
            {VideoDecoder, "video/x-dirac", { }},
            {VideoDecoder, "video/x-flash-video", {"video/flv", "video/x-flv"}},
            {Demuxer, "video/quicktime", { }},
            {Demuxer, "video/quicktime, variant=(string)3gpp", {"video/3gpp"}},
            {Demuxer, "application/x-3gp", { }},
            {Demuxer, "video/x-ms-asf", { }},
            {Demuxer, "audio/x-aiff", { }},
            {Demuxer, "application/x-pn-realaudio", { }},
            {Demuxer, "application/vnd.rn-realmedia", { }},
            {Demuxer, "audio/x-wav", {"audio/x-wav", "audio/wav", "audio/vnd.wave"}},
            {Demuxer, "application/x-hls", {"application/vnd.apple.mpegurl", "application/x-mpegurl"}}
        };

        for (auto& current : mapping) {
            GList* factories = demuxerFactories;
            if (current.elementType == AudioDecoder)
                factories = audioDecoderFactories;
            else if (current.elementType == VideoDecoder)
                factories = videoDecoderFactories;

            if (gstRegistryHasElementForMediaType(factories, current.capsString)) {
                if (!current.webkitMimeTypes.isEmpty()) {
                    for (const auto& mimeType : current.webkitMimeTypes)
                        set.add(mimeType);
                } else
                    set.add(AtomicString(current.capsString));
            }
        }

        // Codec-dependent entries (opus/vorbis support feeds into webm below).
        bool opusSupported = false;
        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-opus")) {
            opusSupported = true;
            set.add(AtomicString("audio/opus"));
        }

        bool vorbisSupported = false;
        if (gstRegistryHasElementForMediaType(demuxerFactories, "application/ogg")) {
            set.add(AtomicString("application/ogg"));

            vorbisSupported = gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-vorbis");
            if (vorbisSupported) {
                set.add(AtomicString("audio/ogg"));
                set.add(AtomicString("audio/x-vorbis+ogg"));
            }

            if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-theora"))
                set.add(AtomicString("video/ogg"));
        }

        bool audioMpegSupported = false;
        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int)1, layer=(int)[1, 3]")) {
            audioMpegSupported = true;
            set.add(AtomicString("audio/mp1"));
            set.add(AtomicString("audio/mp3"));
            set.add(AtomicString("audio/x-mp3"));
        }

        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int){2, 4}")) {
            audioMpegSupported = true;
            set.add(AtomicString("audio/aac"));
            set.add(AtomicString("audio/mp2"));
            set.add(AtomicString("audio/mp4"));
            set.add(AtomicString("audio/x-m4a"));
        }

        if (audioMpegSupported) {
            set.add(AtomicString("audio/mpeg"));
            set.add(AtomicString("audio/x-mpeg"));
        }

        if (gstRegistryHasElementForMediaType(demuxerFactories, "video/x-matroska")) {
            set.add(AtomicString("video/x-matroska"));

            if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp8")
                || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp9")
                || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp10"))
                set.add(AtomicString("video/webm"));

            if (vorbisSupported || opusSupported)
                set.add(AtomicString("audio/webm"));
        }

        gst_plugin_feature_list_free(audioDecoderFactories);
        gst_plugin_feature_list_free(videoDecoderFactories);
        gst_plugin_feature_list_free(demuxerFactories);
        return set;
    }();
    return mimeTypes;
}
2316
// MediaPlayer engine entry point: copies the set of container MIME types
// playable with the currently installed GStreamer plugins into |types|.
// The set itself is computed once and cached by mimeTypeSet().
void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    types = mimeTypeSet();
}
2321
2322 MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::supportsType(const MediaEngineSupportParameters& parameters)
2323 {
2324     MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
2325 #if ENABLE(MEDIA_SOURCE)
2326     // MediaPlayerPrivateGStreamerMSE is in charge of mediasource playback, not us.
2327     if (parameters.isMediaSource)
2328         return result;
2329 #endif
2330
2331 #if !ENABLE(MEDIA_STREAM) || !GST_CHECK_VERSION(1, 10, 0)
2332     if (parameters.isMediaStream)
2333         return result;
2334 #endif
2335
2336     if (parameters.type.isEmpty())
2337         return result;
2338
2339     // spec says we should not return "probably" if the codecs string is empty
2340     if (mimeTypeSet().contains(parameters.type.containerType()))
2341         result = parameters.type.codecs().isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;
2342
2343     return extendedSupportsType(parameters, result);
2344 }
2345
2346 void MediaPlayerPrivateGStreamer::setDownloadBuffering()
2347 {
2348     if (!m_pipeline)
2349         return;
2350
2351     unsigned flags;
2352     g_object_get(m_pipeline.get(), "flags", &flags, nullptr);
2353
2354     unsigned flagDownload = getGstPlayFlag("download");
2355
2356     // We don't want to stop downloading if we already started it.
2357     if (flags & flagDownload && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline) {
2358         GST_DEBUG("Download already started, not starting again");
2359         return;
2360     }
2361
2362     bool shouldDownload = !isLiveStream() && m_preload == MediaPlayer::Auto;
2363     if (shouldDownload) {
2364         GST_INFO("Enabling on-disk buffering");
2365         g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
2366         m_fillTimer.startRepeating(200_ms);
2367     } else {
2368         GST_INFO("Disabling on-disk buffering");
2369         g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr);
2370         m_fillTimer.stop();
2371     }
2372 }
2373
2374 void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
2375 {
2376     GST_DEBUG("Setting preload to %s", convertEnumerationToString(preload).utf8().data());
2377     if (preload == MediaPlayer::Auto && isLiveStream())
2378         return;
2379
2380     m_preload = preload;
2381     setDownloadBuffering();
2382
2383     if (m_delayingLoad && m_preload != MediaPlayer::None) {
2384         m_delayingLoad = false;
2385         commitLoad();
2386     }
2387 }
2388
// Builds the element handed to playbin's "audio-sink" property. Depending on
// GStreamer version and build flags this is either the bare autoaudiosink or
// a bin wrapping it (for Web Audio capture and/or pitch-preserving playback).
// Returned elements carry a floating reference that playbin will sink;
// m_autoAudioSink keeps its own reference for the fallback return paths.
GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
{
    m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr);
    if (!m_autoAudioSink) {
        GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation");
        return nullptr;
    }

    // Lets us tweak the properties of the concrete sink autoaudiosink creates.
    g_signal_connect_swapped(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);

    GstElement* audioSinkBin;

    if (webkitGstCheckVersion(1, 4, 2)) {
#if ENABLE(WEB_AUDIO)
        // On >= 1.4.2 pitch preservation is handled via playbin's audio-filter
        // property (see createGSTPlayBin), so the bin only needs the Web Audio tap.
        audioSinkBin = gst_bin_new("audio-sink");
        ensureAudioSourceProvider();
        m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
        return audioSinkBin;
#else
        return m_autoAudioSink.get();
#endif
    }

    // Construct audio sink only if pitch preserving is enabled.
    // If GStreamer 1.4.2 is used the audio-filter playbin property is used instead.
    if (m_preservesPitch) {
        GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
        if (!scale) {
            GST_WARNING("Failed to create scaletempo");
            return m_autoAudioSink.get();
        }

        // Wrap scaletempo in a bin and expose its sink pad as the bin's pad.
        audioSinkBin = gst_bin_new("audio-sink");
        gst_bin_add(GST_BIN(audioSinkBin), scale);
        GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(scale, "sink"));
        gst_element_add_pad(audioSinkBin, gst_ghost_pad_new("sink", pad.get()));

#if ENABLE(WEB_AUDIO)
        ensureAudioSourceProvider();
        m_audioSourceProvider->configureAudioBin(audioSinkBin, scale);
#else
        // No Web Audio: link scale -> convert -> resample -> autoaudiosink manually.
        GstElement* convert = gst_element_factory_make("audioconvert", nullptr);
        GstElement* resample = gst_element_factory_make("audioresample", nullptr);

        gst_bin_add_many(GST_BIN(audioSinkBin), convert, resample, m_autoAudioSink.get(), nullptr);

        if (!gst_element_link_many(scale, convert, resample, m_autoAudioSink.get(), nullptr)) {
            GST_WARNING("Failed to link audio sink elements");
            // Drop the half-built bin (floating ref) and fall back to the bare sink.
            gst_object_unref(audioSinkBin);
            return m_autoAudioSink.get();
        }
#endif
        return audioSinkBin;
    }

#if ENABLE(WEB_AUDIO)
    // Old GStreamer, no pitch preservation: still need the Web Audio tap bin.
    audioSinkBin = gst_bin_new("audio-sink");
    ensureAudioSourceProvider();
    m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
    return audioSinkBin;
#endif
    // Without WEB_AUDIO the webkitGstCheckVersion() branch above already
    // returned the plain sink, so this point is unreachable.
    ASSERT_NOT_REACHED();
    return nullptr;
}
2453
2454 GstElement* MediaPlayerPrivateGStreamer::audioSink() const
2455 {
2456     GstElement* sink;
2457     g_object_get(m_pipeline.get(), "audio-sink", &sink, nullptr);
2458     return sink;
2459 }
2460
2461 #if ENABLE(WEB_AUDIO)
// Lazily creates the provider that exposes this player's audio stream to
// Web Audio. Safe to call repeatedly; only the first call allocates.
void MediaPlayerPrivateGStreamer::ensureAudioSourceProvider()
{
    if (!m_audioSourceProvider)
        m_audioSourceProvider = std::make_unique<AudioSourceProviderGStreamer>();
}
2467
// Accessor used by Web Audio; returns the (lazily created) provider.
// The player retains ownership of the returned object.
AudioSourceProvider* MediaPlayerPrivateGStreamer::audioSourceProvider()
{
    ensureAudioSourceProvider();
    return m_audioSourceProvider.get();
}
2473 #endif
2474
// Creates (or re-creates) the playbin pipeline and wires up all bus/signal
// handlers, sinks and filters. |playbinName| may force a specific factory
// ("playbin" or "playbin3"); nullptr keeps an existing pipeline or defaults
// to "playbin". |pipelineName| optionally overrides the pipeline's name.
void MediaPlayerPrivateGStreamer::createGSTPlayBin(const gchar* playbinName, const String& pipelineName)
{
    if (m_pipeline) {
        if (!playbinName) {
            GST_INFO_OBJECT(pipeline(), "Keeping same playbin as nothing forced");
            return;
        }

        // Already built from the requested factory? Nothing to do.
        if (!g_strcmp0(GST_OBJECT_NAME(gst_element_get_factory(m_pipeline.get())), playbinName)) {
            GST_INFO_OBJECT(pipeline(), "Already using %s", playbinName);
            return;
        }

        // A different factory was forced: tear down the old pipeline first.
        GST_INFO_OBJECT(pipeline(), "Tearing down as we need to use %s now.",
            playbinName);
        changePipelineState(GST_STATE_NULL);
        m_pipeline = nullptr;
    }

    ASSERT(!m_pipeline);

#if GST_CHECK_VERSION(1, 10, 0)
    // Environment override for testing the playbin3 code paths.
    if (g_getenv("USE_PLAYBIN3"))
        playbinName = "playbin3";
#else
    // playbin3 requires GStreamer >= 1.10; force the legacy element otherwise.
    playbinName = "playbin";
#endif

    if (!playbinName)
        playbinName = "playbin";

    m_isLegacyPlaybin = !g_strcmp0(playbinName, "playbin");

    // gst_element_factory_make() returns a floating reference so
    // we should not adopt.
    setPipeline(gst_element_factory_make(playbinName,
        pipelineName.isEmpty() ? String::format("play_%p", this).utf8().data() : pipelineName.utf8().data()));
    setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get()));

    GST_INFO("Using legacy playbin element: %s", boolForPrinting(m_isLegacyPlaybin));

    // Let also other listeners subscribe to (application) messages in this bus.
    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    gst_bus_add_signal_watch_full(bus.get(), RunLoopSourcePriority::RunLoopDispatcher);
    g_signal_connect(bus.get(), "message", G_CALLBACK(busMessageCallback), this);

    g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);

    g_signal_connect_swapped(m_pipeline.get(), "source-setup", G_CALLBACK(sourceSetupCallback), this);
    // The *-changed signals only exist on legacy playbin; playbin3 reports
    // stream changes through other means handled elsewhere.
    if (m_isLegacyPlaybin) {
        g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
        g_signal_connect_swapped(m_pipeline.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
    }

#if ENABLE(VIDEO_TRACK)
    if (m_isLegacyPlaybin)
        g_signal_connect_swapped(m_pipeline.get(), "text-changed", G_CALLBACK(textChangedCallback), this);

    GstElement* textCombiner = webkitTextCombinerNew();
    ASSERT(textCombiner);
    g_object_set(m_pipeline.get(), "text-stream-combiner", textCombiner, nullptr);

    // App sink receiving decoded subtitle cues as VTT.
    m_textAppSink = webkitTextSinkNew();
    ASSERT(m_textAppSink);

    m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
    ASSERT(m_textAppSinkPad);

    // The VTT caps name changed in GStreamer 1.13.
    GRefPtr<GstCaps> textCaps;
    if (webkitGstCheckVersion(1, 13, 0))
        textCaps = adoptGRef(gst_caps_new_empty_simple("application/x-subtitle-vtt"));
    else
        textCaps = adoptGRef(gst_caps_new_empty_simple("text/vtt"));
    g_object_set(m_textAppSink.get(), "emit-signals", TRUE, "enable-last-sample", FALSE, "caps", textCaps.get(), nullptr);
    g_signal_connect_swapped(m_textAppSink.get(), "new-sample", G_CALLBACK(newTextSampleCallback), this);

    g_object_set(m_pipeline.get(), "text-sink", m_textAppSink.get(), nullptr);
#endif

    g_object_set(m_pipeline.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);

    configurePlaySink();

    // On 1.4.2 and newer we use the audio-filter property instead.
    // See https://bugzilla.gnome.org/show_bug.cgi?id=735748 for
    // the reason for using >= 1.4.2 instead of >= 1.4.0.
    if (m_preservesPitch && webkitGstCheckVersion(1, 4, 2)) {
        GstElement* scale = gst_element_factory_make("scaletempo", nullptr);

        if (!scale)
            GST_WARNING("Failed to create scaletempo");
        else
            g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
    }

    if (!m_renderingCanBeAccelerated) {
        // If not using accelerated compositing, let GStreamer handle
        // the image-orientation tag.
        GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
        if (videoFlip) {
            // method=8 is videoflip's "automatic" mode, which rotates based
            // on the stream's image-orientation tag.
            g_object_set(videoFlip, "method", 8, nullptr);
            g_object_set(m_pipeline.get(), "video-filter", videoFlip, nullptr);
        } else
            GST_WARNING("The videoflip element is missing, video rotation support is now disabled. Please check your gst-plugins-good installation.");
    }

    // Track caps changes on the video sink to pick up size/format updates.
    GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
    if (videoSinkPad)
        g_signal_connect_swapped(videoSinkPad.get(), "notify::caps", G_CALLBACK(videoSinkCapsChangedCallback), this);
}
2585
2586 void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
2587 {
2588     GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_pipeline.get()), GST_STATE_PAUSED);
2589     gst_element_post_message(m_pipeline.get(), message);
2590 }
2591
2592 bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
2593 {
2594     if (WEBKIT_IS_WEB_SRC(m_source.get()))
2595         return webKitSrcPassedCORSAccessCheck(WEBKIT_WEB_SRC(m_source.get()));
2596     return false;
2597 }
2598
2599 bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
2600 {
2601     if (isLiveStream())
2602         return false;
2603
2604     if (m_url.isLocalFile())
2605         return true;
2606
2607     if (m_url.protocolIsInHTTPFamily())
2608         return true;
2609
2610     return false;
2611 }
2612
2613 }
2614
2615 #endif // USE(GSTREAMER)