[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamer.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015, 2016 Igalia S.L
7  * Copyright (C) 2014 Cable Television Laboratories, Inc.
8  * Copyright (C) 2015, 2016 Metrological Group B.V.
9  *
10  * This library is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Library General Public
12  * License as published by the Free Software Foundation; either
13  * version 2 of the License, or (at your option) any later version.
14  *
15  * This library is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18  * Library General Public License for more details.
19  *
20  * You should have received a copy of the GNU Library General Public License
21  * along with this library; see the file COPYING.LIB.  If not, write to
22  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
23  * Boston, MA 02110-1301, USA.
24  */
25
26 #include "config.h"
27 #include "MediaPlayerPrivateGStreamer.h"
28
29 #if ENABLE(VIDEO) && USE(GSTREAMER)
30
31 #include "GStreamerCommon.h"
32 #include "GStreamerRegistryScanner.h"
33 #include "HTTPHeaderNames.h"
34 #include "MIMETypeRegistry.h"
35 #include "MediaPlayer.h"
36 #include "MediaPlayerRequestInstallMissingPluginsCallback.h"
37 #include "NotImplemented.h"
38 #include "SecurityOrigin.h"
39 #include "TimeRanges.h"
40 #include "WebKitWebSourceGStreamer.h"
41 #include <glib.h>
42 #include <gst/gst.h>
43 #include <gst/pbutils/missing-plugins.h>
44 #include <limits>
45 #include <wtf/FileSystem.h>
46 #include <wtf/MediaTime.h>
47 #include <wtf/NeverDestroyed.h>
48 #include <wtf/StringPrintStream.h>
49 #include <wtf/URL.h>
50 #include <wtf/WallTime.h>
51 #include <wtf/glib/GUniquePtr.h>
52 #include <wtf/glib/RunLoopSourcePriority.h>
53 #include <wtf/text/CString.h>
54 #include <wtf/text/StringConcatenateNumbers.h>
55
56 #if ENABLE(MEDIA_STREAM) && GST_CHECK_VERSION(1, 10, 0)
57 #include "GStreamerMediaStreamSource.h"
58 #endif
59
60 #if ENABLE(VIDEO_TRACK)
61 #include "AudioTrackPrivateGStreamer.h"
62 #include "InbandMetadataTextTrackPrivateGStreamer.h"
63 #include "InbandTextTrackPrivateGStreamer.h"
64 #include "TextCombinerGStreamer.h"
65 #include "TextSinkGStreamer.h"
66 #include "VideoTrackPrivateGStreamer.h"
67 #endif
68
69 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
70 #define GST_USE_UNSTABLE_API
71 #include <gst/mpegts/mpegts.h>
72 #undef GST_USE_UNSTABLE_API
73 #endif
74 #include <gst/audio/streamvolume.h>
75
76 #if ENABLE(MEDIA_SOURCE)
77 #include "MediaSource.h"
78 #include "WebKitMediaSourceGStreamer.h"
79 #endif
80
81 #if ENABLE(WEB_AUDIO)
82 #include "AudioSourceProviderGStreamer.h"
83 #endif
84
85 GST_DEBUG_CATEGORY_EXTERN(webkit_media_player_debug);
86 #define GST_CAT_DEFAULT webkit_media_player_debug
87
88
89 namespace WebCore {
90 using namespace std;
91
92 static void busMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
93 {
94     player->handleMessage(message);
95 }
96
97 void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
98 {
99     player->setAudioStreamProperties(object);
100 }
101
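// Tags a PulseAudio sink with a media.role stream property ("video" or "music",
// depending on whether the client is a video player) so the sound server can
// classify the audio session. Sinks other than GstPulseSink are left untouched.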
102 void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
103 {
104     if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
105         return;
106
107     const char* role = m_player->client().mediaPlayerIsVideo() ? "video" : "music";
108     GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr);
109     g_object_set(object, "stream-properties", structure, nullptr);
110     gst_structure_free(structure);
111     GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
112     GST_DEBUG_OBJECT(pipeline(), "Set media.role as %s at %s", role, elementName.get());
113 }
114
115 void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar)
116 {
117     MediaPlayerPrivateGStreamerBase::initializeDebugCategory();
118     if (isAvailable()) {
119         registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamer>(player); },
120             getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
121     }
122 }
123
124 bool MediaPlayerPrivateGStreamer::isAvailable()
125 {
126     if (!initializeGStreamerAndRegisterWebKitElements())
127         return false;
128
129     GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
130     return factory;
131 }
132
133 MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
134     : MediaPlayerPrivateGStreamerBase(player)
135     , m_buffering(false)
136     , m_bufferingPercentage(0)
137     , m_cachedPosition(MediaTime::invalidTime())
138     , m_cachedDuration(MediaTime::invalidTime())
139     , m_canFallBackToLastFinishedSeekPosition(false)
140     , m_changingRate(false)
141     , m_downloadFinished(false)
142     , m_errorOccured(false)
143     , m_isEndReached(false)
144     , m_isStreaming(false)
145     , m_paused(true)
146     , m_playbackRate(1)
147     , m_requestedState(GST_STATE_VOID_PENDING)
148     , m_resetPipeline(false)
149     , m_seeking(false)
150     , m_seekIsPending(false)
151     , m_seekTime(MediaTime::invalidTime())
152     , m_source(nullptr)
153     , m_volumeAndMuteInitialized(false)
154     , m_mediaLocations(nullptr)
155     , m_mediaLocationCurrentIndex(0)
156     , m_playbackRatePause(false)
157     , m_timeOfOverlappingSeek(MediaTime::invalidTime())
158     , m_lastPlaybackRate(1)
159     , m_fillTimer(*this, &MediaPlayerPrivateGStreamer::fillTimerFired)
160     , m_maxTimeLoaded(MediaTime::zeroTime())
161     , m_preload(player->preload())
162     , m_delayingLoad(false)
163     , m_maxTimeLoadedAtLastDidLoadingProgress(MediaTime::zeroTime())
164     , m_hasVideo(false)
165     , m_hasAudio(false)
166     , m_readyTimerHandler(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::readyTimerFired)
167     , m_totalBytes(0)
168     , m_preservesPitch(false)
169 {
170 #if USE(GLIB)
171     m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
172 #endif
173 }
174
175 MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
176 {
177     GST_DEBUG_OBJECT(pipeline(), "Disposing player");
178
179 #if ENABLE(VIDEO_TRACK)
180     for (auto& track : m_audioTracks.values())
181         track->disconnect();
182
183     for (auto& track : m_textTracks.values())
184         track->disconnect();
185
186     for (auto& track : m_videoTracks.values())
187         track->disconnect();
188 #endif
189     if (m_fillTimer.isActive())
190         m_fillTimer.stop();
191
192     if (m_mediaLocations) {
193         gst_structure_free(m_mediaLocations);
194         m_mediaLocations = nullptr;
195     }
196
197     if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
198         g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);
199
200     if (m_autoAudioSink) {
201         g_signal_handlers_disconnect_by_func(G_OBJECT(m_autoAudioSink.get()),
202             reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);
203     }
204
205     m_readyTimerHandler.stop();
206     for (auto& missingPluginCallback : m_missingPluginCallbacks) {
207         if (missingPluginCallback)
208             missingPluginCallback->invalidate();
209     }
210     m_missingPluginCallbacks.clear();
211
212     if (m_videoSink) {
213         GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
214         g_signal_handlers_disconnect_matched(videoSinkPad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
215     }
216
217     if (m_pipeline) {
218         GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
219         ASSERT(bus);
220         g_signal_handlers_disconnect_by_func(bus.get(), gpointer(busMessageCallback), this);
221         gst_bus_remove_signal_watch(bus.get());
222         gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
223         g_signal_handlers_disconnect_matched(m_pipeline.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
224     }
225 }
226
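// For GStreamer versions older than 1.12, HTTP(S) and blob URLs are rewritten with a
// "webkit+" prefix so that playbin resolves them to the WebKit source element
// (webkitwebsrc) instead of the stock HTTP source.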
227 static void convertToInternalProtocol(URL& url)
228 {
229     if (webkitGstCheckVersion(1, 12, 0))
230         return;
231     if (url.protocolIsInHTTPFamily() || url.protocolIsBlob())
232         url.setProtocol("webkit+" + url.protocol());
233 }
234
235 void MediaPlayerPrivateGStreamer::setPlaybinURL(const URL& url)
236 {
237     // For local file:// URLs, strip everything after the path (query and fragment).
238     String cleanURLString(url.string());
239     if (url.isLocalFile())
240         cleanURLString = cleanURLString.substring(0, url.pathEnd());
241
242     m_url = URL(URL(), cleanURLString);
243     convertToInternalProtocol(m_url);
244     GST_INFO_OBJECT(pipeline(), "Load %s", m_url.string().utf8().data());
245     g_object_set(m_pipeline.get(), "uri", m_url.string().utf8().data(), nullptr);
246 }
247
248 void MediaPlayerPrivateGStreamer::load(const String& urlString)
249 {
250     loadFull(urlString, String());
251 }
252
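// Recursively sets the "sync" property on every sink found inside the given element
// (descending into bins), toggling rendering between clock-synchronized and
// as-fast-as-possible modes. Used below to disable clock sync for MediaStream playback.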
253 static void setSyncOnClock(GstElement *element, bool sync)
254 {
255     if (!GST_IS_BIN(element)) {
256         g_object_set(element, "sync", sync, nullptr);
257         return;
258     }
259
260     GstIterator* it = gst_bin_iterate_sinks(GST_BIN(element));
261     while (gst_iterator_foreach(it, (GstIteratorForeachFunction)([](const GValue* item, void* syncPtr) {
262         bool* sync = static_cast<bool*>(syncPtr);
263         setSyncOnClock(GST_ELEMENT(g_value_get_object(item)), *sync);
264     }), &sync) == GST_ITERATOR_RESYNC)
265         gst_iterator_resync(it);
266     gst_iterator_free(it);
267 }
268
269 void MediaPlayerPrivateGStreamer::syncOnClock(bool sync)
270 {
271     setSyncOnClock(videoSink(), sync);
272     setSyncOnClock(audioSink(), sync);
273 }
274
275 void MediaPlayerPrivateGStreamer::loadFull(const String& urlString, const String& pipelineName)
276 {
277     if (m_player->contentMIMEType() == "image/gif") {
278         loadingFailed(MediaPlayer::FormatError, MediaPlayer::HaveNothing, true);
279         return;
280     }
281
282     URL url(URL(), urlString);
283     if (url.protocolIsAbout())
284         return;
285
286     if (!m_pipeline)
287         createGSTPlayBin(url, pipelineName);
288     syncOnClock(true);
289     if (m_fillTimer.isActive())
290         m_fillTimer.stop();
291
292     ASSERT(m_pipeline);
293
294     setPlaybinURL(url);
295
296     GST_DEBUG_OBJECT(pipeline(), "preload: %s", convertEnumerationToString(m_preload).utf8().data());
297     if (m_preload == MediaPlayer::None) {
298         GST_INFO_OBJECT(pipeline(), "Delaying load.");
299         m_delayingLoad = true;
300     }
301
302     // Reset network and ready states. Those will be set properly once
303     // the pipeline has pre-rolled.
304     m_networkState = MediaPlayer::Loading;
305     m_player->networkStateChanged();
306     m_readyState = MediaPlayer::HaveNothing;
307     m_player->readyStateChanged();
308     m_volumeAndMuteInitialized = false;
309     m_hasTaintedOrigin = WTF::nullopt;
310
311     if (!m_delayingLoad)
312         commitLoad();
313 }
314
315 #if ENABLE(MEDIA_SOURCE)
316 void MediaPlayerPrivateGStreamer::load(const String&, MediaSourcePrivateClient*)
317 {
318     // Properly fail so the global MediaPlayer tries to fall back to the next MediaPlayerPrivate.
319     m_networkState = MediaPlayer::FormatError;
320     m_player->networkStateChanged();
321 }
322 #endif
323
324 #if ENABLE(MEDIA_STREAM)
325 void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate& stream)
326 {
327 #if GST_CHECK_VERSION(1, 10, 0)
328     m_streamPrivate = &stream;
329     static Atomic<uint32_t> pipelineId;
330     auto pipelineName = makeString("mediastream-",
331         (stream.hasCaptureVideoSource() || stream.hasCaptureAudioSource()) ? "local" : "remote",
332         "-", pipelineId.exchangeAdd(1));
333
334     loadFull(String("mediastream://") + stream.id(), pipelineName);
335     syncOnClock(false);
336
337 #if USE(GSTREAMER_GL)
338     ensureGLVideoSinkContext();
339 #endif
340     m_player->play();
341 #else
342     // Properly fail so the global MediaPlayer tries to fall back to the next MediaPlayerPrivate.
343     m_networkState = MediaPlayer::FormatError;
344     m_player->networkStateChanged();
345     notImplemented();
346 #endif
347 }
348 #endif
349
350 void MediaPlayerPrivateGStreamer::commitLoad()
351 {
352     ASSERT(!m_delayingLoad);
353     GST_DEBUG_OBJECT(pipeline(), "Committing load.");
354
355     // GStreamer needs to have the pipeline set to a paused state to
356     // start providing anything useful.
357     changePipelineState(GST_STATE_PAUSED);
358
359     setDownloadBuffering();
360     updateStates();
361 }
362
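// Queries the pipeline for the current position, caching the result briefly (see
// positionCacheThreshold) to avoid flooding the pipeline with position queries.
// At EOS during a seek the seek target is returned, and if the query fails right
// after a finished seek the last seek time is used as a fallback.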
363 MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
364 {
365     GST_TRACE_OBJECT(pipeline(), "isEndReached: %s, seeking: %s, seekTime: %s", boolForPrinting(m_isEndReached), boolForPrinting(m_seeking), m_seekTime.toString().utf8().data());
366     if (m_isEndReached && m_seeking)
367         return m_seekTime;
368
369     // This constant should remain lower than HTMLMediaElement's maxTimeupdateEventFrequency.
370     static const Seconds positionCacheThreshold = 200_ms;
371     Seconds now = WTF::WallTime::now().secondsSinceEpoch();
372     if (m_lastQueryTime && (now - m_lastQueryTime.value()) < positionCacheThreshold && m_cachedPosition.isValid()) {
373         GST_TRACE_OBJECT(pipeline(), "Returning cached position: %s", m_cachedPosition.toString().utf8().data());
374         return m_cachedPosition;
375     }
376
377     m_lastQueryTime = now;
378
379     // Position is only available if no async state change is going on and the state is either paused or playing.
380     gint64 position = GST_CLOCK_TIME_NONE;
381     GstQuery* query = gst_query_new_position(GST_FORMAT_TIME);
382     if (gst_element_query(m_pipeline.get(), query))
383         gst_query_parse_position(query, 0, &position);
384     gst_query_unref(query);
385
386     GST_TRACE_OBJECT(pipeline(), "Position %" GST_TIME_FORMAT ", canFallBackToLastFinishedSeekPosition: %s", GST_TIME_ARGS(position), boolForPrinting(m_canFallBackToLastFinishedSeekPosition));
387
388     MediaTime playbackPosition = MediaTime::zeroTime();
389     GstClockTime gstreamerPosition = static_cast<GstClockTime>(position);
390     if (GST_CLOCK_TIME_IS_VALID(gstreamerPosition))
391         playbackPosition = MediaTime(gstreamerPosition, GST_SECOND);
392     else if (m_canFallBackToLastFinishedSeekPosition)
393         playbackPosition = m_seekTime;
394
395     m_cachedPosition = playbackPosition;
396     return playbackPosition;
397 }
398
399 void MediaPlayerPrivateGStreamer::readyTimerFired()
400 {
401     GST_DEBUG_OBJECT(pipeline(), "In READY for too long. Releasing pipeline resources.");
402     changePipelineState(GST_STATE_NULL);
403 }
404
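// Requests an asynchronous pipeline state change. The request is skipped (returning
// true) when the target state is already current or pending. A one-shot timer is armed
// when moving to READY so that pipeline resources are released if we linger there.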
405 bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
406 {
407     ASSERT(m_pipeline);
408
409     GstState currentState;
410     GstState pending;
411
412     gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
413     if (currentState == newState || pending == newState) {
414         GST_DEBUG_OBJECT(pipeline(), "Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
415             gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
416         return true;
417     }
418
419     GST_DEBUG_OBJECT(pipeline(), "Changing state to %s from %s with %s pending", gst_element_state_get_name(newState),
420         gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
421
422 #if USE(GSTREAMER_GL)
423     if (currentState == GST_STATE_READY && newState == GST_STATE_PAUSED)
424         ensureGLVideoSinkContext();
425 #endif
426
427     GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState);
428     GstState pausedOrPlaying = newState == GST_STATE_PLAYING ? GST_STATE_PAUSED : GST_STATE_PLAYING;
429     if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE)
430         return false;
431
432     // Create a timer when entering the READY state so that we can free resources
433     // if we stay for too long on READY.
434     // Also, remove the timer if we request a state change for any state other than READY.
435     // See also https://bugs.webkit.org/show_bug.cgi?id=117354
436     if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
437         // Max interval in seconds to stay in the READY state on manual
438         // state change requests.
439         static const Seconds readyStateTimerDelay { 1_min };
440         m_readyTimerHandler.startOneShot(readyStateTimerDelay);
441     } else if (newState != GST_STATE_READY)
442         m_readyTimerHandler.stop();
443
444     return true;
445 }
446
447 void MediaPlayerPrivateGStreamer::prepareToPlay()
448 {
449     GST_DEBUG_OBJECT(pipeline(), "Prepare to play");
450     m_preload = MediaPlayer::Auto;
451     if (m_delayingLoad) {
452         m_delayingLoad = false;
453         commitLoad();
454     }
455 }
456
457 void MediaPlayerPrivateGStreamer::play()
458 {
459     if (!m_playbackRate) {
460         m_playbackRatePause = true;
461         return;
462     }
463
464     if (changePipelineState(GST_STATE_PLAYING)) {
465         m_isEndReached = false;
466         m_delayingLoad = false;
467         m_preload = MediaPlayer::Auto;
468         setDownloadBuffering();
469         GST_INFO_OBJECT(pipeline(), "Play");
470     } else
471         loadingFailed(MediaPlayer::Empty);
472 }
473
474 void MediaPlayerPrivateGStreamer::pause()
475 {
476     m_playbackRatePause = false;
477     GstState currentState, pendingState;
478     gst_element_get_state(m_pipeline.get(), &currentState, &pendingState, 0);
479     if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
480         return;
481
482     if (changePipelineState(GST_STATE_PAUSED))
483         GST_INFO_OBJECT(pipeline(), "Pause");
484     else
485         loadingFailed(MediaPlayer::Empty);
486 }
487
488 MediaTime MediaPlayerPrivateGStreamer::platformDuration() const
489 {
490     GST_TRACE_OBJECT(pipeline(), "errorOccured: %s, pipeline state: %s", boolForPrinting(m_errorOccured), gst_element_state_get_name(GST_STATE(m_pipeline.get())));
491     if (m_errorOccured)
492         return MediaTime::invalidTime();
493
494     // The duration query would fail on a pipeline that has not pre-rolled yet.
495     if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
496         return MediaTime::invalidTime();
497
498     int64_t duration = 0;
499     if (!gst_element_query_duration(m_pipeline.get(), GST_FORMAT_TIME, &duration) || !GST_CLOCK_TIME_IS_VALID(duration)) {
500         GST_DEBUG_OBJECT(pipeline(), "Time duration query failed for %s", m_url.string().utf8().data());
501         return MediaTime::positiveInfiniteTime();
502     }
503
504     GST_LOG_OBJECT(pipeline(), "Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
505     return MediaTime(duration, GST_SECOND);
506 }
507
508 MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
509 {
510     GST_TRACE_OBJECT(pipeline(), "Cached duration: %s", m_cachedDuration.toString().utf8().data());
511     if (m_cachedDuration.isValid())
512         return m_cachedDuration;
513
514     MediaTime duration = platformDuration();
515     if (!duration || duration.isInvalid())
516         return MediaTime::zeroTime();
517
518     m_cachedDuration = duration;
519
520     return m_cachedDuration;
521 }
522
523 MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
524 {
525     if (!m_pipeline || m_errorOccured)
526         return MediaTime::invalidTime();
527
528     GST_TRACE_OBJECT(pipeline(), "seeking: %s, seekTime: %s", boolForPrinting(m_seeking), m_seekTime.toString().utf8().data());
529     if (m_seeking)
530         return m_seekTime;
531
532     return playbackPosition();
533 }
534
535 void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime)
536 {
537     if (!m_pipeline)
538         return;
539
540     if (m_errorOccured)
541         return;
542
543     GST_INFO_OBJECT(pipeline(), "[Seek] seek attempt to %s", toString(mediaTime).utf8().data());
544
545     // Avoid useless seeking.
546     if (mediaTime == currentMediaTime()) {
547         GST_DEBUG_OBJECT(pipeline(), "[Seek] Already at requested position %s, not seeking", toString(mediaTime).utf8().data());
548         return;
549     }
550
551     MediaTime time = std::min(mediaTime, durationMediaTime());
552
553     if (isLiveStream()) {
554         GST_DEBUG_OBJECT(pipeline(), "[Seek] Live stream seek unhandled");
555         return;
556     }
557
558     GST_INFO_OBJECT(pipeline(), "[Seek] seeking to %s", toString(time).utf8().data());
559
560     if (m_seeking) {
561         m_timeOfOverlappingSeek = time;
562         if (m_seekIsPending) {
563             m_seekTime = time;
564             return;
565         }
566     }
567
568     GstState state;
569     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
570     if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
571         GST_DEBUG_OBJECT(pipeline(), "[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
572         return;
573     }
574     if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
575         m_seekIsPending = true;
576         if (m_isEndReached) {
577             GST_DEBUG_OBJECT(pipeline(), "[Seek] reset pipeline");
578             m_resetPipeline = true;
579             if (!changePipelineState(GST_STATE_PAUSED))
580                 loadingFailed(MediaPlayer::Empty);
581         }
582     } else {
583         // We can seek now.
584         if (!doSeek(time, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
585             GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(time).utf8().data());
586             return;
587         }
588     }
589
590     m_seeking = true;
591     m_seekTime = time;
592     m_isEndReached = false;
593 }
594
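// Performs the actual gst_element_seek(). For forward rates the segment starts at the
// requested position; for negative rates the segment runs from zero up to the position
// (or up to the duration when the position is before the start), playing backwards.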
595 bool MediaPlayerPrivateGStreamer::doSeek(const MediaTime& position, float rate, GstSeekFlags seekType)
596 {
597     // Default values for rate >= 0.
598     MediaTime startTime = position, endTime = MediaTime::invalidTime();
599
600     if (rate < 0) {
601         startTime = MediaTime::zeroTime();
602         // If we are at beginning of media, start from the end to
603         // avoid immediate EOS.
604         if (position < MediaTime::zeroTime())
605             endTime = durationMediaTime();
606         else
607             endTime = position;
608     }
609
610     if (!rate)
611         rate = 1.0;
612
613     return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType,
614         GST_SEEK_TYPE_SET, toGstClockTime(startTime), GST_SEEK_TYPE_SET, toGstClockTime(endTime));
615 }
616
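// Applies a pending playback rate change with a flushing seek at the current position.
// Audio is muted for negative or extreme rates when pitch preservation is disabled, and
// a pipeline that was paused only because of a zero rate is resumed here.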
617 void MediaPlayerPrivateGStreamer::updatePlaybackRate()
618 {
619     if (!m_changingRate)
620         return;
621
622     GST_INFO_OBJECT(pipeline(), "Set Rate to %f", m_playbackRate);
623
624     // Mute the sound if the playback rate is negative or too extreme and audio pitch is not adjusted.
625     bool mute = m_playbackRate <= 0 || (!m_preservesPitch && (m_playbackRate < 0.8 || m_playbackRate > 2));
626
627     GST_INFO_OBJECT(pipeline(), mute ? "Need to mute audio" : "Do not need to mute audio");
628
629     if (doSeek(playbackPosition(), m_playbackRate, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH))) {
630         g_object_set(m_pipeline.get(), "mute", mute, nullptr);
631         m_lastPlaybackRate = m_playbackRate;
632     } else {
633         m_playbackRate = m_lastPlaybackRate;
634         GST_ERROR("Set rate to %f failed", m_playbackRate);
635     }
636
637     if (m_playbackRatePause) {
638         GstState state;
639         GstState pending;
640
641         gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
642         if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
643             changePipelineState(GST_STATE_PLAYING);
644         m_playbackRatePause = false;
645     }
646
647     m_changingRate = false;
648     m_player->rateChanged();
649 }
650
651 bool MediaPlayerPrivateGStreamer::paused() const
652 {
653     if (m_isEndReached) {
654         GST_DEBUG_OBJECT(pipeline(), "Ignoring pause at EOS");
655         return true;
656     }
657
658     if (m_playbackRatePause) {
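        // The pipeline is held in PAUSED only because the rate is 0; report the
        // player as still playing so the upper layers keep their playing state.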
659         GST_DEBUG_OBJECT(pipeline(), "Playback rate is 0, simulating PAUSED state");
660         return false;
661     }
662
663     GstState state;
664     gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
665     bool paused = state <= GST_STATE_PAUSED;
666     GST_DEBUG_OBJECT(pipeline(), "Paused: %s", toString(paused).utf8().data());
667     return paused;
668 }
669
670 bool MediaPlayerPrivateGStreamer::seeking() const
671 {
672     return m_seeking;
673 }
674
675 #if GST_CHECK_VERSION(1, 10, 0)
676 #define CLEAR_TRACKS(tracks, method) \
677     for (auto& track : tracks.values())\
678         method(*track);\
679     tracks.clear();
680
681 void MediaPlayerPrivateGStreamer::clearTracks()
682 {
683 #if ENABLE(VIDEO_TRACK)
684     CLEAR_TRACKS(m_audioTracks, m_player->removeAudioTrack);
685     CLEAR_TRACKS(m_videoTracks, m_player->removeVideoTrack);
686     CLEAR_TRACKS(m_textTracks, m_player->removeTextTrack);
687 #endif // ENABLE(VIDEO_TRACK)
688 }
689 #undef CLEAR_TRACKS
690
691 #if ENABLE(VIDEO_TRACK)
692 #define CREATE_TRACK(type, Type) \
693     m_has##Type = true; \
694     if (!useMediaSource) {\
695         RefPtr<Type##TrackPrivateGStreamer> track = Type##TrackPrivateGStreamer::create(makeWeakPtr(*this), i, stream); \
696         m_##type##Tracks.add(track->id(), track); \
697         m_player->add##Type##Track(*track);\
698         if (gst_stream_get_stream_flags(stream.get()) & GST_STREAM_FLAG_SELECT) {                                    \
699             m_current##Type##StreamId = String(gst_stream_get_stream_id(stream.get()));                              \
700         }                                                                                                            \
701     }
702
703 FloatSize MediaPlayerPrivateGStreamer::naturalSize() const
704 {
705 #if ENABLE(MEDIA_STREAM)
706     if (!m_isLegacyPlaybin && !m_currentVideoStreamId.isEmpty()) {
707         RefPtr<VideoTrackPrivateGStreamer> videoTrack = m_videoTracks.get(m_currentVideoStreamId);
708
709         if (videoTrack) {
710             auto tags = adoptGRef(gst_stream_get_tags(videoTrack->stream()));
711             gint width, height;
712
713             if (tags && gst_tag_list_get_int(tags.get(), WEBKIT_MEDIA_TRACK_TAG_WIDTH, &width) && gst_tag_list_get_int(tags.get(), WEBKIT_MEDIA_TRACK_TAG_HEIGHT, &height))
714                 return FloatSize(width, height);
715         }
716     }
717 #endif // ENABLE(MEDIA_STREAM)
718
719     return MediaPlayerPrivateGStreamerBase::naturalSize();
720 }
721 #else
722 #define CREATE_TRACK(type, Type) m_has##Type = true;
723 #endif // ENABLE(VIDEO_TRACK)
724
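// playbin3 only: rebuilds the audio/video/text track lists from the most recent
// GstStreamCollection. Text tracks are only created here when not using MSE.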
725 void MediaPlayerPrivateGStreamer::updateTracks()
726 {
727     ASSERT(!m_isLegacyPlaybin);
728
729     bool useMediaSource = isMediaSource();
730     unsigned length = gst_stream_collection_get_size(m_streamCollection.get());
731
732     bool oldHasAudio = m_hasAudio;
733     bool oldHasVideo = m_hasVideo;
734     // New stream collections override previous ones.
735     clearTracks();
736     unsigned textTrackIndex = 0;
737     for (unsigned i = 0; i < length; i++) {
738         GRefPtr<GstStream> stream = gst_stream_collection_get_stream(m_streamCollection.get(), i);
739         String streamId(gst_stream_get_stream_id(stream.get()));
740         GstStreamType type = gst_stream_get_stream_type(stream.get());
741
742         GST_DEBUG_OBJECT(pipeline(), "Inspecting %s track with ID %s", gst_stream_type_get_name(type), streamId.utf8().data());
743         if (type & GST_STREAM_TYPE_AUDIO) {
744             CREATE_TRACK(audio, Audio)
745         } else if (type & GST_STREAM_TYPE_VIDEO) {
746             CREATE_TRACK(video, Video)
747         } else if (type & GST_STREAM_TYPE_TEXT && !useMediaSource) {
748 #if ENABLE(VIDEO_TRACK)
749             auto track = InbandTextTrackPrivateGStreamer::create(textTrackIndex++, stream);
750             m_textTracks.add(streamId, track.copyRef());
751             m_player->addTextTrack(track.get());
752 #endif
753         } else
754             GST_WARNING("Unknown track type found for stream %s", streamId.utf8().data());
755     }
756
757     if ((oldHasVideo != m_hasVideo) || (oldHasAudio != m_hasAudio))
758         m_player->characteristicChanged();
759
760     if (m_hasVideo)
761         m_player->sizeChanged();
762
763     m_player->client().mediaPlayerEngineUpdated(m_player);
764 }
765 #endif // GST_CHECK_VERSION(1, 10, 0)
766
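// Selects a track of the given type. With the legacy playbin the current-audio/video/text
// property is set to the track index; with playbin3 a select-streams event is sent,
// listing the chosen stream ID together with the currently selected streams of the other
// types so they remain active.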
767 void MediaPlayerPrivateGStreamer::enableTrack(TrackPrivateBaseGStreamer::TrackType trackType, unsigned index)
768 {
769     // FIXME: Remove isMediaSource() test below when fixing https://bugs.webkit.org/show_bug.cgi?id=182531.
770     if (isMediaSource()) {
771         GST_FIXME_OBJECT(m_pipeline.get(), "Audio/Video/Text track switching is not yet supported by the MSE backend.");
772         return;
773     }
774
775     const char* propertyName;
776     const char* trackTypeAsString;
777     Vector<String> selectedStreams;
778     String selectedStreamId;
779
780 #if GST_CHECK_VERSION(1, 10, 0)
781     GstStream* stream = nullptr;
782
783     if (!m_isLegacyPlaybin) {
784         stream = gst_stream_collection_get_stream(m_streamCollection.get(), index);
785         if (!stream) {
786             GST_WARNING_OBJECT(pipeline(), "No stream to select at index %u", index);
787             return;
788         }
789         selectedStreamId = String::fromUTF8(gst_stream_get_stream_id(stream));
790         selectedStreams.append(selectedStreamId);
791     }
792 #endif // GST_CHECK_VERSION(1, 10, 0)
793
794     switch (trackType) {
795     case TrackPrivateBaseGStreamer::TrackType::Audio:
796         propertyName = "current-audio";
797         trackTypeAsString = "audio";
798         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentAudioStreamId) {
799             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
800             return;
801         }
802
803         if (!m_currentTextStreamId.isEmpty())
804             selectedStreams.append(m_currentTextStreamId);
805         if (!m_currentVideoStreamId.isEmpty())
806             selectedStreams.append(m_currentVideoStreamId);
807         break;
808     case TrackPrivateBaseGStreamer::TrackType::Video:
809         propertyName = "current-video";
810         trackTypeAsString = "video";
811         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentVideoStreamId) {
812             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
813             return;
814         }
815
816         if (!m_currentAudioStreamId.isEmpty())
817             selectedStreams.append(m_currentAudioStreamId);
818         if (!m_currentTextStreamId.isEmpty())
819             selectedStreams.append(m_currentTextStreamId);
820         break;
821     case TrackPrivateBaseGStreamer::TrackType::Text:
822         propertyName = "current-text";
823         trackTypeAsString = "text";
824         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentTextStreamId) {
825             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
826             return;
827         }
828
829         if (!m_currentAudioStreamId.isEmpty())
830             selectedStreams.append(m_currentAudioStreamId);
831         if (!m_currentVideoStreamId.isEmpty())
832             selectedStreams.append(m_currentVideoStreamId);
833         break;
834     case TrackPrivateBaseGStreamer::TrackType::Unknown:
835     default:
836         ASSERT_NOT_REACHED();
837     }
838
839     GST_INFO_OBJECT(pipeline(), "Enabling %s track with index: %u", trackTypeAsString, index);
840     if (m_isLegacyPlaybin)
841         g_object_set(m_pipeline.get(), propertyName, index, nullptr);
842 #if GST_CHECK_VERSION(1, 10, 0)
843     else {
844         GList* selectedStreamsList = nullptr;
845
846         for (const auto& streamId : selectedStreams)
847             selectedStreamsList = g_list_append(selectedStreamsList, g_strdup(streamId.utf8().data()));
848
849         // TODO: MSE GstStream API support: https://bugs.webkit.org/show_bug.cgi?id=182531
850         gst_element_send_event(m_pipeline.get(), gst_event_new_select_streams(selectedStreamsList));
851         g_list_free_full(selectedStreamsList, reinterpret_cast<GDestroyNotify>(g_free));
852     }
853 #endif
854 }
855
856 void MediaPlayerPrivateGStreamer::videoChangedCallback(MediaPlayerPrivateGStreamer* player)
857 {
858     player->m_notifier->notify(MainThreadNotification::VideoChanged, [player] {
859         player->notifyPlayerOfVideo();
860     });
861 }
862
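// Legacy playbin/MSE path: called on the main thread when the number of video streams
// changes. Tracks are keyed by synthetic "V<index>" stream IDs; stale tracks are purged.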
863 void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
864 {
865     if (UNLIKELY(!m_pipeline || !m_source))
866         return;
867
868     ASSERT(m_isLegacyPlaybin || isMediaSource());
869
870     gint numTracks = 0;
871     bool useMediaSource = isMediaSource();
872     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
873     g_object_get(element, "n-video", &numTracks, nullptr);
874
875     GST_INFO_OBJECT(pipeline(), "Media has %d video tracks", numTracks);
876
877     bool oldHasVideo = m_hasVideo;
878     m_hasVideo = numTracks > 0;
879     if (oldHasVideo != m_hasVideo)
880         m_player->characteristicChanged();
881
882     if (m_hasVideo)
883         m_player->sizeChanged();
884
885     if (useMediaSource) {
886         GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
887         m_player->client().mediaPlayerEngineUpdated(m_player);
888         return;
889     }
890
891 #if ENABLE(VIDEO_TRACK)
892     Vector<String> validVideoStreams;
893     for (gint i = 0; i < numTracks; ++i) {
894         GRefPtr<GstPad> pad;
895         g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
896         ASSERT(pad);
897
898         String streamId = "V" + String::number(i);
899         validVideoStreams.append(streamId);
900         if (i < static_cast<gint>(m_videoTracks.size())) {
901             RefPtr<VideoTrackPrivateGStreamer> existingTrack = m_videoTracks.get(streamId);
902             if (existingTrack) {
903                 existingTrack->setIndex(i);
904                 if (existingTrack->pad() == pad)
905                     continue;
906             }
907         }
908
909         auto track = VideoTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
910         ASSERT(streamId == track->id());
911         m_videoTracks.add(streamId, track.copyRef());
912         m_player->addVideoTrack(track.get());
913     }
914
915     purgeInvalidVideoTracks(validVideoStreams);
916 #endif
917
918     m_player->client().mediaPlayerEngineUpdated(m_player);
919 }
920
921 void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
922 {
923     player->m_notifier->notify(MainThreadNotification::VideoCapsChanged, [player] {
924         player->notifyPlayerOfVideoCaps();
925     });
926 }
927
928 void MediaPlayerPrivateGStreamer::notifyPlayerOfVideoCaps()
929 {
930     m_videoSize = IntSize();
931     m_player->client().mediaPlayerEngineUpdated(m_player);
932 }
933
934 void MediaPlayerPrivateGStreamer::audioChangedCallback(MediaPlayerPrivateGStreamer* player)
935 {
936     player->m_notifier->notify(MainThreadNotification::AudioChanged, [player] {
937         player->notifyPlayerOfAudio();
938     });
939 }
940
941 void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
942 {
943     if (UNLIKELY(!m_pipeline || !m_source))
944         return;
945
946     ASSERT(m_isLegacyPlaybin || isMediaSource());
947
948     gint numTracks = 0;
949     bool useMediaSource = isMediaSource();
950     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
951     g_object_get(element, "n-audio", &numTracks, nullptr);
952
953     GST_INFO_OBJECT(pipeline(), "Media has %d audio tracks", numTracks);
954     bool oldHasAudio = m_hasAudio;
955     m_hasAudio = numTracks > 0;
956     if (oldHasAudio != m_hasAudio)
957         m_player->characteristicChanged();
958
959     if (useMediaSource) {
960         GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
961         m_player->client().mediaPlayerEngineUpdated(m_player);
962         return;
963     }
964
965 #if ENABLE(VIDEO_TRACK)
966     Vector<String> validAudioStreams;
967     for (gint i = 0; i < numTracks; ++i) {
968         GRefPtr<GstPad> pad;
969         g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
970         ASSERT(pad);
971
972         String streamId = "A" + String::number(i);
973         validAudioStreams.append(streamId);
974         if (i < static_cast<gint>(m_audioTracks.size())) {
975             RefPtr<AudioTrackPrivateGStreamer> existingTrack = m_audioTracks.get(streamId);
976             if (existingTrack) {
977                 existingTrack->setIndex(i);
978                 if (existingTrack->pad() == pad)
979                     continue;
980             }
981         }
982
983         auto track = AudioTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
984         ASSERT(streamId == track->id());
985         m_audioTracks.add(streamId, track);
986         m_player->addAudioTrack(*track);
987     }
988
989     purgeInvalidAudioTracks(validAudioStreams);
990 #endif
991
992     m_player->client().mediaPlayerEngineUpdated(m_player);
993 }
994
995 #if ENABLE(VIDEO_TRACK)
996 void MediaPlayerPrivateGStreamer::textChangedCallback(MediaPlayerPrivateGStreamer* player)
997 {
998     player->m_notifier->notify(MainThreadNotification::TextChanged, [player] {
999         player->notifyPlayerOfText();
1000     });
1001 }
1002
1003 void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
1004 {
1005     if (UNLIKELY(!m_pipeline || !m_source))
1006         return;
1007
1008     ASSERT(m_isLegacyPlaybin || isMediaSource());
1009
1010     gint numTracks = 0;
1011     bool useMediaSource = isMediaSource();
1012     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
1013     g_object_get(element, "n-text", &numTracks, nullptr);
1014
1015     GST_INFO_OBJECT(pipeline(), "Media has %d text tracks", numTracks);
1016
1017     if (useMediaSource) {
1018         GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
1019         return;
1020     }
1021
1022     Vector<String> validTextStreams;
1023     for (gint i = 0; i < numTracks; ++i) {
1024         GRefPtr<GstPad> pad;
1025         g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
1026         ASSERT(pad);
1027
1028         // We can't assume the pad has a sticky event here like implemented in
1029         // InbandTextTrackPrivateGStreamer because it might be emitted after the
1030         // track was created. So fall back to a dummy stream ID like in the Audio
1031         // and Video tracks.
1032         String streamId = "T" + String::number(i);
1033
1034         validTextStreams.append(streamId);
1035         if (i < static_cast<gint>(m_textTracks.size())) {
1036             RefPtr<InbandTextTrackPrivateGStreamer> existingTrack = m_textTracks.get(streamId);
1037             if (existingTrack) {
1038                 existingTrack->setIndex(i);
1039                 if (existingTrack->pad() == pad)
1040                     continue;
1041             }
1042         }
1043
1044         auto track = InbandTextTrackPrivateGStreamer::create(i, pad);
1045         m_textTracks.add(streamId, track.copyRef());
1046         m_player->addTextTrack(track.get());
1047     }
1048
1049     purgeInvalidTextTracks(validTextStreams);
1050 }
1051
1052 GstFlowReturn MediaPlayerPrivateGStreamer::newTextSampleCallback(MediaPlayerPrivateGStreamer* player)
1053 {
1054     player->newTextSample();
1055     return GST_FLOW_OK;
1056 }
1057
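// Pulls a text sample from the app sink and dispatches it to the matching text track,
// using the stream ID carried by the pad's sticky stream-start event.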
1058 void MediaPlayerPrivateGStreamer::newTextSample()
1059 {
1060     if (!m_textAppSink)
1061         return;
1062
1063     GRefPtr<GstEvent> streamStartEvent = adoptGRef(
1064         gst_pad_get_sticky_event(m_textAppSinkPad.get(), GST_EVENT_STREAM_START, 0));
1065
1066     GRefPtr<GstSample> sample;
1067     g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr);
1068     ASSERT(sample);
1069
1070     if (streamStartEvent) {
1071         bool found = false;
1072         const gchar* id;
1073         gst_event_parse_stream_start(streamStartEvent.get(), &id);
1074         for (auto& track : m_textTracks.values()) {
1075             if (!strcmp(track->streamId().utf8().data(), id)) {
1076                 track->handleSample(sample);
1077                 found = true;
1078                 break;
1079             }
1080         }
1081         if (!found)
1082             GST_WARNING("Got sample with unknown stream ID %s.", id);
1083     } else
1084         GST_WARNING("Unable to handle sample with no stream start event.");
1085 }
1086 #endif
1087
1088 void MediaPlayerPrivateGStreamer::setRate(float rate)
1089 {
1090     // Higher rates cause crashes, so clamp to [-20, 20].
1091     rate = clampTo(rate, -20.0, 20.0);
1092
1093     // Avoid useless playback rate update.
1094     if (m_playbackRate == rate) {
1095         // But make sure the upper layers are notified if their rate is out of sync with ours.
1096
1097         if (!m_changingRate && m_player->rate() != m_playbackRate)
1098             m_player->rateChanged();
1099         return;
1100     }
1101
1102     if (isLiveStream()) {
1103         // Notify upper layers that we cannot handle the passed rate.
1104         m_changingRate = false;
1105         m_player->rateChanged();
1106         return;
1107     }
1108
1109     GstState state;
1110     GstState pending;
1111
1112     m_playbackRate = rate;
1113     m_changingRate = true;
1114
1115     gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
1116
1117     if (!rate) {
1118         m_changingRate = false;
1119         m_playbackRatePause = true;
1120         if (state != GST_STATE_PAUSED && pending != GST_STATE_PAUSED)
1121             changePipelineState(GST_STATE_PAUSED);
1122         return;
1123     }
1124
1125     if ((state != GST_STATE_PLAYING && state != GST_STATE_PAUSED)
1126         || (pending == GST_STATE_PAUSED))
1127         return;
1128
1129     updatePlaybackRate();
1130 }
1131
1132 double MediaPlayerPrivateGStreamer::rate() const
1133 {
1134     return m_playbackRate;
1135 }
1136
1137 void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
1138 {
1139     m_preservesPitch = preservesPitch;
1140 }
1141
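// Builds the buffered ranges from a percent-format buffering query on the pipeline,
// scaling each percentage range against the media duration. Falls back to a single
// [0, maxTimeLoaded()] range when the query yields no usable ranges.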
1142 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
1143 {
1144     auto timeRanges = std::make_unique<PlatformTimeRanges>();
1145     if (m_errorOccured || isLiveStream())
1146         return timeRanges;
1147
1148     MediaTime mediaDuration = durationMediaTime();
1149     if (!mediaDuration || mediaDuration.isPositiveInfinite())
1150         return timeRanges;
1151
1152     GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
1153
1154     if (!gst_element_query(m_pipeline.get(), query)) {
1155         gst_query_unref(query);
1156         return timeRanges;
1157     }
1158
1159     guint numBufferingRanges = gst_query_get_n_buffering_ranges(query);
1160     for (guint index = 0; index < numBufferingRanges; index++) {
1161         gint64 rangeStart = 0, rangeStop = 0;
1162         if (gst_query_parse_nth_buffering_range(query, index, &rangeStart, &rangeStop)) {
1163             uint64_t startTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStart, GST_FORMAT_PERCENT_MAX);
1164             uint64_t stopTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStop, GST_FORMAT_PERCENT_MAX);
1165             timeRanges->add(MediaTime(startTime, GST_SECOND), MediaTime(stopTime, GST_SECOND));
1166         }
1167     }
1168
1169     // Fall back to the more general maxTimeLoaded() if no range has
1170     // been found.
1171     if (!timeRanges->length()) {
1172         MediaTime loaded = maxTimeLoaded();
1173         if (loaded.isValid() && loaded)
1174             timeRanges->add(MediaTime::zeroTime(), loaded);
1175     }
1176
1177     gst_query_unref(query);
1178
1179     return timeRanges;
1180 }
1181
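// Central GstBus message handler: maps GStreamer errors to MediaPlayer network states
// and reacts to EOS, buffering, state changes, clock loss, missing plugins, HTTP headers
// and (for playbin3) stream collection/selection messages.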
1182 void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
1183 {
1184     GUniqueOutPtr<GError> err;
1185     GUniqueOutPtr<gchar> debug;
1186     MediaPlayer::NetworkState error;
1187     bool issueError = true;
1188     bool attemptNextLocation = false;
1189     const GstStructure* structure = gst_message_get_structure(message);
1190     GstState requestedState, currentState;
1191
1192     m_canFallBackToLastFinishedSeekPosition = false;
1193
1194     if (structure) {
1195         const gchar* messageTypeName = gst_structure_get_name(structure);
1196
1197         // Redirect messages are sent from elements, like qtdemux, to
1198         // notify of the new location(s) of the media.
1199         if (!g_strcmp0(messageTypeName, "redirect")) {
1200             mediaLocationChanged(message);
1201             return;
1202         }
1203     }
1204
1205     // We ignore state changes from internal elements. They are forwarded to playbin2 anyway.
1206     bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get());
1207
1208     GST_LOG_OBJECT(pipeline(), "Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
1209     switch (GST_MESSAGE_TYPE(message)) {
1210     case GST_MESSAGE_ERROR:
1211         if (m_resetPipeline || !m_missingPluginCallbacks.isEmpty() || m_errorOccured)
1212             break;
1213         gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
1214         GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());
1215
1216         GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");
1217
1218         error = MediaPlayer::Empty;
1219         if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_CODEC_NOT_FOUND)
1220             || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE)
1221             || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED)
1222             || g_error_matches(err.get(), GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)
1223             || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
1224             error = MediaPlayer::FormatError;
1225         else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
1226             // Let the mediaPlayerClient handle the stream error, in
1227             // this case the HTMLMediaElement will emit a stalled
1228             // event.
1229             GST_ERROR("Decode error, let the Media element emit a stalled event.");
1230             m_loadingStalled = true;
1231             break;
1232         } else if (err->domain == GST_STREAM_ERROR) {
1233             error = MediaPlayer::DecodeError;
1234             attemptNextLocation = true;
1235         } else if (err->domain == GST_RESOURCE_ERROR)
1236             error = MediaPlayer::NetworkError;
1237
1238         if (attemptNextLocation)
1239             issueError = !loadNextLocation();
1240         if (issueError) {
1241             m_errorOccured = true;
1242             if (m_networkState != error) {
1243                 m_networkState = error;
1244                 m_player->networkStateChanged();
1245             }
1246         }
1247         break;
1248     case GST_MESSAGE_EOS:
1249         didEnd();
1250         break;
1251     case GST_MESSAGE_ASYNC_DONE:
1252         if (!messageSourceIsPlaybin || m_delayingLoad)
1253             break;
1254         asyncStateChangeDone();
1255         break;
1256     case GST_MESSAGE_STATE_CHANGED: {
1257         if (!messageSourceIsPlaybin || m_delayingLoad)
1258             break;
1259         updateStates();
1260
1261         // Construct a filename for the graphviz dot file output.
1262         GstState newState;
1263         gst_message_parse_state_changed(message, &currentState, &newState, nullptr);
1264         CString dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), '.',
1265             gst_element_state_get_name(currentState), '_', gst_element_state_get_name(newState)).utf8();
1266         GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());
1267
1268         break;
1269     }
1270     case GST_MESSAGE_BUFFERING:
1271         processBufferingStats(message);
1272         break;
1273     case GST_MESSAGE_DURATION_CHANGED:
1274         // Duration in MSE is managed by MediaSource, SourceBuffer and AppendPipeline.
1275         if (messageSourceIsPlaybin && !isMediaSource())
1276             durationChanged();
1277         break;
1278     case GST_MESSAGE_REQUEST_STATE:
1279         gst_message_parse_request_state(message, &requestedState);
1280         gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND);
1281         if (requestedState < currentState) {
1282             GST_INFO_OBJECT(pipeline(), "Element %s requested state change to %s", GST_MESSAGE_SRC_NAME(message),
1283                 gst_element_state_get_name(requestedState));
1284             m_requestedState = requestedState;
1285             if (!changePipelineState(requestedState))
1286                 loadingFailed(MediaPlayer::Empty);
1287         }
1288         break;
1289     case GST_MESSAGE_CLOCK_LOST:
1290         // This can only happen in PLAYING state and we should just
1291         // get a new clock by moving back to PAUSED and then to
1292         // PLAYING again.
1293         // This can happen if the stream that ends in a sink that
1294         // provides the current clock disappears, for example if
1295         // the audio sink provides the clock and the audio stream
1296         // is disabled. It also happens relatively often with
1297         // HTTP adaptive streams when switching between different
1298         // variants of a stream.
1299         gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
1300         gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
1301         break;
1302     case GST_MESSAGE_LATENCY:
1303         // Recalculate the latency, we don't need any special handling
1304         // here other than the GStreamer default.
1305         // This can happen if the latency of live elements changes, or
1306         // for one reason or another a new live element is added or
1307         // removed from the pipeline.
1308         gst_bin_recalculate_latency(GST_BIN(m_pipeline.get()));
1309         break;
1310     case GST_MESSAGE_ELEMENT:
1311         if (gst_is_missing_plugin_message(message)) {
1312             if (gst_install_plugins_supported()) {
1313                 auto missingPluginCallback = MediaPlayerRequestInstallMissingPluginsCallback::create([weakThis = makeWeakPtr(*this)](uint32_t result, MediaPlayerRequestInstallMissingPluginsCallback& missingPluginCallback) {
1314                     if (!weakThis) {
1315                         GST_INFO("got missing plugin installation callback in destroyed player with result %u", result);
1316                         return;
1317                     }
1318
1319                     GST_DEBUG("got missing plugin installation callback with result %u", result);
1320                     RefPtr<MediaPlayerRequestInstallMissingPluginsCallback> protectedMissingPluginCallback = &missingPluginCallback;
1321                     weakThis->m_missingPluginCallbacks.removeFirst(protectedMissingPluginCallback);
1322                     if (result != GST_INSTALL_PLUGINS_SUCCESS)
1323                         return;
1324
1325                     weakThis->changePipelineState(GST_STATE_READY);
1326                     weakThis->changePipelineState(GST_STATE_PAUSED);
1327                 });
1328                 m_missingPluginCallbacks.append(missingPluginCallback.copyRef());
1329                 GUniquePtr<char> detail(gst_missing_plugin_message_get_installer_detail(message));
1330                 GUniquePtr<char> description(gst_missing_plugin_message_get_description(message));
1331                 m_player->client().requestInstallMissingPlugins(String::fromUTF8(detail.get()), String::fromUTF8(description.get()), missingPluginCallback.get());
1332             }
1333         }
1334 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
1335         else if (GstMpegtsSection* section = gst_message_parse_mpegts_section(message)) {
1336             processMpegTsSection(section);
1337             gst_mpegts_section_unref(section);
1338         }
1339 #endif
1340 #if ENABLE(ENCRYPTED_MEDIA)
1341         else if (gst_structure_has_name(structure, "drm-waiting-for-key")) {
1342             GST_DEBUG_OBJECT(pipeline(), "drm-waiting-for-key message from %s", GST_MESSAGE_SRC_NAME(message));
1343             setWaitingForKey(true);
1344             // FIXME: The decryptors should be able to attempt to decrypt after being created and linked in a pipeline but currently they are not and current
1345             // architecture does not make this very easy. Fortunately, the arch will change soon and it does not pay off to fix this now with something that could be
1346             // more convoluted. In the meantime, force attempt to decrypt when they get blocked.
1347             attemptToDecryptWithLocalInstance();
1348         } else if (gst_structure_has_name(structure, "drm-key-received")) {
1349             GST_DEBUG_OBJECT(pipeline(), "drm-key-received message from %s", GST_MESSAGE_SRC_NAME(message));
1350             setWaitingForKey(false);
1351         }
1352 #endif
1353         else if (gst_structure_has_name(structure, "http-headers")) {
1354             GST_DEBUG_OBJECT(pipeline(), "Processing HTTP headers: %" GST_PTR_FORMAT, structure);
1355             if (const char* uri = gst_structure_get_string(structure, "uri")) {
1356                 URL url(URL(), uri);
1357                 convertToInternalProtocol(url);
1358                 m_origins.add(SecurityOrigin::create(url));
1359
1360                 if (url != m_url) {
1361                     GST_DEBUG_OBJECT(pipeline(), "Ignoring HTTP response headers for non-main URI.");
1362                     break;
1363                 }
1364             }
1365             GUniqueOutPtr<GstStructure> responseHeaders;
1366             if (gst_structure_get(structure, "response-headers", GST_TYPE_STRUCTURE, &responseHeaders.outPtr(), nullptr)) {
1367                 CString contentLengthHeaderName = httpHeaderNameString(HTTPHeaderName::ContentLength).utf8();
1368                 uint64_t contentLength = 0;
1369                 if (!gst_structure_get_uint64(responseHeaders.get(), contentLengthHeaderName.data(), &contentLength)) {
1370                     // souphttpsrc sets a string for Content-Length, so
1371                     // handle it here, until we remove the webkit+ protocol
1372                     // prefix from webkitwebsrc.
1373                     if (const char* contentLengthAsString = gst_structure_get_string(responseHeaders.get(), contentLengthHeaderName.data())) {
1374                         contentLength = g_ascii_strtoull(contentLengthAsString, nullptr, 10);
1375                         if (contentLength == G_MAXUINT64)
1376                             contentLength = 0;
1377                     }
1378                 }
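                     // A missing (or zero) Content-Length response header is taken as a hint that the stream has no fixed
                     // size, so the player switches to streaming mode and re-evaluates whether on-disk download buffering
                     // should stay enabled.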
1379                 GST_INFO_OBJECT(pipeline(), "%s stream detected", !contentLength ? "Live" : "Non-live");
1380                 if (!contentLength) {
1381                     m_isStreaming = true;
1382                     setDownloadBuffering();
1383                 }
1384             }
1385         } else if (gst_structure_has_name(structure, "webkit-network-statistics")) {
1386             if (gst_structure_get(structure, "read-position", G_TYPE_UINT64, &m_networkReadPosition, "size", G_TYPE_UINT64, &m_httpResponseTotalSize, nullptr))
1387                 GST_DEBUG_OBJECT(pipeline(), "Updated network read position %" G_GUINT64_FORMAT ", size: %" G_GUINT64_FORMAT, m_networkReadPosition, m_httpResponseTotalSize);
1388         } else if (gst_structure_has_name(structure, "adaptive-streaming-statistics")) {
1389             if (WEBKIT_IS_WEB_SRC(m_source.get()) && !webkitGstCheckVersion(1, 12, 0)) {
1390                 if (const char* uri = gst_structure_get_string(structure, "uri"))
1391                     m_hasTaintedOrigin = webKitSrcWouldTaintOrigin(WEBKIT_WEB_SRC(m_source.get()), SecurityOrigin::create(URL(URL(), uri)));
1392             }
1393         } else
1394             GST_DEBUG_OBJECT(pipeline(), "Unhandled element message: %" GST_PTR_FORMAT, structure);
1395         break;
1396 #if ENABLE(VIDEO_TRACK)
1397     case GST_MESSAGE_TOC:
1398         processTableOfContents(message);
1399         break;
1400 #endif
1401     case GST_MESSAGE_TAG: {
1402         GstTagList* tags = nullptr;
1403         GUniqueOutPtr<gchar> tag;
1404         gst_message_parse_tag(message, &tags);
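             // Only the image-orientation tag is handled here. Its plain rotation values are mapped to the EXIF-style
             // origins used by ImageOrientation; flipped variants fall through unhandled.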
1405         if (gst_tag_list_get_string(tags, GST_TAG_IMAGE_ORIENTATION, &tag.outPtr())) {
1406             if (!g_strcmp0(tag.get(), "rotate-90"))
1407                 setVideoSourceOrientation(ImageOrientation(OriginRightTop));
1408             else if (!g_strcmp0(tag.get(), "rotate-180"))
1409                 setVideoSourceOrientation(ImageOrientation(OriginBottomRight));
1410             else if (!g_strcmp0(tag.get(), "rotate-270"))
1411                 setVideoSourceOrientation(ImageOrientation(OriginLeftBottom));
1412         }
1413         gst_tag_list_unref(tags);
1414         break;
1415     }
1416 #if GST_CHECK_VERSION(1, 10, 0)
1417     case GST_MESSAGE_STREAMS_SELECTED: {
1418         GRefPtr<GstStreamCollection> collection;
1419         gst_message_parse_streams_selected(message, &collection.outPtr());
1420
1421         if (!collection)
1422             break;
1423
1424         m_streamCollection.swap(collection);
1425         m_currentAudioStreamId = "";
1426         m_currentVideoStreamId = "";
1427         m_currentTextStreamId = "";
1428
1429         unsigned length = gst_message_streams_selected_get_size(message);
1430         for (unsigned i = 0; i < length; i++) {
1431             GRefPtr<GstStream> stream = gst_message_streams_selected_get_stream(message, i);
1432             if (!stream)
1433                 continue;
1434
1435             GstStreamType type = gst_stream_get_stream_type(stream.get());
1436             String streamId(gst_stream_get_stream_id(stream.get()));
1437
1438             GST_DEBUG_OBJECT(pipeline(), "Selecting %s track with ID: %s", gst_stream_type_get_name(type), streamId.utf8().data());
1439             // Playbin3 can send more than one selected stream of the same type
1440             // but there's no priority or ordering system in place, so we assume
1441             // the selected stream is the last one as reported by playbin3.
1442             if (type & GST_STREAM_TYPE_AUDIO) {
1443                 m_currentAudioStreamId = streamId;
1444                 auto track = m_audioTracks.get(m_currentAudioStreamId);
1445                 ASSERT(track);
1446                 track->markAsActive();
1447             } else if (type & GST_STREAM_TYPE_VIDEO) {
1448                 m_currentVideoStreamId = streamId;
1449                 auto track = m_videoTracks.get(m_currentVideoStreamId);
1450                 ASSERT(track);
1451                 track->markAsActive();
1452             } else if (type & GST_STREAM_TYPE_TEXT)
1453                 m_currentTextStreamId = streamId;
1454             else
1455                 GST_WARNING("Unknown stream type with stream-id %s", streamId.utf8().data());
1456         }
1457         break;
1458     }
1459 #endif
1460     default:
1461         GST_DEBUG_OBJECT(pipeline(), "Unhandled GStreamer message type: %s", GST_MESSAGE_TYPE_NAME(message));
1462         break;
1463     }
1464 }
1465
1466 void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message)
1467 {
1468     m_buffering = true;
1469     gst_message_parse_buffering(message, &m_bufferingPercentage);
1470
1471     GST_DEBUG_OBJECT(pipeline(), "[Buffering] Buffering: %d%%.", m_bufferingPercentage);
1472
1473     if (m_bufferingPercentage == 100)
1474         updateStates();
1475 }
1476
1477 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
1478 void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section)
1479 {
1480     ASSERT(section);
1481
1482     if (section->section_type == GST_MPEGTS_SECTION_PMT) {
1483         const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
1484         m_metadataTracks.clear();
1485         for (guint i = 0; i < pmt->streams->len; ++i) {
1486             const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
1487             if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
1488                 AtomString pid = String::number(stream->pid);
1489                 auto track = InbandMetadataTextTrackPrivateGStreamer::create(
1490                     InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data, pid);
1491
1492                 // 4.7.10.12.2 Sourcing in-band text tracks
1493                 // If the new text track's kind is metadata, then set the text track in-band metadata track dispatch
1494                 // type as follows, based on the type of the media resource:
1495                 // Let stream type be the value of the "stream_type" field describing the text track's type in the
1496                 // file's program map section, interpreted as an 8-bit unsigned integer. Let length be the value of
1497                 // the "ES_info_length" field for the track in the same part of the program map section, interpreted
1498                 // as an integer as defined by the MPEG-2 specification. Let descriptor bytes be the length bytes
1499                 // following the "ES_info_length" field. The text track in-band metadata track dispatch type must be
1500                 // set to the concatenation of the stream type byte and the zero or more descriptor bytes bytes,
1501                 // expressed in hexadecimal using uppercase ASCII hex digits.
1502                 String inbandMetadataTrackDispatchType;
1503                 appendUnsignedAsHexFixedSize(stream->stream_type, inbandMetadataTrackDispatchType, 2);
1504                 for (guint j = 0; j < stream->descriptors->len; ++j) {
1505                     const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
1506                     for (guint k = 0; k < descriptor->length; ++k)
1507                         appendByteAsHex(descriptor->data[k], inbandMetadataTrackDispatchType);
1508                 }
1509                 track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType);
1510
1511                 m_metadataTracks.add(pid, track);
1512                 m_player->addTextTrack(*track);
1513             }
1514         }
1515     } else {
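             // Sections other than the PMT are exposed as data cues on the metadata track previously registered for
             // their PID; sections for unknown PIDs are dropped.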
1516         AtomString pid = String::number(section->pid);
1517         RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = m_metadataTracks.get(pid);
1518         if (!track)
1519             return;
1520
1521         GRefPtr<GBytes> data = adoptGRef(gst_mpegts_section_get_data(section));
1522         gsize size;
1523         const void* bytes = g_bytes_get_data(data.get(), &size);
1524
1525         track->addDataCue(currentMediaTime(), currentMediaTime(), bytes, size);
1526     }
1527 }
1528 #endif
1529
1530 #if ENABLE(VIDEO_TRACK)
1531 void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
1532 {
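         // A TOC message replaces any previously exposed chapters track. Top-level entries and their nested
         // sub-entries are converted into generic cues below.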
1533     if (m_chaptersTrack)
1534         m_player->removeTextTrack(*m_chaptersTrack);
1535
1536     m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Chapters, InbandTextTrackPrivate::Generic);
1537     m_player->addTextTrack(*m_chaptersTrack);
1538
1539     GRefPtr<GstToc> toc;
1540     gboolean updated;
1541     gst_message_parse_toc(message, &toc.outPtr(), &updated);
1542     ASSERT(toc);
1543
1544     for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next)
1545         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1546 }
1547
1548 void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry)
1549 {
1550     ASSERT(entry);
1551
1552     auto cue = GenericCueData::create();
1553
1554     gint64 start = -1, stop = -1;
1555     gst_toc_entry_get_start_stop_times(entry, &start, &stop);
1556     if (start != -1)
1557         cue->setStartTime(MediaTime(start, GST_SECOND));
1558     if (stop != -1)
1559         cue->setEndTime(MediaTime(stop, GST_SECOND));
1560
1561     GstTagList* tags = gst_toc_entry_get_tags(entry);
1562     if (tags) {
1563         gchar* title = nullptr;
1564         gst_tag_list_get_string(tags, GST_TAG_TITLE, &title);
1565         if (title) {
1566             cue->setContent(title);
1567             g_free(title);
1568         }
1569     }
1570
1571     m_chaptersTrack->addGenericCue(cue);
1572
1573     for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next)
1574         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1575 }
1576
1577 void MediaPlayerPrivateGStreamer::purgeInvalidAudioTracks(Vector<String> validTrackIds)
1578 {
1579     m_audioTracks.removeIf([validTrackIds](auto& keyAndValue) {
1580         return !validTrackIds.contains(keyAndValue.key);
1581     });
1582 }
1583
1584 void MediaPlayerPrivateGStreamer::purgeInvalidVideoTracks(Vector<String> validTrackIds)
1585 {
1586     m_videoTracks.removeIf([validTrackIds](auto& keyAndValue) {
1587         return !validTrackIds.contains(keyAndValue.key);
1588     });
1589 }
1590
1591 void MediaPlayerPrivateGStreamer::purgeInvalidTextTracks(Vector<String> validTrackIds)
1592 {
1593     m_textTracks.removeIf([validTrackIds](auto& keyAndValue) {
1594         return !validTrackIds.contains(keyAndValue.key);
1595     });
1596 }
1597 #endif
1598
1599 void MediaPlayerPrivateGStreamer::fillTimerFired()
1600 {
1601     GRefPtr<GstQuery> query = adoptGRef(gst_query_new_buffering(GST_FORMAT_PERCENT));
1602     double fillStatus = 100.0;
1603
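         // Ask the pipeline how much of the on-disk download buffer is filled, in percent. If nothing in the pipeline
         // handles the buffering query (for instance, when no downloadbuffer element exists yet), fall back to an
         // estimate based on the network read position and total size gathered from webkit-network-statistics messages.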
1604     if (gst_element_query(m_pipeline.get(), query.get())) {
1605         int64_t stop;
1606         GstFormat format;
1607         gst_query_parse_buffering_range(query.get(), &format, nullptr, &stop, nullptr);
1608         ASSERT(format == GST_FORMAT_PERCENT);
1609
1610         if (stop != -1)
1611             fillStatus = 100.0 * stop / GST_FORMAT_PERCENT_MAX;
1612     } else if (m_httpResponseTotalSize) {
1613         GST_DEBUG_OBJECT(pipeline(), "[Buffering] Query failed, falling back to network read position estimation");
1614         fillStatus = 100.0 * (static_cast<double>(m_networkReadPosition) / static_cast<double>(m_httpResponseTotalSize));
1615     } else {
1616         GST_DEBUG_OBJECT(pipeline(), "[Buffering] Unable to determine on-disk buffering status");
1617         return;
1618     }
1619
1620     GST_DEBUG_OBJECT(pipeline(), "[Buffering] Download buffer filled up to %f%%", fillStatus);
1621
1622     MediaTime mediaDuration = durationMediaTime();
1623
1624     // Update maxTimeLoaded only if the media duration is
1625     // available. Otherwise we can't compute it.
1626     if (mediaDuration) {
1627         if (fillStatus == 100.0)
1628             m_maxTimeLoaded = mediaDuration;
1629         else
1630             m_maxTimeLoaded = MediaTime(fillStatus * static_cast<double>(toGstUnsigned64Time(mediaDuration)) / 100, GST_SECOND);
1631         GST_DEBUG_OBJECT(pipeline(), "[Buffering] Updated maxTimeLoaded: %s", toString(m_maxTimeLoaded).utf8().data());
1632     }
1633
1634     m_downloadFinished = fillStatus == 100.0;
1635     if (!m_downloadFinished) {
1636         updateStates();
1637         return;
1638     }
1639
1640     // Media is now fully loaded. It will play even if network
1641     // connection is cut. Buffering is done, remove the fill source
1642     // from the main loop.
1643     m_fillTimer.stop();
1644     updateStates();
1645 }
1646
1647 MediaTime MediaPlayerPrivateGStreamer::maxMediaTimeSeekable() const
1648 {
1649     GST_TRACE_OBJECT(pipeline(), "errorOccured: %s, isLiveStream: %s", boolForPrinting(m_errorOccured), boolForPrinting(isLiveStream()));
1650     if (m_errorOccured)
1651         return MediaTime::zeroTime();
1652
1653     if (isLiveStream())
1654         return MediaTime::zeroTime();
1655
1656     MediaTime duration = durationMediaTime();
1657     GST_DEBUG_OBJECT(pipeline(), "maxMediaTimeSeekable, duration: %s", toString(duration).utf8().data());
1658     // infinite duration means live stream
1659     if (duration.isPositiveInfinite())
1660         return MediaTime::zeroTime();
1661
1662     return duration;
1663 }
1664
1665 MediaTime MediaPlayerPrivateGStreamer::maxTimeLoaded() const
1666 {
1667     if (m_errorOccured)
1668         return MediaTime::zeroTime();
1669
1670     MediaTime loaded = m_maxTimeLoaded;
1671     if (m_isEndReached)
1672         loaded = durationMediaTime();
1673     GST_LOG("maxTimeLoaded: %s", toString(loaded).utf8().data());
1674     return loaded;
1675 }
1676
1677 bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
1678 {
1679     if (m_errorOccured || m_loadingStalled)
1680         return false;
1681
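     // For HTTP playback through webkitwebsrc, progress is detected by comparing the network read position with the
     // value observed on the previous call; other sources fall back to comparing maxTimeLoaded() between calls.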
1682     if (WEBKIT_IS_WEB_SRC(m_source.get())) {
1683         GST_LOG_OBJECT(pipeline(), "Last network read position: %" G_GUINT64_FORMAT ", current: %" G_GUINT64_FORMAT, m_readPositionAtLastDidLoadingProgress, m_networkReadPosition);
1684         bool didLoadingProgress = m_readPositionAtLastDidLoadingProgress != m_networkReadPosition;
1685         m_readPositionAtLastDidLoadingProgress = m_networkReadPosition;
1686         return didLoadingProgress;
1687     }
1688
1689     if (UNLIKELY(!m_pipeline || !durationMediaTime() || (!isMediaSource() && !totalBytes())))
1690         return false;
1691
1692     MediaTime currentMaxTimeLoaded = maxTimeLoaded();
1693     bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
1694     m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
1695     GST_LOG_OBJECT(pipeline(), "didLoadingProgress: %s", boolForPrinting(didLoadingProgress));
1696     return didLoadingProgress;
1697 }
1698
1699 unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
1700 {
1701     if (m_errorOccured)
1702         return 0;
1703
1704     if (m_totalBytes)
1705         return m_totalBytes;
1706
1707     if (!m_source)
1708         return 0;
1709
1710     if (isLiveStream())
1711         return 0;
1712
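     // First try a BYTES duration query on the source element. A zero result is interpreted as a streaming source
     // with no known size.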
1713     GstFormat fmt = GST_FORMAT_BYTES;
1714     gint64 length = 0;
1715     if (gst_element_query_duration(m_source.get(), fmt, &length)) {
1716         GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
1717         m_totalBytes = static_cast<unsigned long long>(length);
1718         m_isStreaming = !length;
1719         return m_totalBytes;
1720     }
1721
1722     // Fall back to querying the source pads manually.
1723     // See also https://bugzilla.gnome.org/show_bug.cgi?id=638749
1724     GstIterator* iter = gst_element_iterate_src_pads(m_source.get());
1725     bool done = false;
1726     while (!done) {
1727         GValue item = G_VALUE_INIT;
1728         switch (gst_iterator_next(iter, &item)) {
1729         case GST_ITERATOR_OK: {
1730             GstPad* pad = static_cast<GstPad*>(g_value_get_object(&item));
1731             gint64 padLength = 0;
1732             if (gst_pad_query_duration(pad, fmt, &padLength) && padLength > length)
1733                 length = padLength;
1734             break;
1735         }
1736         case GST_ITERATOR_RESYNC:
1737             gst_iterator_resync(iter);
1738             break;
1739         case GST_ITERATOR_ERROR:
1740             FALLTHROUGH;
1741         case GST_ITERATOR_DONE:
1742             done = true;
1743             break;
1744         }
1745
1746         g_value_unset(&item);
1747     }
1748
1749     gst_iterator_free(iter);
1750
1751     GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
1752     m_totalBytes = static_cast<unsigned long long>(length);
1753     m_isStreaming = !length;
1754     return m_totalBytes;
1755 }
1756
1757 void MediaPlayerPrivateGStreamer::sourceSetupCallback(MediaPlayerPrivateGStreamer* player, GstElement* sourceElement)
1758 {
1759     player->sourceSetup(sourceElement);
1760 }
1761
1762 void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GstElement* element, MediaPlayerPrivateGStreamer* player)
1763 {
1764     if (g_strcmp0(G_OBJECT_TYPE_NAME(element), "GstDownloadBuffer"))
1765         return;
1766
1767     player->m_downloadBuffer = element;
1768     g_signal_handlers_disconnect_by_func(bin, reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), player);
1769     g_signal_connect_swapped(element, "notify::temp-location", G_CALLBACK(downloadBufferFileCreatedCallback), player);
1770
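     // Redirect the downloadbuffer element's temporary files to a WebKit-specific template under /var/tmp, and purge
     // any stale files left behind by the previous template.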
1771     GUniqueOutPtr<char> oldDownloadTemplate;
1772     g_object_get(element, "temp-template", &oldDownloadTemplate.outPtr(), nullptr);
1773
1774     GUniquePtr<char> newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, "var", "tmp", "WebKit-Media-XXXXXX", nullptr));
1775     g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr);
1776     GST_DEBUG_OBJECT(player->pipeline(), "Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get());
1777
1778     player->purgeOldDownloadFiles(oldDownloadTemplate.get());
1779 }
1780
1781 void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer* player)
1782 {
1783     ASSERT(player->m_downloadBuffer);
1784
1785     g_signal_handlers_disconnect_by_func(player->m_downloadBuffer.get(), reinterpret_cast<gpointer>(downloadBufferFileCreatedCallback), player);
1786
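     // Unlink the temporary file as soon as downloadbuffer reports having created it. The element keeps its file
     // descriptor open, so the on-disk cache keeps working while the file no longer appears in the filesystem and its
     // storage is reclaimed automatically once the descriptor is closed.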
1787     GUniqueOutPtr<char> downloadFile;
1788     g_object_get(player->m_downloadBuffer.get(), "temp-location", &downloadFile.outPtr(), nullptr);
1789     player->m_downloadBuffer = nullptr;
1790
1791     if (UNLIKELY(!FileSystem::deleteFile(downloadFile.get()))) {
1792         GST_WARNING("Couldn't unlink media temporary file %s after creation", downloadFile.get());
1793         return;
1794     }
1795
1796     GST_DEBUG_OBJECT(player->pipeline(), "Unlinked media temporary file %s after creation", downloadFile.get());
1797 }
1798
1799 void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate)
1800 {
1801     if (!downloadFileTemplate)
1802         return;
1803
1804     GUniquePtr<char> templatePath(g_path_get_dirname(downloadFileTemplate));
1805     GUniquePtr<char> templateFile(g_path_get_basename(downloadFileTemplate));
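     // Turn the mkstemp-style template (for example, WebKit-Media-XXXXXX) into a glob pattern by replacing the X
     // placeholders with '?' wildcards, so files created from it in earlier sessions can be matched and removed.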
1806     String templatePattern = String(templateFile.get()).replace("X", "?");
1807
1808     for (auto& filePath : FileSystem::listDirectory(templatePath.get(), templatePattern)) {
1809         if (UNLIKELY(!FileSystem::deleteFile(filePath))) {
1810             GST_WARNING("Couldn't unlink legacy media temporary file: %s", filePath.utf8().data());
1811             continue;
1812         }
1813
1814         GST_TRACE("Unlinked legacy media temporary file: %s", filePath.utf8().data());
1815     }
1816 }
1817
1818 void MediaPlayerPrivateGStreamer::sourceSetup(GstElement* sourceElement)
1819 {
1820     GST_DEBUG_OBJECT(pipeline(), "Source element set-up for %s", GST_ELEMENT_NAME(sourceElement));
1821
1822     if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
1823         g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);
1824
1825     m_source = sourceElement;
1826
1827     if (WEBKIT_IS_WEB_SRC(m_source.get())) {
1828         webKitWebSrcSetMediaPlayer(WEBKIT_WEB_SRC(m_source.get()), m_player);
1829         g_signal_connect(GST_ELEMENT_PARENT(m_source.get()), "element-added", G_CALLBACK(uriDecodeBinElementAddedCallback), this);
1830 #if ENABLE(MEDIA_STREAM) && GST_CHECK_VERSION(1, 10, 0)
1831     } else if (WEBKIT_IS_MEDIA_STREAM_SRC(sourceElement)) {
1832         auto stream = m_streamPrivate.get();
1833         ASSERT(stream);
1834         webkitMediaStreamSrcSetStream(WEBKIT_MEDIA_STREAM_SRC(sourceElement), stream);
1835 #endif
1836     }
1837 }
1838
1839 bool MediaPlayerPrivateGStreamer::hasSingleSecurityOrigin() const
1840 {
1841     if (!m_source)
1842         return false;
1843
1844     if (!WEBKIT_IS_WEB_SRC(m_source.get()))
1845         return true;
1846
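     // Compare the URI originally requested with the one the source actually resolved to after any redirects. If they
     // differ, playback is still considered single-origin as long as both URLs share the same scheme, host and port.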
1847     GUniqueOutPtr<char> originalURI, resolvedURI;
1848     g_object_get(m_source.get(), "location", &originalURI.outPtr(), "resolved-location", &resolvedURI.outPtr(), nullptr);
1849     if (!originalURI || !resolvedURI)
1850         return false;
1851     if (!g_strcmp0(originalURI.get(), resolvedURI.get()))
1852         return true;
1853
1854     Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::createFromString(String::fromUTF8(resolvedURI.get())));
1855     Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(String::fromUTF8(originalURI.get())));
1856     return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
1857 }
1858
1859 void MediaPlayerPrivateGStreamer::cancelLoad()
1860 {
1861     if (m_networkState < MediaPlayer::Loading || m_networkState == MediaPlayer::Loaded)
1862         return;
1863
1864     if (m_pipeline)
1865         changePipelineState(GST_STATE_READY);
1866 }
1867
1868 void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
1869 {
1870     if (!m_pipeline || m_errorOccured)
1871         return;
1872
1873     if (m_seeking) {
1874         if (m_seekIsPending)
1875             updateStates();
1876         else {
1877             GST_DEBUG_OBJECT(pipeline(), "[Seek] seeked to %s", toString(m_seekTime).utf8().data());
1878             m_seeking = false;
1879             m_cachedPosition = MediaTime::invalidTime();
1880             if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek.isValid()) {
1881                 seek(m_timeOfOverlappingSeek);
1882                 m_timeOfOverlappingSeek = MediaTime::invalidTime();
1883                 return;
1884             }
1885             m_timeOfOverlappingSeek = MediaTime::invalidTime();
1886
1887             // The pipeline can still have a pending state. In this case a position query will fail.
1888             // Right now we can use m_seekTime as a fallback.
1889             m_canFallBackToLastFinishedSeekPosition = true;
1890             timeChanged();
1891         }
1892     } else
1893         updateStates();
1894 }
1895
1896 void MediaPlayerPrivateGStreamer::updateStates()
1897 {
1898     if (!m_pipeline)
1899         return;
1900
1901     if (m_errorOccured)
1902         return;
1903
1904     MediaPlayer::NetworkState oldNetworkState = m_networkState;
1905     MediaPlayer::ReadyState oldReadyState = m_readyState;
1906     GstState pending;
1907     GstState state;
1908     bool stateReallyChanged = false;
1909
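     // Poll the pipeline state with a very short timeout so this call never blocks the main thread for long. A
     // GST_STATE_CHANGE_ASYNC result means a state change is still in progress and is handled in the switch below.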
1910     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
1911     if (state != m_currentState) {
1912         m_oldState = m_currentState;
1913         m_currentState = state;
1914         stateReallyChanged = true;
1915     }
1916
1917     bool shouldUpdatePlaybackState = false;
1918     switch (getStateResult) {
1919     case GST_STATE_CHANGE_SUCCESS: {
1920         GST_DEBUG_OBJECT(pipeline(), "State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
1921
1922         // Do nothing if we are at EOS and the state changed to READY, to avoid recreating the player
1923         // in HTMLMediaElement and to let the video 'ended' event be generated properly.
1924         if (m_isEndReached && m_currentState == GST_STATE_READY)
1925             break;
1926
1927         m_resetPipeline = m_currentState <= GST_STATE_READY;
1928
1929         bool didBuffering = m_buffering;
1930
1931         // Update ready and network states.
1932         switch (m_currentState) {
1933         case GST_STATE_NULL:
1934             m_readyState = MediaPlayer::HaveNothing;
1935             m_networkState = MediaPlayer::Empty;
1936             break;
1937         case GST_STATE_READY:
1938             m_readyState = MediaPlayer::HaveMetadata;
1939             m_networkState = MediaPlayer::Empty;
1940             break;
1941         case GST_STATE_PAUSED:
1942         case GST_STATE_PLAYING:
1943             if (m_buffering) {
1944                 if (m_bufferingPercentage == 100) {
1945                     GST_DEBUG_OBJECT(pipeline(), "[Buffering] Complete.");
1946                     m_buffering = false;
1947                     m_readyState = MediaPlayer::HaveEnoughData;
1948                     m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
1949                 } else {
1950                     m_readyState = MediaPlayer::HaveCurrentData;
1951                     m_networkState = MediaPlayer::Loading;
1952                 }
1953             } else if (m_downloadFinished) {
1954                 m_readyState = MediaPlayer::HaveEnoughData;
1955                 m_networkState = MediaPlayer::Loaded;
1956             } else {
1957                 m_readyState = MediaPlayer::HaveFutureData;
1958                 m_networkState = MediaPlayer::Loading;
1959             }
1960
1961             break;
1962         default:
1963             ASSERT_NOT_REACHED();
1964             break;
1965         }
1966
1967         // Sync states where needed.
1968         if (m_currentState == GST_STATE_PAUSED) {
1969             if (!m_volumeAndMuteInitialized) {
1970                 notifyPlayerOfVolumeChange();
1971                 notifyPlayerOfMute();
1972                 m_volumeAndMuteInitialized = true;
1973             }
1974
1975             if (didBuffering && !m_buffering && !m_paused && m_playbackRate) {
1976                 GST_DEBUG_OBJECT(pipeline(), "[Buffering] Restarting playback.");
1977                 changePipelineState(GST_STATE_PLAYING);
1978             }
1979         } else if (m_currentState == GST_STATE_PLAYING) {
1980             m_paused = false;
1981
1982             if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
1983                 GST_DEBUG_OBJECT(pipeline(), "[Buffering] Pausing stream for buffering.");
1984                 changePipelineState(GST_STATE_PAUSED);
1985             }
1986         } else
1987             m_paused = true;
1988
1989         GST_DEBUG_OBJECT(pipeline(), "Old state: %s, new state: %s (requested: %s)", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState), gst_element_state_get_name(m_requestedState));
1990         if (m_requestedState == GST_STATE_PAUSED && m_currentState == GST_STATE_PAUSED) {
1991             shouldUpdatePlaybackState = true;
1992             GST_INFO_OBJECT(pipeline(), "Requested state change to %s was completed", gst_element_state_get_name(m_currentState));
1993         }
1994
1995         // Emit play state change notification only when going to PLAYING so that
1996         // the media element gets a chance to enable its page sleep disabler.
1997         // Emitting this notification in more cases triggers unwanted code paths
1998         // and test timeouts.
1999         if (stateReallyChanged && (m_oldState != m_currentState) && (m_oldState == GST_STATE_PAUSED && m_currentState == GST_STATE_PLAYING)) {
2000             GST_INFO_OBJECT(pipeline(), "Playback state changed from %s to %s. Notifying the media player client", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState));
2001             shouldUpdatePlaybackState = true;
2002         }
2003
2004         break;
2005     }
2006     case GST_STATE_CHANGE_ASYNC:
2007         GST_DEBUG_OBJECT(pipeline(), "Async: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
2008         // Change in progress.
2009         break;
2010     case GST_STATE_CHANGE_FAILURE:
2011         GST_DEBUG_OBJECT(pipeline(), "Failure: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
2012         // Change failed
2013         return;
2014     case GST_STATE_CHANGE_NO_PREROLL:
2015         GST_DEBUG_OBJECT(pipeline(), "No preroll: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
2016
2017         // Live pipelines go in PAUSED without prerolling.
2018         m_isStreaming = true;
2019         setDownloadBuffering();
2020
2021         if (m_currentState == GST_STATE_READY)
2022             m_readyState = MediaPlayer::HaveNothing;
2023         else if (m_currentState == GST_STATE_PAUSED) {
2024             m_readyState = MediaPlayer::HaveEnoughData;
2025             m_paused = true;
2026         } else if (m_currentState == GST_STATE_PLAYING)
2027             m_paused = false;
2028
2029         if (!m_paused && m_playbackRate)
2030             changePipelineState(GST_STATE_PLAYING);
2031
2032         m_networkState = MediaPlayer::Loading;
2033         break;
2034     default:
2035         GST_DEBUG_OBJECT(pipeline(), "Unhandled state change return: %d", getStateResult);
2036         break;
2037     }
2038
2039     m_requestedState = GST_STATE_VOID_PENDING;
2040
2041     if (shouldUpdatePlaybackState)
2042         m_player->playbackStateChanged();
2043
2044     if (m_networkState != oldNetworkState) {
2045         GST_DEBUG_OBJECT(pipeline(), "Network State Changed from %s to %s", convertEnumerationToString(oldNetworkState).utf8().data(), convertEnumerationToString(m_networkState).utf8().data());
2046         m_player->networkStateChanged();
2047     }
2048     if (m_readyState != oldReadyState) {
2049         GST_DEBUG_OBJECT(pipeline(), "Ready State Changed from %s to %s", convertEnumerationToString(oldReadyState).utf8().data(), convertEnumerationToString(m_readyState).utf8().data());
2050         m_player->readyStateChanged();
2051     }
2052
2053     if (getStateResult == GST_STATE_CHANGE_SUCCESS && m_currentState >= GST_STATE_PAUSED) {
2054         updatePlaybackRate();
2055         if (m_seekIsPending) {
2056             GST_DEBUG_OBJECT(pipeline(), "[Seek] committing pending seek to %s", toString(m_seekTime).utf8().data());
2057             m_seekIsPending = false;
2058             m_seeking = doSeek(m_seekTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
2059             if (!m_seeking) {
2060                 m_cachedPosition = MediaTime::invalidTime();
2061                 GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(m_seekTime).utf8().data());
2062             }
2063         }
2064     }
2065 }
2066
2067 bool MediaPlayerPrivateGStreamer::handleSyncMessage(GstMessage* message)
2068 {
2069 #if GST_CHECK_VERSION(1, 10, 0)
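     // Stream collections are only posted by playbin3 (the non-legacy path). Cache the collection and schedule a
     // track update on the main thread, since sync messages are delivered on the thread that posted them.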
2070     if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_STREAM_COLLECTION && !m_isLegacyPlaybin) {
2071         GRefPtr<GstStreamCollection> collection;
2072         gst_message_parse_stream_collection(message, &collection.outPtr());
2073
2074         if (collection) {
2075             m_streamCollection.swap(collection);
2076             m_notifier->notify(MainThreadNotification::StreamCollectionChanged, [this] {
2077                 this->updateTracks();
2078             });
2079         }
2080     }
2081 #endif
2082
2083     return MediaPlayerPrivateGStreamerBase::handleSyncMessage(message);
2084 }
2085
2086 void MediaPlayerPrivateGStreamer::mediaLocationChanged(GstMessage* message)
2087 {
2088     if (m_mediaLocations)
2089         gst_structure_free(m_mediaLocations);
2090
2091     const GstStructure* structure = gst_message_get_structure(message);
2092     if (structure) {
2093         // This structure can contain:
2094         // - both a new-location string and embedded locations structure
2095         // - or only a new-location string.
2096         m_mediaLocations = gst_structure_copy(structure);
2097         const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2098
2099         if (locations)
2100             m_mediaLocationCurrentIndex = static_cast<int>(gst_value_list_get_size(locations)) - 1;
2101
2102         loadNextLocation();
2103     }
2104 }
2105
2106 bool MediaPlayerPrivateGStreamer::loadNextLocation()
2107 {
2108     if (!m_mediaLocations)
2109         return false;
2110
2111     const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2112     const gchar* newLocation = nullptr;
2113
2114     if (!locations) {
2115         // Fallback on new-location string.
2116         newLocation = gst_structure_get_string(m_mediaLocations, "new-location");
2117         if (!newLocation)
2118             return false;
2119     }
2120
2121     if (!newLocation) {
2122         if (m_mediaLocationCurrentIndex < 0) {
                 gst_structure_free(m_mediaLocations);
2123             m_mediaLocations = nullptr;
2124             return false;
2125         }
2126
2127         const GValue* location = gst_value_list_get_value(locations, m_mediaLocationCurrentIndex);
2128         const GstStructure* structure = gst_value_get_structure(location);
2129
2130         if (!structure) {
2131             m_mediaLocationCurrentIndex--;
2132             return false;
2133         }
2134
2135         newLocation = gst_structure_get_string(structure, "new-location");
2136     }
2137
2138     if (newLocation) {
2139         // Found a candidate. new-location is not always an absolute URL,
2140         // though, so when needed resolve it against the base of the
2141         // current URL.
2142         URL baseUrl = gst_uri_is_valid(newLocation) ? URL() : m_url;
2143         URL newUrl = URL(baseUrl, newLocation);
2144
2145         auto securityOrigin = SecurityOrigin::create(m_url);
2146         if (securityOrigin->canRequest(newUrl)) {
2147             GST_INFO_OBJECT(pipeline(), "New media url: %s", newUrl.string().utf8().data());
2148
2149             // Reset player states.
2150             m_networkState = MediaPlayer::Loading;
2151             m_player->networkStateChanged();
2152             m_readyState = MediaPlayer::HaveNothing;
2153             m_player->readyStateChanged();
2154
2155             // Reset pipeline state.
2156             m_resetPipeline = true;
2157             changePipelineState(GST_STATE_READY);
2158
2159             GstState state;
2160             gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
2161             if (state <= GST_STATE_READY) {
2162                 // Set the new uri and start playing.
2163                 setPlaybinURL(newUrl);
2164                 changePipelineState(GST_STATE_PLAYING);
2165                 return true;
2166             }
2167         } else
2168             GST_INFO_OBJECT(pipeline(), "Not allowed to load new media location: %s", newUrl.string().utf8().data());
2169     }
2170     m_mediaLocationCurrentIndex--;
2171     return false;
2172 }
2173
2174 void MediaPlayerPrivateGStreamer::loadStateChanged()
2175 {
2176     updateStates();
2177 }
2178
2179 void MediaPlayerPrivateGStreamer::timeChanged()
2180 {
2181     updateStates();
2182     m_player->timeChanged();
2183 }
2184
2185 void MediaPlayerPrivateGStreamer::didEnd()
2186 {
2187     GST_INFO_OBJECT(pipeline(), "Playback ended");
2188
2189     // Synchronize position and duration values to avoid confusing the
2190     // HTMLMediaElement. In some cases, such as reverse playback, the
2191     // position is not always reported as 0 at the end of the stream.
2192     m_cachedPosition = MediaTime::invalidTime();
2193     MediaTime now = currentMediaTime();
2194     if (now > MediaTime::zeroTime() && !m_seeking) {
2195         m_cachedDuration = now;
2196         m_player->durationChanged();
2197     }
2198
2199     m_isEndReached = true;
2200
2201     if (!m_player->client().mediaPlayerIsLooping()) {
2202         m_paused = true;
2203         changePipelineState(GST_STATE_READY);
2204         m_downloadFinished = false;
2205     }
2206     timeChanged();
2207 }
2208
2209 void MediaPlayerPrivateGStreamer::durationChanged()
2210 {
2211     MediaTime previousDuration = durationMediaTime();
2212     m_cachedDuration = MediaTime::invalidTime();
2213
2214     // Avoid emitting "durationchanged" in the case where the previous
2215     // duration was 0, because that case is already handled by the
2216     // HTMLMediaElement.
2217     if (previousDuration && durationMediaTime() != previousDuration)
2218         m_player->durationChanged();
2219 }
2220
2221 void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState networkError, MediaPlayer::ReadyState readyState, bool forceNotifications)
2222 {
2223     GST_WARNING("Loading failed, error: %s", convertEnumerationToString(networkError).utf8().data());
2224
2225     m_errorOccured = true;
2226     if (forceNotifications || m_networkState != networkError) {
2227         m_networkState = networkError;
2228         m_player->networkStateChanged();
2229     }
2230     if (forceNotifications || m_readyState != readyState) {
2231         m_readyState = readyState;
2232         m_player->readyStateChanged();
2233     }
2234
2235     // Loading failed, remove ready timer.
2236     m_readyTimerHandler.stop();
2237 }
2238
2239 void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
2240 {
2241     auto& gstRegistryScanner = GStreamerRegistryScanner::singleton();
2242     types = gstRegistryScanner.mimeTypeSet();
2243 }
2244
2245 MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::supportsType(const MediaEngineSupportParameters& parameters)
2246 {
2247     MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
2248 #if ENABLE(MEDIA_SOURCE)
2249     // MediaPlayerPrivateGStreamerMSE is in charge of mediasource playback, not us.
2250     if (parameters.isMediaSource)
2251         return result;
2252 #endif
2253
2254 #if !ENABLE(MEDIA_STREAM) || !GST_CHECK_VERSION(1, 10, 0)
2255     if (parameters.isMediaStream)
2256         return result;
2257 #endif
2258
2259     if (parameters.type.isEmpty())
2260         return result;
2261
2262     GST_DEBUG("Checking mime-type \"%s\"", parameters.type.raw().utf8().data());
2263     auto containerType = parameters.type.containerType();
2264     auto& gstRegistryScanner = GStreamerRegistryScanner::singleton();
2265     if (gstRegistryScanner.isContainerTypeSupported(containerType)) {
2266         // Spec says we should not return "probably" if the codecs string is empty.
2267         Vector<String> codecs = parameters.type.codecs();
2268         result = codecs.isEmpty() ? MediaPlayer::MayBeSupported : (gstRegistryScanner.areAllCodecsSupported(codecs) ? MediaPlayer::IsSupported : MediaPlayer::IsNotSupported);
2269     }
2270
2271     auto finalResult = extendedSupportsType(parameters, result);
2272     GST_DEBUG("Supported: %s", convertEnumerationToString(finalResult).utf8().data());
2273     return finalResult;
2274 }
2275
2276 void MediaPlayerPrivateGStreamer::setDownloadBuffering()
2277 {
2278     if (!m_pipeline)
2279         return;
2280
2281     unsigned flags;
2282     g_object_get(m_pipeline.get(), "flags", &flags, nullptr);
2283
2284     unsigned flagDownload = getGstPlayFlag("download");
2285
2286     // We don't want to stop downloading if we already started it.
2287     if (flags & flagDownload && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline) {
2288         GST_DEBUG_OBJECT(pipeline(), "Download already started, not starting again");
2289         return;
2290     }
2291
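     // On-disk buffering is only enabled for non-live media when the preload hint is "auto"; the fill timer then
     // polls download progress every 200ms.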
2292     bool shouldDownload = !isLiveStream() && m_preload == MediaPlayer::Auto;
2293     if (shouldDownload) {
2294         GST_INFO_OBJECT(pipeline(), "Enabling on-disk buffering");
2295         g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
2296         m_fillTimer.startRepeating(200_ms);
2297     } else {
2298         GST_INFO_OBJECT(pipeline(), "Disabling on-disk buffering");
2299         g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr);
2300         m_fillTimer.stop();
2301     }
2302 }
2303
2304 void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
2305 {
2306     GST_DEBUG_OBJECT(pipeline(), "Setting preload to %s", convertEnumerationToString(preload).utf8().data());
2307     if (preload == MediaPlayer::Auto && isLiveStream())
2308         return;
2309
2310     m_preload = preload;
2311     setDownloadBuffering();
2312
2313     if (m_delayingLoad && m_preload != MediaPlayer::None) {
2314         m_delayingLoad = false;
2315         commitLoad();
2316     }
2317 }
2318
2319 GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
2320 {
2321     m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr);
2322     if (!m_autoAudioSink) {
2323         GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation");
2324         return nullptr;
2325     }
2326
2327     g_signal_connect_swapped(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);
2328
2329 #if ENABLE(WEB_AUDIO)
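     // When Web Audio is enabled, the audio sink is wrapped in a bin configured by the AudioSourceProvider so that
     // the rendered audio can also be exposed to the Web Audio graph.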
2330     GstElement* audioSinkBin = gst_bin_new("audio-sink");
2331     ensureAudioSourceProvider();
2332     m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
2333     return audioSinkBin;
2334 #else
2335     return m_autoAudioSink.get();
2336 #endif
2337 }
2338
2339 GstElement* MediaPlayerPrivateGStreamer::audioSink() const
2340 {
2341     GstElement* sink;
2342     g_object_get(m_pipeline.get(), "audio-sink", &sink, nullptr);
2343     return sink;
2344 }
2345
2346 #if ENABLE(WEB_AUDIO)
2347 void MediaPlayerPrivateGStreamer::ensureAudioSourceProvider()
2348 {
2349     if (!m_audioSourceProvider)
2350         m_audioSourceProvider = std::make_unique<AudioSourceProviderGStreamer>();
2351 }
2352
2353 AudioSourceProvider* MediaPlayerPrivateGStreamer::audioSourceProvider()
2354 {
2355     ensureAudioSourceProvider();
2356     return m_audioSourceProvider.get();
2357 }
2358 #endif
2359
2360 void MediaPlayerPrivateGStreamer::createGSTPlayBin(const URL& url, const String& pipelineName)
2361 {
2362     const gchar* playbinName = "playbin";
2363
2364     // MSE doesn't support playbin3. Mediastream requires playbin3. Regular
2365     // playback can use playbin3 on-demand with the WEBKIT_GST_USE_PLAYBIN3
2366     // environment variable.
2367 #if GST_CHECK_VERSION(1, 10, 0)
2368     if ((!isMediaSource() && g_getenv("WEBKIT_GST_USE_PLAYBIN3")) || url.protocolIs("mediastream"))
2369         playbinName = "playbin3";
2370 #endif
2371
2372     if (m_pipeline) {
2373         if (!g_strcmp0(GST_OBJECT_NAME(gst_element_get_factory(m_pipeline.get())), playbinName)) {
2374             GST_INFO_OBJECT(pipeline(), "Already using %s", playbinName);
2375             return;
2376         }
2377
2378         GST_INFO_OBJECT(pipeline(), "Tearing down as we need to use %s now.", playbinName);
2379         changePipelineState(GST_STATE_NULL);
2380         m_pipeline = nullptr;
2381     }
2382
2383     ASSERT(!m_pipeline);
2384
2385     m_isLegacyPlaybin = !g_strcmp0(playbinName, "playbin");
2386
2387     // gst_element_factory_make() returns a floating reference so
2388     // we should not adopt.
2389     static Atomic<uint32_t> pipelineId;
2390     setPipeline(gst_element_factory_make(playbinName,
2391         (pipelineName.isEmpty() ? makeString("media-player-", pipelineId.exchangeAdd(1)) : pipelineName).utf8().data()));
2392     setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get()));
2393
2394     GST_INFO_OBJECT(pipeline(), "Using legacy playbin element: %s", boolForPrinting(m_isLegacyPlaybin));
2395
2396     // Also let other listeners subscribe to (application) messages on this bus.
2397     GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
2398     gst_bus_add_signal_watch_full(bus.get(), RunLoopSourcePriority::RunLoopDispatcher);
2399     g_signal_connect(bus.get(), "message", G_CALLBACK(busMessageCallback), this);
2400
2401     g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);
2402
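     // Watch decodebin's children as they are added to detect which video decoder platform is in use (V4L2 or
     // i.MX VPU); the texture mapper flags depend on that choice.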
2403     g_signal_connect(GST_BIN_CAST(m_pipeline.get()), "deep-element-added", G_CALLBACK(+[](GstBin*, GstBin* subBin, GstElement* element, MediaPlayerPrivateGStreamer* player) {
2404         GUniquePtr<char> binName(gst_element_get_name(GST_ELEMENT_CAST(subBin)));
2405         if (!g_str_has_prefix(binName.get(), "decodebin"))
2406             return;
2407
2408         GUniquePtr<char> elementName(gst_element_get_name(element));
2409         if (g_str_has_prefix(elementName.get(), "v4l2"))
2410             player->m_videoDecoderPlatform = WebKitGstVideoDecoderPlatform::Video4Linux;
2411         else if (g_str_has_prefix(elementName.get(), "imxvpudecoder"))
2412             player->m_videoDecoderPlatform = WebKitGstVideoDecoderPlatform::ImxVPU;
2413
2414         player->updateTextureMapperFlags();
2415     }), this);
2416
2417     g_signal_connect_swapped(m_pipeline.get(), "source-setup", G_CALLBACK(sourceSetupCallback), this);
2418     if (m_isLegacyPlaybin) {
2419         g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
2420         g_signal_connect_swapped(m_pipeline.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
2421     }
2422
2423 #if ENABLE(VIDEO_TRACK)
2424     if (m_isLegacyPlaybin)
2425         g_signal_connect_swapped(m_pipeline.get(), "text-changed", G_CALLBACK(textChangedCallback), this);
2426
2427     GstElement* textCombiner = webkitTextCombinerNew();
2428     ASSERT(textCombiner);
2429     g_object_set(m_pipeline.get(), "text-stream-combiner", textCombiner, nullptr);
2430
2431     m_textAppSink = webkitTextSinkNew();
2432     ASSERT(m_textAppSink);
2433
2434     m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
2435     ASSERT(m_textAppSinkPad);
2436
2437     GRefPtr<GstCaps> textCaps;
2438     if (webkitGstCheckVersion(1, 14, 0))
2439         textCaps = adoptGRef(gst_caps_new_empty_simple("application/x-subtitle-vtt"));
2440     else
2441         textCaps = adoptGRef(gst_caps_new_empty_simple("text/vtt"));
2442     g_object_set(m_textAppSink.get(), "emit-signals", TRUE, "enable-last-sample", FALSE, "caps", textCaps.get(), nullptr);
2443     g_signal_connect_swapped(m_textAppSink.get(), "new-sample", G_CALLBACK(newTextSampleCallback), this);
2444
2445     g_object_set(m_pipeline.get(), "text-sink", m_textAppSink.get(), nullptr);
2446 #endif
2447
2448     g_object_set(m_pipeline.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);
2449
2450     configurePlaySink();
2451
2452     if (m_preservesPitch) {
2453         GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
2454
2455         if (!scale)
2456             GST_WARNING("Failed to create scaletempo");
2457         else
2458             g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
2459     }
2460
2461     if (!m_renderingCanBeAccelerated) {
2462         // If not using accelerated compositing, let GStreamer handle
2463         // the image-orientation tag.
2464         GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
2465         if (videoFlip) {
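                 // Method 8 is "automatic": videoflip rotates the video according to the stream's image-orientation tag.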
2466             g_object_set(videoFlip, "method", 8, nullptr);
2467             g_object_set(m_pipeline.get(), "video-filter", videoFlip, nullptr);
2468         } else
2469             GST_WARNING("The videoflip element is missing; video rotation support is now disabled. Please check your gst-plugins-good installation.");
2470     }
2471
2472     GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
2473     if (videoSinkPad)
2474         g_signal_connect_swapped(videoSinkPad.get(), "notify::caps", G_CALLBACK(videoSinkCapsChangedCallback), this);
2475 }
2476
2477 void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
2478 {
2479     GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_pipeline.get()), GST_STATE_PAUSED);
2480     gst_element_post_message(m_pipeline.get(), message);
2481 }
2482
2483 bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
2484 {
2485     if (WEBKIT_IS_WEB_SRC(m_source.get()))
2486         return webKitSrcPassedCORSAccessCheck(WEBKIT_WEB_SRC(m_source.get()));
2487     return false;
2488 }
2489
2490 bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
2491 {
2492     if (isLiveStream())
2493         return false;
2494
2495     if (m_url.isLocalFile())
2496         return true;
2497
2498     if (m_url.protocolIsInHTTPFamily())
2499         return true;
2500
2501     return false;
2502 }
2503
2504 Optional<bool> MediaPlayerPrivateGStreamer::wouldTaintOrigin(const SecurityOrigin& origin) const
2505 {
2506     if (webkitGstCheckVersion(1, 12, 0)) {
2507         GST_TRACE_OBJECT(pipeline(), "Checking %u origins", m_origins.size());
2508         for (auto& responseOrigin : m_origins) {
2509             if (!origin.canAccess(*responseOrigin)) {
2510                 GST_DEBUG_OBJECT(pipeline(), "Found a response origin that the requesting origin cannot access");
2511                 return true;
2512             }
2513         }
2514         GST_DEBUG_OBJECT(pipeline(), "All response origins are accessible from the requesting origin");
2515         return false;
2516     }
2517
2518     // GStreamer < 1.12 has an incomplete uridownloader implementation, so we
2519     // can't use WebKitWebSrc for downloading adaptive-streaming fragments when
2520     // such a version is detected.
2521     UNUSED_PARAM(origin);
2522     return m_hasTaintedOrigin;
2523 }
2524
2525 }
2526
2527 #endif // USE(GSTREAMER)