[GStreamer] Remove the HLS queue buffering query hack
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamer.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015, 2016 Igalia S.L
7  * Copyright (C) 2014 Cable Television Laboratories, Inc.
8  * Copyright (C) 2015, 2016 Metrological Group B.V.
9  *
10  * This library is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Library General Public
12  * License as published by the Free Software Foundation; either
13  * version 2 of the License, or (at your option) any later version.
14  *
15  * This library is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18  * Library General Public License for more details.
19  *
20  * You should have received a copy of the GNU Library General Public License
21  * along with this library; see the file COPYING.LIB.  If not, write to
22  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
23  * Boston, MA 02110-1301, USA.
24  */
25
26 #include "config.h"
27 #include "MediaPlayerPrivateGStreamer.h"
28
29 #if ENABLE(VIDEO) && USE(GSTREAMER)
30
31 #include "GStreamerCommon.h"
32 #include "GStreamerRegistryScanner.h"
33 #include "HTTPHeaderNames.h"
34 #include "MIMETypeRegistry.h"
35 #include "MediaPlayer.h"
36 #include "MediaPlayerRequestInstallMissingPluginsCallback.h"
37 #include "NotImplemented.h"
38 #include "SecurityOrigin.h"
39 #include "TimeRanges.h"
40 #include "WebKitWebSourceGStreamer.h"
41 #include <glib.h>
42 #include <gst/gst.h>
43 #include <gst/pbutils/missing-plugins.h>
44 #include <limits>
45 #include <wtf/FileSystem.h>
46 #include <wtf/HexNumber.h>
47 #include <wtf/MediaTime.h>
48 #include <wtf/NeverDestroyed.h>
49 #include <wtf/StringPrintStream.h>
50 #include <wtf/URL.h>
51 #include <wtf/WallTime.h>
52 #include <wtf/glib/GUniquePtr.h>
53 #include <wtf/glib/RunLoopSourcePriority.h>
54 #include <wtf/text/CString.h>
55 #include <wtf/text/StringConcatenateNumbers.h>
56
57 #if ENABLE(MEDIA_STREAM) && GST_CHECK_VERSION(1, 10, 0)
58 #include "GStreamerMediaStreamSource.h"
59 #endif
60
61 #if ENABLE(VIDEO_TRACK)
62 #include "AudioTrackPrivateGStreamer.h"
63 #include "InbandMetadataTextTrackPrivateGStreamer.h"
64 #include "InbandTextTrackPrivateGStreamer.h"
65 #include "TextCombinerGStreamer.h"
66 #include "TextSinkGStreamer.h"
67 #include "VideoTrackPrivateGStreamer.h"
68 #endif
69
70 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
71 #define GST_USE_UNSTABLE_API
72 #include <gst/mpegts/mpegts.h>
73 #undef GST_USE_UNSTABLE_API
74 #endif
75 #include <gst/audio/streamvolume.h>
76
77 #if ENABLE(MEDIA_SOURCE)
78 #include "MediaSource.h"
79 #include "WebKitMediaSourceGStreamer.h"
80 #endif
81
82 #if ENABLE(WEB_AUDIO)
83 #include "AudioSourceProviderGStreamer.h"
84 #endif
85
86 GST_DEBUG_CATEGORY_EXTERN(webkit_media_player_debug);
87 #define GST_CAT_DEFAULT webkit_media_player_debug
88
89
90 namespace WebCore {
91 using namespace std;
92
93 static void busMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
94 {
95     player->handleMessage(message);
96 }
97
98 void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
99 {
100     player->setAudioStreamProperties(object);
101 }
102
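// Only PulseAudio sinks are handled here: tag them with a media.role stream property
// ("video" or "music") so the audio server's policy modules can classify the stream.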
103 void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
104 {
105     if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
106         return;
107
108     const char* role = m_player->client().mediaPlayerIsVideo() ? "video" : "music";
109     GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr);
110     g_object_set(object, "stream-properties", structure, nullptr);
111     gst_structure_free(structure);
112     GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
113     GST_DEBUG_OBJECT(pipeline(), "Set media.role as %s at %s", role, elementName.get());
114 }
115
116 void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar)
117 {
118     MediaPlayerPrivateGStreamerBase::initializeDebugCategory();
119     if (isAvailable()) {
120         registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamer>(player); },
121             getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
122     }
123 }
124
125 bool MediaPlayerPrivateGStreamer::isAvailable()
126 {
127     if (!initializeGStreamerAndRegisterWebKitElements())
128         return false;
129
130     GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
131     return factory;
132 }
133
134 MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
135     : MediaPlayerPrivateGStreamerBase(player)
136     , m_buffering(false)
137     , m_bufferingPercentage(0)
138     , m_cachedPosition(MediaTime::invalidTime())
139     , m_cachedDuration(MediaTime::invalidTime())
140     , m_canFallBackToLastFinishedSeekPosition(false)
141     , m_changingRate(false)
142     , m_downloadFinished(false)
143     , m_errorOccured(false)
144     , m_isEndReached(false)
145     , m_isStreaming(false)
146     , m_paused(true)
147     , m_playbackRate(1)
148     , m_requestedState(GST_STATE_VOID_PENDING)
149     , m_resetPipeline(false)
150     , m_seeking(false)
151     , m_seekIsPending(false)
152     , m_seekTime(MediaTime::invalidTime())
153     , m_source(nullptr)
154     , m_volumeAndMuteInitialized(false)
155     , m_mediaLocations(nullptr)
156     , m_mediaLocationCurrentIndex(0)
157     , m_playbackRatePause(false)
158     , m_timeOfOverlappingSeek(MediaTime::invalidTime())
159     , m_lastPlaybackRate(1)
160     , m_fillTimer(*this, &MediaPlayerPrivateGStreamer::fillTimerFired)
161     , m_maxTimeLoaded(MediaTime::zeroTime())
162     , m_preload(player->preload())
163     , m_delayingLoad(false)
164     , m_maxTimeLoadedAtLastDidLoadingProgress(MediaTime::zeroTime())
165     , m_hasVideo(false)
166     , m_hasAudio(false)
167     , m_readyTimerHandler(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::readyTimerFired)
168     , m_totalBytes(0)
169     , m_preservesPitch(false)
170 {
171 #if USE(GLIB)
172     m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
173 #endif
174 }
175
176 MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
177 {
178     GST_DEBUG_OBJECT(pipeline(), "Disposing player");
179
180 #if ENABLE(VIDEO_TRACK)
181     for (auto& track : m_audioTracks.values())
182         track->disconnect();
183
184     for (auto& track : m_textTracks.values())
185         track->disconnect();
186
187     for (auto& track : m_videoTracks.values())
188         track->disconnect();
189 #endif
190     if (m_fillTimer.isActive())
191         m_fillTimer.stop();
192
193     if (m_mediaLocations) {
194         gst_structure_free(m_mediaLocations);
195         m_mediaLocations = nullptr;
196     }
197
198     if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
199         g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);
200
201     if (m_autoAudioSink) {
202         g_signal_handlers_disconnect_by_func(G_OBJECT(m_autoAudioSink.get()),
203             reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);
204     }
205
206     m_readyTimerHandler.stop();
207     for (auto& missingPluginCallback : m_missingPluginCallbacks) {
208         if (missingPluginCallback)
209             missingPluginCallback->invalidate();
210     }
211     m_missingPluginCallbacks.clear();
212
213     if (m_videoSink) {
214         GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
215         g_signal_handlers_disconnect_matched(videoSinkPad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
216     }
217
218     if (m_pipeline) {
219         GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
220         ASSERT(bus);
221         g_signal_handlers_disconnect_by_func(bus.get(), gpointer(busMessageCallback), this);
222         gst_bus_remove_signal_watch(bus.get());
223         gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
224         g_signal_handlers_disconnect_matched(m_pipeline.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
225     }
226 }
227
228 void MediaPlayerPrivateGStreamer::setPlaybinURL(const URL& url)
229 {
230     // Clean out everything after the file:// URL path.
231     String cleanURLString(url.string());
232     if (url.isLocalFile())
233         cleanURLString = cleanURLString.substring(0, url.pathEnd());
234
235     m_url = URL(URL(), cleanURLString);
236     GST_INFO_OBJECT(pipeline(), "Load %s", m_url.string().utf8().data());
237     g_object_set(m_pipeline.get(), "uri", m_url.string().utf8().data(), nullptr);
238 }
239
240 void MediaPlayerPrivateGStreamer::load(const String& urlString)
241 {
242     loadFull(urlString, nullptr, String());
243 }
244
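// Recursively enables or disables clock synchronisation on the sinks reachable from the
// given element. Bins (such as playbin's composite sinks) are iterated; plain elements
// simply get their "sync" property set.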
245 static void setSyncOnClock(GstElement *element, bool sync)
246 {
247     if (!GST_IS_BIN(element)) {
248         g_object_set(element, "sync", sync, nullptr);
249         return;
250     }
251
252     GstIterator* it = gst_bin_iterate_sinks(GST_BIN(element));
253     while (gst_iterator_foreach(it, (GstIteratorForeachFunction)([](const GValue* item, void* syncPtr) {
254         bool* sync = static_cast<bool*>(syncPtr);
255         setSyncOnClock(GST_ELEMENT(g_value_get_object(item)), *sync);
256     }), &sync) == GST_ITERATOR_RESYNC)
257         gst_iterator_resync(it);
258     gst_iterator_free(it);
259 }
260
261 void MediaPlayerPrivateGStreamer::syncOnClock(bool sync)
262 {
263     setSyncOnClock(videoSink(), sync);
264     setSyncOnClock(audioSink(), sync);
265 }
266
267 void MediaPlayerPrivateGStreamer::loadFull(const String& urlString, const gchar* playbinName,
268     const String& pipelineName)
269 {
270     // FIXME: This method is still called even if supportsType() returned
271     // IsNotSupported. This deserves more investigation, but meanwhile make
272     // sure we never try to play animated GIF assets.
273     if (m_player->contentMIMEType() == "image/gif") {
274         loadingFailed(MediaPlayer::FormatError);
275         return;
276     }
277
278     URL url(URL(), urlString);
279     if (url.protocolIsAbout())
280         return;
281
282     if (!m_pipeline)
283         createGSTPlayBin(isMediaSource() ? "playbin" : playbinName, pipelineName);
284     syncOnClock(true);
285     if (m_fillTimer.isActive())
286         m_fillTimer.stop();
287
288     ASSERT(m_pipeline);
289
290     setPlaybinURL(url);
291
292     GST_DEBUG_OBJECT(pipeline(), "preload: %s", convertEnumerationToString(m_preload).utf8().data());
293     if (m_preload == MediaPlayer::None) {
294         GST_INFO_OBJECT(pipeline(), "Delaying load.");
295         m_delayingLoad = true;
296     }
297
298     // Reset network and ready states. Those will be set properly once
299     // the pipeline has pre-rolled.
300     m_networkState = MediaPlayer::Loading;
301     m_player->networkStateChanged();
302     m_readyState = MediaPlayer::HaveNothing;
303     m_player->readyStateChanged();
304     m_volumeAndMuteInitialized = false;
305
306     if (!m_delayingLoad)
307         commitLoad();
308 }
309
310 #if ENABLE(MEDIA_SOURCE)
311 void MediaPlayerPrivateGStreamer::load(const String&, MediaSourcePrivateClient*)
312 {
313     // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
314     m_networkState = MediaPlayer::FormatError;
315     m_player->networkStateChanged();
316 }
317 #endif
318
319 #if ENABLE(MEDIA_STREAM)
320 void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate& stream)
321 {
322 #if GST_CHECK_VERSION(1, 10, 0)
323     m_streamPrivate = &stream;
324     auto pipelineName = makeString("mediastream_",
325         (stream.hasCaptureVideoSource() || stream.hasCaptureAudioSource()) ? "Local" : "Remote",
326         "_0x", hex(reinterpret_cast<uintptr_t>(this), Lowercase));
327
328     loadFull(String("mediastream://") + stream.id(), "playbin3", pipelineName);
329     syncOnClock(false);
330
331 #if USE(GSTREAMER_GL)
332     ensureGLVideoSinkContext();
333 #endif
334     m_player->play();
335 #else
336     // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
337     m_networkState = MediaPlayer::FormatError;
338     m_player->networkStateChanged();
339     notImplemented();
340 #endif
341 }
342 #endif
343
344 void MediaPlayerPrivateGStreamer::commitLoad()
345 {
346     ASSERT(!m_delayingLoad);
347     GST_DEBUG_OBJECT(pipeline(), "Committing load.");
348
349     // GStreamer needs to have the pipeline set to a paused state to
350     // start providing anything useful.
351     changePipelineState(GST_STATE_PAUSED);
352
353     setDownloadBuffering();
354     updateStates();
355 }
356
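// Returns the current pipeline position. Results are cached for a short interval
// (positionCacheThreshold) so that repeated calls between timeupdate events do not
// flood the pipeline with position queries.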
357 MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
358 {
359     GST_TRACE_OBJECT(pipeline(), "isEndReached: %s, seeking: %s, seekTime: %s", boolForPrinting(m_isEndReached), boolForPrinting(m_seeking), m_seekTime.toString().utf8().data());
360     if (m_isEndReached && m_seeking)
361         return m_seekTime;
362
363     // This constant should remain lower than HTMLMediaElement's maxTimeupdateEventFrequency.
364     static const Seconds positionCacheThreshold = 200_ms;
365     Seconds now = WTF::WallTime::now().secondsSinceEpoch();
366     if (m_lastQueryTime && (now - m_lastQueryTime.value()) < positionCacheThreshold && m_cachedPosition.isValid()) {
367         GST_TRACE_OBJECT(pipeline(), "Returning cached position: %s", m_cachedPosition.toString().utf8().data());
368         return m_cachedPosition;
369     }
370
371     m_lastQueryTime = now;
372
373     // Position is only available if no async state change is going on and the state is either paused or playing.
374     gint64 position = GST_CLOCK_TIME_NONE;
375     GstQuery* query = gst_query_new_position(GST_FORMAT_TIME);
376     if (gst_element_query(m_pipeline.get(), query))
377         gst_query_parse_position(query, 0, &position);
378     gst_query_unref(query);
379
380     GST_TRACE_OBJECT(pipeline(), "Position %" GST_TIME_FORMAT ", canFallBackToLastFinishedSeekPosition: %s", GST_TIME_ARGS(position), boolForPrinting(m_canFallBackToLastFinishedSeekPosition));
381
382     MediaTime playbackPosition = MediaTime::zeroTime();
383     GstClockTime gstreamerPosition = static_cast<GstClockTime>(position);
384     if (GST_CLOCK_TIME_IS_VALID(gstreamerPosition))
385         playbackPosition = MediaTime(gstreamerPosition, GST_SECOND);
386     else if (m_canFallBackToLastFinishedSeekPosition)
387         playbackPosition = m_seekTime;
388
389     m_cachedPosition = playbackPosition;
390     return playbackPosition;
391 }
392
393 void MediaPlayerPrivateGStreamer::readyTimerFired()
394 {
395     GST_DEBUG_OBJECT(pipeline(), "In READY for too long. Releasing pipeline resources.");
396     changePipelineState(GST_STATE_NULL);
397 }
398
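// Requests a pipeline state change, treating it as a no-op when the target state is
// already current or pending. A one-shot timer releases pipeline resources (see
// readyTimerFired() above) if the pipeline lingers in READY for too long.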
399 bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
400 {
401     ASSERT(m_pipeline);
402
403     GstState currentState;
404     GstState pending;
405
406     gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
407     if (currentState == newState || pending == newState) {
408         GST_DEBUG_OBJECT(pipeline(), "Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
409             gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
410         return true;
411     }
412
413     GST_DEBUG_OBJECT(pipeline(), "Changing state to %s from %s with %s pending", gst_element_state_get_name(newState),
414         gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
415
416 #if USE(GSTREAMER_GL)
417     if (currentState == GST_STATE_READY && newState == GST_STATE_PAUSED)
418         ensureGLVideoSinkContext();
419 #endif
420
421     GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState);
422     GstState pausedOrPlaying = newState == GST_STATE_PLAYING ? GST_STATE_PAUSED : GST_STATE_PLAYING;
423     if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE)
424         return false;
425
426     // Create a timer when entering the READY state so that we can free resources
427     // if we stay for too long on READY.
428     // Also, remove the timer if we request a state change to any state other than READY.
429     // See also https://bugs.webkit.org/show_bug.cgi?id=117354
430     if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
431         // Max interval in seconds to stay in the READY state on manual
432         // state change requests.
433         static const Seconds readyStateTimerDelay { 1_min };
434         m_readyTimerHandler.startOneShot(readyStateTimerDelay);
435     } else if (newState != GST_STATE_READY)
436         m_readyTimerHandler.stop();
437
438     return true;
439 }
440
441 void MediaPlayerPrivateGStreamer::prepareToPlay()
442 {
443     GST_DEBUG_OBJECT(pipeline(), "Prepare to play");
444     m_preload = MediaPlayer::Auto;
445     if (m_delayingLoad) {
446         m_delayingLoad = false;
447         commitLoad();
448     }
449 }
450
451 void MediaPlayerPrivateGStreamer::play()
452 {
453     if (!m_playbackRate) {
454         m_playbackRatePause = true;
455         return;
456     }
457
458     if (changePipelineState(GST_STATE_PLAYING)) {
459         m_isEndReached = false;
460         m_delayingLoad = false;
461         m_preload = MediaPlayer::Auto;
462         setDownloadBuffering();
463         GST_INFO_OBJECT(pipeline(), "Play");
464     } else
465         loadingFailed(MediaPlayer::Empty);
466 }
467
468 void MediaPlayerPrivateGStreamer::pause()
469 {
470     m_playbackRatePause = false;
471     GstState currentState, pendingState;
472     gst_element_get_state(m_pipeline.get(), &currentState, &pendingState, 0);
473     if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
474         return;
475
476     if (changePipelineState(GST_STATE_PAUSED))
477         GST_INFO_OBJECT(pipeline(), "Pause");
478     else
479         loadingFailed(MediaPlayer::Empty);
480 }
481
482 MediaTime MediaPlayerPrivateGStreamer::platformDuration() const
483 {
484     GST_TRACE_OBJECT(pipeline(), "errorOccured: %s, pipeline state: %s", boolForPrinting(m_errorOccured), gst_element_state_get_name(GST_STATE(m_pipeline.get())));
485     if (m_errorOccured)
486         return MediaTime::invalidTime();
487
488     // The duration query would fail on a not-prerolled pipeline.
489     if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
490         return MediaTime::invalidTime();
491
492     int64_t duration = 0;
493     if (!gst_element_query_duration(m_pipeline.get(), GST_FORMAT_TIME, &duration) || !GST_CLOCK_TIME_IS_VALID(duration)) {
494         GST_DEBUG_OBJECT(pipeline(), "Time duration query failed for %s", m_url.string().utf8().data());
495         return MediaTime::positiveInfiniteTime();
496     }
497
498     GST_LOG_OBJECT(pipeline(), "Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
499     return MediaTime(duration, GST_SECOND);
500 }
501
502 MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
503 {
504     GST_TRACE_OBJECT(pipeline(), "Cached duration: %s", m_cachedDuration.toString().utf8().data());
505     if (m_cachedDuration.isValid())
506         return m_cachedDuration;
507
508     MediaTime duration = platformDuration();
509     if (!duration || duration.isInvalid())
510         return MediaTime::zeroTime();
511
512     m_cachedDuration = duration;
513
514     return m_cachedDuration;
515 }
516
517 MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
518 {
519     if (!m_pipeline || m_errorOccured)
520         return MediaTime::invalidTime();
521
522     GST_TRACE_OBJECT(pipeline(), "seeking: %s, seekTime: %s", boolForPrinting(m_seeking), m_seekTime.toString().utf8().data());
523     if (m_seeking)
524         return m_seekTime;
525
526     return playbackPosition();
527 }
528
529 void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime)
530 {
531     if (!m_pipeline)
532         return;
533
534     if (m_errorOccured)
535         return;
536
537     GST_INFO_OBJECT(pipeline(), "[Seek] seek attempt to %s", toString(mediaTime).utf8().data());
538
539     // Avoid useless seeking.
540     if (mediaTime == currentMediaTime()) {
541         GST_DEBUG_OBJECT(pipeline(), "[Seek] Already at requested position %s, ignoring", toString(mediaTime).utf8().data());
542         return;
543     }
544
545     MediaTime time = std::min(mediaTime, durationMediaTime());
546
547     if (isLiveStream()) {
548         GST_DEBUG_OBJECT(pipeline(), "[Seek] Live stream seek unhandled");
549         return;
550     }
551
552     GST_INFO_OBJECT(pipeline(), "[Seek] seeking to %s", toString(time).utf8().data());
553
554     if (m_seeking) {
555         m_timeOfOverlappingSeek = time;
556         if (m_seekIsPending) {
557             m_seekTime = time;
558             return;
559         }
560     }
561
562     GstState state;
563     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
564     if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
565         GST_DEBUG_OBJECT(pipeline(), "[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
566         return;
567     }
568     if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
569         m_seekIsPending = true;
570         if (m_isEndReached) {
571             GST_DEBUG_OBJECT(pipeline(), "[Seek] reset pipeline");
572             m_resetPipeline = true;
573             if (!changePipelineState(GST_STATE_PAUSED))
574                 loadingFailed(MediaPlayer::Empty);
575         }
576     } else {
577         // We can seek now.
578         if (!doSeek(time, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
579             GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(time).utf8().data());
580             return;
581         }
582     }
583
584     m_seeking = true;
585     m_seekTime = time;
586     m_isEndReached = false;
587 }
588
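// Performs the actual GStreamer seek. For negative rates the segment start is pinned to
// zero and the requested position (or the duration) becomes the stop boundary, so the
// media plays backwards. Media Source playback returns early here (see the TODO below).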
589 bool MediaPlayerPrivateGStreamer::doSeek(const MediaTime& position, float rate, GstSeekFlags seekType)
590 {
591     // Default values for rate >= 0.
592     MediaTime startTime = position, endTime = MediaTime::invalidTime();
593
594     // TODO: Should do more than that, need to notify the media source
595     // and probably flush the pipeline at least.
596     if (isMediaSource())
597         return true;
598
599     if (rate < 0) {
600         startTime = MediaTime::zeroTime();
601         // If we are at beginning of media, start from the end to
602         // avoid immediate EOS.
603         if (position < MediaTime::zeroTime())
604             endTime = durationMediaTime();
605         else
606             endTime = position;
607     }
608
609     if (!rate)
610         rate = 1.0;
611
612     return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType,
613         GST_SEEK_TYPE_SET, toGstClockTime(startTime), GST_SEEK_TYPE_SET, toGstClockTime(endTime));
614 }
615
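// Applies a pending playback rate change by issuing a flushing seek at the current
// position. Audio is muted when the rate is negative or extreme and pitch preservation
// is disabled, since the pitch would not be corrected.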
616 void MediaPlayerPrivateGStreamer::updatePlaybackRate()
617 {
618     if (!m_changingRate)
619         return;
620
621     GST_INFO_OBJECT(pipeline(), "Set Rate to %f", m_playbackRate);
622
623     // Mute the sound if the playback rate is negative or too extreme and audio pitch is not adjusted.
624     bool mute = m_playbackRate <= 0 || (!m_preservesPitch && (m_playbackRate < 0.8 || m_playbackRate > 2));
625
626     GST_INFO_OBJECT(pipeline(), mute ? "Need to mute audio" : "Do not need to mute audio");
627
628     if (doSeek(playbackPosition(), m_playbackRate, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH))) {
629         g_object_set(m_pipeline.get(), "mute", mute, nullptr);
630         m_lastPlaybackRate = m_playbackRate;
631     } else {
632         m_playbackRate = m_lastPlaybackRate;
633         GST_ERROR("Set rate to %f failed", m_playbackRate);
634     }
635
636     if (m_playbackRatePause) {
637         GstState state;
638         GstState pending;
639
640         gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
641         if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
642             changePipelineState(GST_STATE_PLAYING);
643         m_playbackRatePause = false;
644     }
645
646     m_changingRate = false;
647     m_player->rateChanged();
648 }
649
650 bool MediaPlayerPrivateGStreamer::paused() const
651 {
652     if (m_isEndReached) {
653         GST_DEBUG_OBJECT(pipeline(), "Ignoring pause at EOS");
654         return true;
655     }
656
657     if (m_playbackRatePause) {
658         GST_DEBUG_OBJECT(pipeline(), "Playback rate is 0, simulating PAUSED state");
659         return false;
660     }
661
662     GstState state;
663     gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
664     bool paused = state <= GST_STATE_PAUSED;
665     GST_DEBUG_OBJECT(pipeline(), "Paused: %s", toString(paused).utf8().data());
666     return paused;
667 }
668
669 bool MediaPlayerPrivateGStreamer::seeking() const
670 {
671     return m_seeking;
672 }
673
674 #if GST_CHECK_VERSION(1, 10, 0)
675 #define CLEAR_TRACKS(tracks, method) \
676     for (auto& track : tracks.values())\
677         method(*track);\
678     tracks.clear();
679
680 void MediaPlayerPrivateGStreamer::clearTracks()
681 {
682 #if ENABLE(VIDEO_TRACK)
683     CLEAR_TRACKS(m_audioTracks, m_player->removeAudioTrack);
684     CLEAR_TRACKS(m_videoTracks, m_player->removeVideoTrack);
685     CLEAR_TRACKS(m_textTracks, m_player->removeTextTrack);
686 #endif // ENABLE(VIDEO_TRACK)
687 }
688 #undef CLEAR_TRACKS
689
690 #if ENABLE(VIDEO_TRACK)
691 #define CREATE_TRACK(type, Type) \
692     m_has##Type = true; \
693     if (!useMediaSource) {\
694         RefPtr<Type##TrackPrivateGStreamer> track = Type##TrackPrivateGStreamer::create(makeWeakPtr(*this), i, stream); \
695         m_##type##Tracks.add(track->id(), track); \
696         m_player->add##Type##Track(*track);\
697         if (gst_stream_get_stream_flags(stream.get()) & GST_STREAM_FLAG_SELECT) {                                    \
698             m_current##Type##StreamId = String(gst_stream_get_stream_id(stream.get()));                              \
699         }                                                                                                            \
700     }
701
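// With playbin3 and a selected video stream, the natural size can be read directly from
// the width/height tags attached to the current video track; otherwise defer to the
// base class implementation.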
702 FloatSize MediaPlayerPrivateGStreamer::naturalSize() const
703 {
704 #if ENABLE(MEDIA_STREAM)
705     if (!m_isLegacyPlaybin && !m_currentVideoStreamId.isEmpty()) {
706         RefPtr<VideoTrackPrivateGStreamer> videoTrack = m_videoTracks.get(m_currentVideoStreamId);
707
708         if (videoTrack) {
709             auto tags = adoptGRef(gst_stream_get_tags(videoTrack->stream()));
710             gint width, height;
711
712             if (tags && gst_tag_list_get_int(tags.get(), WEBKIT_MEDIA_TRACK_TAG_WIDTH, &width) && gst_tag_list_get_int(tags.get(), WEBKIT_MEDIA_TRACK_TAG_HEIGHT, &height))
713                 return FloatSize(width, height);
714         }
715     }
716 #endif // ENABLE(MEDIA_STREAM)
717
718     return MediaPlayerPrivateGStreamerBase::naturalSize();
719 }
720 #else
721 #define CREATE_TRACK(type, Type) m_has##Type = true;
722 #endif // ENABLE(VIDEO_TRACK)
723
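// Rebuilds the audio, video and text track lists from the most recent GstStreamCollection
// announced by playbin3. This path is only used with the modern (non-legacy) playbin.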
724 void MediaPlayerPrivateGStreamer::updateTracks()
725 {
726     ASSERT(!m_isLegacyPlaybin);
727
728     bool useMediaSource = isMediaSource();
729     unsigned length = gst_stream_collection_get_size(m_streamCollection.get());
730
731     bool oldHasAudio = m_hasAudio;
732     bool oldHasVideo = m_hasVideo;
733     // New stream collections override previous ones.
734     clearTracks();
735     unsigned textTrackIndex = 0;
736     for (unsigned i = 0; i < length; i++) {
737         GRefPtr<GstStream> stream = gst_stream_collection_get_stream(m_streamCollection.get(), i);
738         String streamId(gst_stream_get_stream_id(stream.get()));
739         GstStreamType type = gst_stream_get_stream_type(stream.get());
740
741         GST_DEBUG_OBJECT(pipeline(), "Inspecting %s track with ID %s", gst_stream_type_get_name(type), streamId.utf8().data());
742         if (type & GST_STREAM_TYPE_AUDIO) {
743             CREATE_TRACK(audio, Audio)
744         } else if (type & GST_STREAM_TYPE_VIDEO) {
745             CREATE_TRACK(video, Video)
746         } else if (type & GST_STREAM_TYPE_TEXT && !useMediaSource) {
747 #if ENABLE(VIDEO_TRACK)
748             auto track = InbandTextTrackPrivateGStreamer::create(textTrackIndex++, stream);
749             m_textTracks.add(streamId, track.copyRef());
750             m_player->addTextTrack(track.get());
751 #endif
752         } else
753             GST_WARNING("Unknown track type found for stream %s", streamId.utf8().data());
754     }
755
756     if ((oldHasVideo != m_hasVideo) || (oldHasAudio != m_hasAudio))
757         m_player->characteristicChanged();
758
759     if (m_hasVideo)
760         m_player->sizeChanged();
761
762     m_player->client().mediaPlayerEngineUpdated(m_player);
763 }
764 #endif // GST_CHECK_VERSION(1, 10, 0)
765
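// Selects the track at the given index for the requested type. Legacy playbin uses the
// current-audio/current-video/current-text properties; playbin3 is driven by sending a
// select-streams event listing the stream IDs that should stay active.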
766 void MediaPlayerPrivateGStreamer::enableTrack(TrackPrivateBaseGStreamer::TrackType trackType, unsigned index)
767 {
768     // FIXME: Remove isMediaSource() test below when fixing https://bugs.webkit.org/show_bug.cgi?id=182531.
769     if (isMediaSource()) {
770         GST_FIXME_OBJECT(m_pipeline.get(), "Audio/Video/Text track switching is not yet supported by the MSE backend.");
771         return;
772     }
773
774     const char* propertyName;
775     const char* trackTypeAsString;
776     Vector<String> selectedStreams;
777     String selectedStreamId;
778
779 #if GST_CHECK_VERSION(1, 10, 0)
780     GstStream* stream = nullptr;
781
782     if (!m_isLegacyPlaybin) {
783         stream = gst_stream_collection_get_stream(m_streamCollection.get(), index);
784         if (!stream) {
785             GST_WARNING_OBJECT(pipeline(), "No stream to select at index %u", index);
786             return;
787         }
788         selectedStreamId = String::fromUTF8(gst_stream_get_stream_id(stream));
789         selectedStreams.append(selectedStreamId);
790     }
791 #endif // GST_CHECK_VERSION(1, 10, 0)
792
793     switch (trackType) {
794     case TrackPrivateBaseGStreamer::TrackType::Audio:
795         propertyName = "current-audio";
796         trackTypeAsString = "audio";
797         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentAudioStreamId) {
798             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
799             return;
800         }
801
802         if (!m_currentTextStreamId.isEmpty())
803             selectedStreams.append(m_currentTextStreamId);
804         if (!m_currentVideoStreamId.isEmpty())
805             selectedStreams.append(m_currentVideoStreamId);
806         break;
807     case TrackPrivateBaseGStreamer::TrackType::Video:
808         propertyName = "current-video";
809         trackTypeAsString = "video";
810         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentVideoStreamId) {
811             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
812             return;
813         }
814
815         if (!m_currentAudioStreamId.isEmpty())
816             selectedStreams.append(m_currentAudioStreamId);
817         if (!m_currentTextStreamId.isEmpty())
818             selectedStreams.append(m_currentTextStreamId);
819         break;
820     case TrackPrivateBaseGStreamer::TrackType::Text:
821         propertyName = "current-text";
822         trackTypeAsString = "text";
823         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentTextStreamId) {
824             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
825             return;
826         }
827
828         if (!m_currentAudioStreamId.isEmpty())
829             selectedStreams.append(m_currentAudioStreamId);
830         if (!m_currentVideoStreamId.isEmpty())
831             selectedStreams.append(m_currentVideoStreamId);
832         break;
833     case TrackPrivateBaseGStreamer::TrackType::Unknown:
834     default:
835         ASSERT_NOT_REACHED();
836     }
837
838     GST_INFO_OBJECT(pipeline(), "Enabling %s track with index: %u", trackTypeAsString, index);
839     if (m_isLegacyPlaybin)
840         g_object_set(m_pipeline.get(), propertyName, index, nullptr);
841 #if GST_CHECK_VERSION(1, 10, 0)
842     else {
843         GList* selectedStreamsList = nullptr;
844
845         for (const auto& streamId : selectedStreams)
846             selectedStreamsList = g_list_append(selectedStreamsList, g_strdup(streamId.utf8().data()));
847
848         // TODO: MSE GstStream API support: https://bugs.webkit.org/show_bug.cgi?id=182531
849         gst_element_send_event(m_pipeline.get(), gst_event_new_select_streams(selectedStreamsList));
850         g_list_free_full(selectedStreamsList, reinterpret_cast<GDestroyNotify>(g_free));
851     }
852 #endif
853 }
854
855 void MediaPlayerPrivateGStreamer::videoChangedCallback(MediaPlayerPrivateGStreamer* player)
856 {
857     player->m_notifier->notify(MainThreadNotification::VideoChanged, [player] {
858         player->notifyPlayerOfVideo();
859     });
860 }
861
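// Legacy playbin / MSE path: rebuilds the video track list from the "n-video" property
// and the per-index "get-video-pad" signal, using synthetic "V<index>" stream IDs, and
// purges tracks that no longer exist.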
862 void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
863 {
864     if (UNLIKELY(!m_pipeline || !m_source))
865         return;
866
867     ASSERT(m_isLegacyPlaybin || isMediaSource());
868
869     gint numTracks = 0;
870     bool useMediaSource = isMediaSource();
871     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
872     g_object_get(element, "n-video", &numTracks, nullptr);
873
874     GST_INFO_OBJECT(pipeline(), "Media has %d video tracks", numTracks);
875
876     bool oldHasVideo = m_hasVideo;
877     m_hasVideo = numTracks > 0;
878     if (oldHasVideo != m_hasVideo)
879         m_player->characteristicChanged();
880
881     if (m_hasVideo)
882         m_player->sizeChanged();
883
884     if (useMediaSource) {
885         GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
886         m_player->client().mediaPlayerEngineUpdated(m_player);
887         return;
888     }
889
890 #if ENABLE(VIDEO_TRACK)
891     Vector<String> validVideoStreams;
892     for (gint i = 0; i < numTracks; ++i) {
893         GRefPtr<GstPad> pad;
894         g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
895         ASSERT(pad);
896
897         String streamId = "V" + String::number(i);
898         validVideoStreams.append(streamId);
899         if (i < static_cast<gint>(m_videoTracks.size())) {
900             RefPtr<VideoTrackPrivateGStreamer> existingTrack = m_videoTracks.get(streamId);
901             if (existingTrack) {
902                 existingTrack->setIndex(i);
903                 if (existingTrack->pad() == pad)
904                     continue;
905             }
906         }
907
908         auto track = VideoTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
909         ASSERT(streamId == track->id());
910         m_videoTracks.add(streamId, track.copyRef());
911         m_player->addVideoTrack(track.get());
912     }
913
914     purgeInvalidVideoTracks(validVideoStreams);
915 #endif
916
917     m_player->client().mediaPlayerEngineUpdated(m_player);
918 }
919
920 void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
921 {
922     player->m_notifier->notify(MainThreadNotification::VideoCapsChanged, [player] {
923         player->notifyPlayerOfVideoCaps();
924     });
925 }
926
927 void MediaPlayerPrivateGStreamer::notifyPlayerOfVideoCaps()
928 {
929     m_videoSize = IntSize();
930     m_player->client().mediaPlayerEngineUpdated(m_player);
931 }
932
933 void MediaPlayerPrivateGStreamer::audioChangedCallback(MediaPlayerPrivateGStreamer* player)
934 {
935     player->m_notifier->notify(MainThreadNotification::AudioChanged, [player] {
936         player->notifyPlayerOfAudio();
937     });
938 }
939
940 void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
941 {
942     if (UNLIKELY(!m_pipeline || !m_source))
943         return;
944
945     ASSERT(m_isLegacyPlaybin || isMediaSource());
946
947     gint numTracks = 0;
948     bool useMediaSource = isMediaSource();
949     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
950     g_object_get(element, "n-audio", &numTracks, nullptr);
951
952     GST_INFO_OBJECT(pipeline(), "Media has %d audio tracks", numTracks);
953     bool oldHasAudio = m_hasAudio;
954     m_hasAudio = numTracks > 0;
955     if (oldHasAudio != m_hasAudio)
956         m_player->characteristicChanged();
957
958     if (useMediaSource) {
959         GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
960         m_player->client().mediaPlayerEngineUpdated(m_player);
961         return;
962     }
963
964 #if ENABLE(VIDEO_TRACK)
965     Vector<String> validAudioStreams;
966     for (gint i = 0; i < numTracks; ++i) {
967         GRefPtr<GstPad> pad;
968         g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
969         ASSERT(pad);
970
971         String streamId = "A" + String::number(i);
972         validAudioStreams.append(streamId);
973         if (i < static_cast<gint>(m_audioTracks.size())) {
974             RefPtr<AudioTrackPrivateGStreamer> existingTrack = m_audioTracks.get(streamId);
975             if (existingTrack) {
976                 existingTrack->setIndex(i);
977                 if (existingTrack->pad() == pad)
978                     continue;
979             }
980         }
981
982         auto track = AudioTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
983         ASSERT(streamId == track->id());
984         m_audioTracks.add(streamId, track);
985         m_player->addAudioTrack(*track);
986     }
987
988     purgeInvalidAudioTracks(validAudioStreams);
989 #endif
990
991     m_player->client().mediaPlayerEngineUpdated(m_player);
992 }
993
994 #if ENABLE(VIDEO_TRACK)
995 void MediaPlayerPrivateGStreamer::textChangedCallback(MediaPlayerPrivateGStreamer* player)
996 {
997     player->m_notifier->notify(MainThreadNotification::TextChanged, [player] {
998         player->notifyPlayerOfText();
999     });
1000 }
1001
1002 void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
1003 {
1004     if (UNLIKELY(!m_pipeline || !m_source))
1005         return;
1006
1007     ASSERT(m_isLegacyPlaybin || isMediaSource());
1008
1009     gint numTracks = 0;
1010     bool useMediaSource = isMediaSource();
1011     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
1012     g_object_get(element, "n-text", &numTracks, nullptr);
1013
1014     GST_INFO_OBJECT(pipeline(), "Media has %d text tracks", numTracks);
1015
1016     if (useMediaSource) {
1017         GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
1018         return;
1019     }
1020
1021     Vector<String> validTextStreams;
1022     for (gint i = 0; i < numTracks; ++i) {
1023         GRefPtr<GstPad> pad;
1024         g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
1025         ASSERT(pad);
1026
1027         // We can't assume the pad has a sticky event here like implemented in
1028         // InbandTextTrackPrivateGStreamer because it might be emitted after the
1029         // track was created. So fall back to a dummy stream ID like in the Audio
1030         // and Video tracks.
1031         String streamId = "T" + String::number(i);
1032
1033         validTextStreams.append(streamId);
1034         if (i < static_cast<gint>(m_textTracks.size())) {
1035             RefPtr<InbandTextTrackPrivateGStreamer> existingTrack = m_textTracks.get(streamId);
1036             if (existingTrack) {
1037                 existingTrack->setIndex(i);
1038                 if (existingTrack->pad() == pad)
1039                     continue;
1040             }
1041         }
1042
1043         auto track = InbandTextTrackPrivateGStreamer::create(i, pad);
1044         m_textTracks.add(streamId, track.copyRef());
1045         m_player->addTextTrack(track.get());
1046     }
1047
1048     purgeInvalidTextTracks(validTextStreams);
1049 }
1050
1051 GstFlowReturn MediaPlayerPrivateGStreamer::newTextSampleCallback(MediaPlayerPrivateGStreamer* player)
1052 {
1053     player->newTextSample();
1054     return GST_FLOW_OK;
1055 }
1056
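// Pulls the pending cue sample from the text app sink and hands it to the text track
// whose stream ID matches the pad's sticky stream-start event.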
1057 void MediaPlayerPrivateGStreamer::newTextSample()
1058 {
1059     if (!m_textAppSink)
1060         return;
1061
1062     GRefPtr<GstEvent> streamStartEvent = adoptGRef(
1063         gst_pad_get_sticky_event(m_textAppSinkPad.get(), GST_EVENT_STREAM_START, 0));
1064
1065     GRefPtr<GstSample> sample;
1066     g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr);
1067     ASSERT(sample);
1068
1069     if (streamStartEvent) {
1070         bool found = false;
1071         const gchar* id;
1072         gst_event_parse_stream_start(streamStartEvent.get(), &id);
1073         for (auto& track : m_textTracks.values()) {
1074             if (!strcmp(track->streamId().utf8().data(), id)) {
1075                 track->handleSample(sample);
1076                 found = true;
1077                 break;
1078             }
1079         }
1080         if (!found)
1081             GST_WARNING("Got sample with unknown stream ID %s.", id);
1082     } else
1083         GST_WARNING("Unable to handle sample with no stream start event.");
1084 }
1085 #endif
1086
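// Stores the requested playback rate (clamped to [-20, 20]). A rate of zero pauses the
// pipeline instead of seeking, and live streams reject rate changes altogether.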
1087 void MediaPlayerPrivateGStreamer::setRate(float rate)
1088 {
1089     // Higher rate causes crash.
1090     rate = clampTo(rate, -20.0, 20.0);
1091
1092     // Avoid useless playback rate update.
1093     if (m_playbackRate == rate) {
1094         // but make sure upper layers are notified if the rate was set earlier.
1095
1096         if (!m_changingRate && m_player->rate() != m_playbackRate)
1097             m_player->rateChanged();
1098         return;
1099     }
1100
1101     if (isLiveStream()) {
1102         // Notify upper layers that we cannot handle the requested rate.
1103         m_changingRate = false;
1104         m_player->rateChanged();
1105         return;
1106     }
1107
1108     GstState state;
1109     GstState pending;
1110
1111     m_playbackRate = rate;
1112     m_changingRate = true;
1113
1114     gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
1115
1116     if (!rate) {
1117         m_changingRate = false;
1118         m_playbackRatePause = true;
1119         if (state != GST_STATE_PAUSED && pending != GST_STATE_PAUSED)
1120             changePipelineState(GST_STATE_PAUSED);
1121         return;
1122     }
1123
1124     if ((state != GST_STATE_PLAYING && state != GST_STATE_PAUSED)
1125         || (pending == GST_STATE_PAUSED))
1126         return;
1127
1128     updatePlaybackRate();
1129 }
1130
1131 double MediaPlayerPrivateGStreamer::rate() const
1132 {
1133     return m_playbackRate;
1134 }
1135
1136 void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
1137 {
1138     m_preservesPitch = preservesPitch;
1139 }
1140
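// Builds the buffered ranges from a percent-format buffering query, scaling each range
// against the media duration. When the query yields nothing, falls back to a single
// [0, maxTimeLoaded()] range.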
1141 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
1142 {
1143     auto timeRanges = std::make_unique<PlatformTimeRanges>();
1144     if (m_errorOccured || isLiveStream())
1145         return timeRanges;
1146
1147     MediaTime mediaDuration = durationMediaTime();
1148     if (!mediaDuration || mediaDuration.isPositiveInfinite())
1149         return timeRanges;
1150
1151     GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
1152
1153     if (!gst_element_query(m_pipeline.get(), query)) {
1154         gst_query_unref(query);
1155         return timeRanges;
1156     }
1157
1158     guint numBufferingRanges = gst_query_get_n_buffering_ranges(query);
1159     for (guint index = 0; index < numBufferingRanges; index++) {
1160         gint64 rangeStart = 0, rangeStop = 0;
1161         if (gst_query_parse_nth_buffering_range(query, index, &rangeStart, &rangeStop)) {
1162             uint64_t startTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStart, GST_FORMAT_PERCENT_MAX);
1163             uint64_t stopTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStop, GST_FORMAT_PERCENT_MAX);
1164             timeRanges->add(MediaTime(startTime, GST_SECOND), MediaTime(stopTime, GST_SECOND));
1165         }
1166     }
1167
1168     // Fallback to the more general maxTimeLoaded() if no range has
1169     // been found.
1170     if (!timeRanges->length()) {
1171         MediaTime loaded = maxTimeLoaded();
1172         if (loaded.isValid() && loaded)
1173             timeRanges->add(MediaTime::zeroTime(), loaded);
1174     }
1175
1176     gst_query_unref(query);
1177
1178     return timeRanges;
1179 }
1180
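// Central handler for GStreamer bus messages: errors, EOS, state changes, buffering,
// clock loss, missing plugins and element-specific messages (HTTP headers, network
// statistics, DRM key handling, stream selection) are all dispatched from here.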
1181 void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
1182 {
1183     GUniqueOutPtr<GError> err;
1184     GUniqueOutPtr<gchar> debug;
1185     MediaPlayer::NetworkState error;
1186     bool issueError = true;
1187     bool attemptNextLocation = false;
1188     const GstStructure* structure = gst_message_get_structure(message);
1189     GstState requestedState, currentState;
1190
1191     m_canFallBackToLastFinishedSeekPosition = false;
1192
1193     if (structure) {
1194         const gchar* messageTypeName = gst_structure_get_name(structure);
1195
1196         // Redirect messages are sent from elements, like qtdemux, to
1197         // notify of the new location(s) of the media.
1198         if (!g_strcmp0(messageTypeName, "redirect")) {
1199             mediaLocationChanged(message);
1200             return;
1201         }
1202     }
1203
1204     // We ignore state changes from internal elements. They are forwarded to playbin2 anyway.
1205     bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get());
1206
1207     GST_LOG_OBJECT(pipeline(), "Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
1208     switch (GST_MESSAGE_TYPE(message)) {
1209     case GST_MESSAGE_ERROR:
1210         if (m_resetPipeline || !m_missingPluginCallbacks.isEmpty() || m_errorOccured)
1211             break;
1212         gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
1213         GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());
1214
1215         GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");
1216
1217         error = MediaPlayer::Empty;
1218         if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_CODEC_NOT_FOUND)
1219             || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE)
1220             || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED)
1221             || g_error_matches(err.get(), GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)
1222             || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
1223             error = MediaPlayer::FormatError;
1224         else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
1225             // Let the mediaPlayerClient handle the stream error, in
1226             // this case the HTMLMediaElement will emit a stalled
1227             // event.
1228             GST_ERROR("Decode error, let the Media element emit a stalled event.");
1229             m_loadingStalled = true;
1230             break;
1231         } else if (err->domain == GST_STREAM_ERROR) {
1232             error = MediaPlayer::DecodeError;
1233             attemptNextLocation = true;
1234         } else if (err->domain == GST_RESOURCE_ERROR)
1235             error = MediaPlayer::NetworkError;
1236
1237         if (attemptNextLocation)
1238             issueError = !loadNextLocation();
1239         if (issueError) {
1240             m_errorOccured = true;
1241             if (m_networkState != error) {
1242                 m_networkState = error;
1243                 m_player->networkStateChanged();
1244             }
1245         }
1246         break;
1247     case GST_MESSAGE_EOS:
1248         didEnd();
1249         break;
1250     case GST_MESSAGE_ASYNC_DONE:
1251         if (!messageSourceIsPlaybin || m_delayingLoad)
1252             break;
1253         asyncStateChangeDone();
1254         break;
1255     case GST_MESSAGE_STATE_CHANGED: {
1256         if (!messageSourceIsPlaybin || m_delayingLoad)
1257             break;
1258         updateStates();
1259
1260         // Construct a filename for the graphviz dot file output.
1261         GstState newState;
1262         gst_message_parse_state_changed(message, &currentState, &newState, nullptr);
1263         CString dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), '.',
1264             gst_element_state_get_name(currentState), '_', gst_element_state_get_name(newState)).utf8();
1265         GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());
1266
1267         break;
1268     }
1269     case GST_MESSAGE_BUFFERING:
1270         processBufferingStats(message);
1271         break;
1272     case GST_MESSAGE_DURATION_CHANGED:
1273         // Duration in MSE is managed by MediaSource, SourceBuffer and AppendPipeline.
1274         if (messageSourceIsPlaybin && !isMediaSource())
1275             durationChanged();
1276         break;
1277     case GST_MESSAGE_REQUEST_STATE:
1278         gst_message_parse_request_state(message, &requestedState);
1279         gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND);
1280         if (requestedState < currentState) {
1281             GST_INFO_OBJECT(pipeline(), "Element %s requested state change to %s", GST_MESSAGE_SRC_NAME(message),
1282                 gst_element_state_get_name(requestedState));
1283             m_requestedState = requestedState;
1284             if (!changePipelineState(requestedState))
1285                 loadingFailed(MediaPlayer::Empty);
1286         }
1287         break;
1288     case GST_MESSAGE_CLOCK_LOST:
1289         // This can only happen in PLAYING state and we should just
1290         // get a new clock by moving back to PAUSED and then to
1291         // PLAYING again.
1292         // This can happen if the stream that ends in a sink that
1293         // provides the current clock disappears, for example if
1294         // the audio sink provides the clock and the audio stream
1295         // is disabled. It also happens relatively often with
1296         // HTTP adaptive streams when switching between different
1297         // variants of a stream.
1298         gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
1299         gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
1300         break;
1301     case GST_MESSAGE_LATENCY:
1302         // Recalculate the latency, we don't need any special handling
1303         // here other than the GStreamer default.
1304         // This can happen if the latency of live elements changes, or
1305         // for one reason or another a new live element is added or
1306         // removed from the pipeline.
1307         gst_bin_recalculate_latency(GST_BIN(m_pipeline.get()));
1308         break;
1309     case GST_MESSAGE_ELEMENT:
1310         if (gst_is_missing_plugin_message(message)) {
1311             if (gst_install_plugins_supported()) {
1312                 auto missingPluginCallback = MediaPlayerRequestInstallMissingPluginsCallback::create([weakThis = makeWeakPtr(*this)](uint32_t result, MediaPlayerRequestInstallMissingPluginsCallback& missingPluginCallback) {
1313                     if (!weakThis) {
1314                         GST_INFO("got missing plugin installation callback in destroyed player with result %u", result);
1315                         return;
1316                     }
1317
1318                     GST_DEBUG("got missing plugin installation callback with result %u", result);
1319                     RefPtr<MediaPlayerRequestInstallMissingPluginsCallback> protectedMissingPluginCallback = &missingPluginCallback;
1320                     weakThis->m_missingPluginCallbacks.removeFirst(protectedMissingPluginCallback);
1321                     if (result != GST_INSTALL_PLUGINS_SUCCESS)
1322                         return;
1323
1324                     weakThis->changePipelineState(GST_STATE_READY);
1325                     weakThis->changePipelineState(GST_STATE_PAUSED);
1326                 });
1327                 m_missingPluginCallbacks.append(missingPluginCallback.copyRef());
1328                 GUniquePtr<char> detail(gst_missing_plugin_message_get_installer_detail(message));
1329                 GUniquePtr<char> description(gst_missing_plugin_message_get_description(message));
1330                 m_player->client().requestInstallMissingPlugins(String::fromUTF8(detail.get()), String::fromUTF8(description.get()), missingPluginCallback.get());
1331             }
1332         }
1333 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
1334         else if (GstMpegtsSection* section = gst_message_parse_mpegts_section(message)) {
1335             processMpegTsSection(section);
1336             gst_mpegts_section_unref(section);
1337         }
1338 #endif
1339 #if ENABLE(ENCRYPTED_MEDIA)
1340         else if (gst_structure_has_name(structure, "drm-waiting-for-key")) {
1341             GST_DEBUG_OBJECT(pipeline(), "drm-waiting-for-key message from %s", GST_MESSAGE_SRC_NAME(message));
1342             setWaitingForKey(true);
1343             // FIXME: The decryptors should be able to attempt decryption after being created and linked in a pipeline, but currently they cannot, and the current
1344             // architecture does not make this easy. Fortunately, the architecture will change soon, so it is not worth fixing this now with something that could be
1345             // more convoluted. In the meantime, force an attempt to decrypt when they get blocked.
1346             attemptToDecryptWithLocalInstance();
1347         } else if (gst_structure_has_name(structure, "drm-key-received")) {
1348             GST_DEBUG_OBJECT(pipeline(), "drm-key-received message from %s", GST_MESSAGE_SRC_NAME(message));
1349             setWaitingForKey(false);
1350         }
1351 #endif
1352         else if (gst_structure_has_name(structure, "http-headers")) {
1353             const char* redirectionUri = gst_structure_get_string(structure, "redirection-uri");
1354             const char* uri = redirectionUri ? redirectionUri : gst_structure_get_string(structure, "uri");
1355             if (uri) {
1356                 URL url(URL(), uri);
1357
1358                 m_origins.add(SecurityOrigin::create(url));
1359
1360                 if (url != m_url) {
1361                     GST_DEBUG_OBJECT(pipeline(), "Ignoring HTTP response headers for non-main URI.");
1362                     break;
1363                 }
1364             }
1365             GUniqueOutPtr<GstStructure> responseHeaders;
1366             if (gst_structure_get(structure, "response-headers", GST_TYPE_STRUCTURE, &responseHeaders.outPtr(), nullptr)) {
1367                 CString contentLengthHeaderName = httpHeaderNameString(HTTPHeaderName::ContentLength).utf8();
1368                 uint64_t contentLength = 0;
1369                 gst_structure_get_uint64(responseHeaders.get(), contentLengthHeaderName.data(), &contentLength);
1370                 GST_INFO_OBJECT(pipeline(), "%s stream detected", !contentLength ? "Live" : "Non-live");
1371                 if (!contentLength) {
1372                     m_isStreaming = true;
1373                     setDownloadBuffering();
1374                 }
1375             }
1376         } else if (gst_structure_has_name(structure, "webkit-network-statistics")) {
1377             if (gst_structure_get(structure, "read-position", G_TYPE_UINT64, &m_networkReadPosition, "size", G_TYPE_UINT64, &m_httpResponseTotalSize, nullptr))
1378                 GST_DEBUG_OBJECT(pipeline(), "Updated network read position %" G_GUINT64_FORMAT ", size: %" G_GUINT64_FORMAT, m_networkReadPosition, m_httpResponseTotalSize);
1379         } else
1380             GST_DEBUG_OBJECT(pipeline(), "Unhandled element message: %" GST_PTR_FORMAT, structure);
1381         break;
1382 #if ENABLE(VIDEO_TRACK)
1383     case GST_MESSAGE_TOC:
1384         processTableOfContents(message);
1385         break;
1386 #endif
1387     case GST_MESSAGE_TAG: {
1388         GstTagList* tags = nullptr;
1389         GUniqueOutPtr<gchar> tag;
1390         gst_message_parse_tag(message, &tags);
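             // Map the image-orientation tag advertised by the container to the corresponding
             // ImageOrientation so the rendering path can compensate for the rotation baked
             // into the stream.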
1391         if (gst_tag_list_get_string(tags, GST_TAG_IMAGE_ORIENTATION, &tag.outPtr())) {
1392             if (!g_strcmp0(tag.get(), "rotate-90"))
1393                 setVideoSourceOrientation(ImageOrientation(OriginRightTop));
1394             else if (!g_strcmp0(tag.get(), "rotate-180"))
1395                 setVideoSourceOrientation(ImageOrientation(OriginBottomRight));
1396             else if (!g_strcmp0(tag.get(), "rotate-270"))
1397                 setVideoSourceOrientation(ImageOrientation(OriginLeftBottom));
1398         }
1399         gst_tag_list_unref(tags);
1400         break;
1401     }
1402 #if GST_CHECK_VERSION(1, 10, 0)
1403     case GST_MESSAGE_STREAMS_SELECTED: {
1404         GRefPtr<GstStreamCollection> collection;
1405         gst_message_parse_streams_selected(message, &collection.outPtr());
1406
1407         if (!collection)
1408             break;
1409
1410         m_streamCollection.swap(collection);
1411         m_currentAudioStreamId = "";
1412         m_currentVideoStreamId = "";
1413         m_currentTextStreamId = "";
1414
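             // Walk the selected streams and mark the matching WebCore tracks, looked up by
             // stream-id, as active.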
1415         unsigned length = gst_message_streams_selected_get_size(message);
1416         for (unsigned i = 0; i < length; i++) {
1417             GRefPtr<GstStream> stream = gst_message_streams_selected_get_stream(message, i);
1418             if (!stream)
1419                 continue;
1420
1421             GstStreamType type = gst_stream_get_stream_type(stream.get());
1422             String streamId(gst_stream_get_stream_id(stream.get()));
1423
1424             GST_DEBUG_OBJECT(pipeline(), "Selecting %s track with ID: %s", gst_stream_type_get_name(type), streamId.utf8().data());
1425             // Playbin3 can send more than one selected stream of the same type
1426             // but there's no priority or ordering system in place, so we assume
1427             // the selected stream is the last one as reported by playbin3.
1428             if (type & GST_STREAM_TYPE_AUDIO) {
1429                 m_currentAudioStreamId = streamId;
1430                 auto track = m_audioTracks.get(m_currentAudioStreamId);
1431                 ASSERT(track);
1432                 track->markAsActive();
1433             } else if (type & GST_STREAM_TYPE_VIDEO) {
1434                 m_currentVideoStreamId = streamId;
1435                 auto track = m_videoTracks.get(m_currentVideoStreamId);
1436                 ASSERT(track);
1437                 track->markAsActive();
1438             } else if (type & GST_STREAM_TYPE_TEXT)
1439                 m_currentTextStreamId = streamId;
1440             else
1441                 GST_WARNING("Unknown stream type with stream-id %s", streamId.utf8().data());
1442         }
1443         break;
1444     }
1445 #endif
1446     default:
1447         GST_DEBUG_OBJECT(pipeline(), "Unhandled GStreamer message type: %s", GST_MESSAGE_TYPE_NAME(message));
1448         break;
1449     }
1450 }
1451
1452 void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message)
1453 {
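         // Cache the buffering level carried by GST_MESSAGE_BUFFERING messages. updateStates()
         // relies on m_buffering and m_bufferingPercentage to pause the pipeline while buffering
         // is in progress and to resume playback once it reaches 100%.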
1454     m_buffering = true;
1455     gst_message_parse_buffering(message, &m_bufferingPercentage);
1456
1457     GST_DEBUG_OBJECT(pipeline(), "[Buffering] Buffering: %d%%.", m_bufferingPercentage);
1458
1459     if (m_bufferingPercentage == 100)
1460         updateStates();
1461 }
1462
1463 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
1464 void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section)
1465 {
1466     ASSERT(section);
1467
1468     if (section->section_type == GST_MPEGTS_SECTION_PMT) {
1469         const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
1470         m_metadataTracks.clear();
1471         for (guint i = 0; i < pmt->streams->len; ++i) {
1472             const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
1473             if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
1474                 AtomicString pid = String::number(stream->pid);
1475                 auto track = InbandMetadataTextTrackPrivateGStreamer::create(
1476                     InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data, pid);
1477
1478                 // 4.7.10.12.2 Sourcing in-band text tracks
1479                 // If the new text track's kind is metadata, then set the text track in-band metadata track dispatch
1480                 // type as follows, based on the type of the media resource:
1481                 // Let stream type be the value of the "stream_type" field describing the text track's type in the
1482                 // file's program map section, interpreted as an 8-bit unsigned integer. Let length be the value of
1483                 // the "ES_info_length" field for the track in the same part of the program map section, interpreted
1484                 // as an integer as defined by the MPEG-2 specification. Let descriptor bytes be the length bytes
1485                 // following the "ES_info_length" field. The text track in-band metadata track dispatch type must be
1486                 // set to the concatenation of the stream type byte and the zero or more descriptor bytes bytes,
1487                 // expressed in hexadecimal using uppercase ASCII hex digits.
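                     // For example, a private-data stream (stream_type 0x05) carrying no descriptors
                     // gets the dispatch type "05"; any descriptor bytes are appended after it in hex.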
1488                 String inbandMetadataTrackDispatchType;
1489                 appendUnsignedAsHexFixedSize(stream->stream_type, inbandMetadataTrackDispatchType, 2);
1490                 for (guint j = 0; j < stream->descriptors->len; ++j) {
1491                     const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
1492                     for (guint k = 0; k < descriptor->length; ++k)
1493                         appendByteAsHex(descriptor->data[k], inbandMetadataTrackDispatchType);
1494                 }
1495                 track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType);
1496
1497                 m_metadataTracks.add(pid, track);
1498                 m_player->addTextTrack(*track);
1499             }
1500         }
1501     } else {
1502         AtomicString pid = String::number(section->pid);
1503         RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = m_metadataTracks.get(pid);
1504         if (!track)
1505             return;
1506
1507         GRefPtr<GBytes> data = gst_mpegts_section_get_data(section);
1508         gsize size;
1509         const void* bytes = g_bytes_get_data(data.get(), &size);
1510
1511         track->addDataCue(currentMediaTime(), currentMediaTime(), bytes, size);
1512     }
1513 }
1514 #endif
1515
1516 #if ENABLE(VIDEO_TRACK)
1517 void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
1518 {
1519     if (m_chaptersTrack)
1520         m_player->removeTextTrack(*m_chaptersTrack);
1521
1522     m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Chapters, InbandTextTrackPrivate::Generic);
1523     m_player->addTextTrack(*m_chaptersTrack);
1524
1525     GRefPtr<GstToc> toc;
1526     gboolean updated;
1527     gst_message_parse_toc(message, &toc.outPtr(), &updated);
1528     ASSERT(toc);
1529
1530     for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next)
1531         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1532 }
1533
1534 void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry)
1535 {
1536     ASSERT(entry);
1537
1538     auto cue = GenericCueData::create();
1539
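         // Entry boundaries are reported in nanoseconds; the -1 initial values are kept when the
         // container did not provide the corresponding boundary.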
1540     gint64 start = -1, stop = -1;
1541     gst_toc_entry_get_start_stop_times(entry, &start, &stop);
1542     if (start != -1)
1543         cue->setStartTime(MediaTime(start, GST_SECOND));
1544     if (stop != -1)
1545         cue->setEndTime(MediaTime(stop, GST_SECOND));
1546
1547     GstTagList* tags = gst_toc_entry_get_tags(entry);
1548     if (tags) {
1549         gchar* title = nullptr;
1550         gst_tag_list_get_string(tags, GST_TAG_TITLE, &title);
1551         if (title) {
1552             cue->setContent(title);
1553             g_free(title);
1554         }
1555     }
1556
1557     m_chaptersTrack->addGenericCue(cue);
1558
1559     for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next)
1560         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1561 }
1562
1563 void MediaPlayerPrivateGStreamer::purgeInvalidAudioTracks(Vector<String> validTrackIds)
1564 {
1565     m_audioTracks.removeIf([validTrackIds](auto& keyAndValue) {
1566         return !validTrackIds.contains(keyAndValue.key);
1567     });
1568 }
1569
1570 void MediaPlayerPrivateGStreamer::purgeInvalidVideoTracks(Vector<String> validTrackIds)
1571 {
1572     m_videoTracks.removeIf([validTrackIds](auto& keyAndValue) {
1573         return !validTrackIds.contains(keyAndValue.key);
1574     });
1575 }
1576
1577 void MediaPlayerPrivateGStreamer::purgeInvalidTextTracks(Vector<String> validTrackIds)
1578 {
1579     m_textTracks.removeIf([validTrackIds](auto& keyAndValue) {
1580         return !validTrackIds.contains(keyAndValue.key);
1581     });
1582 }
1583 #endif
1584
1585 void MediaPlayerPrivateGStreamer::fillTimerFired()
1586 {
1587     GRefPtr<GstQuery> query = adoptGRef(gst_query_new_buffering(GST_FORMAT_PERCENT));
1588     double fillStatus = 100.0;
1589
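         // Ask the pipeline how much of the download buffer is filled. The buffering query reports
         // positions in GST_FORMAT_PERCENT, where GST_FORMAT_PERCENT_MAX corresponds to 100%.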
1590     if (gst_element_query(m_pipeline.get(), query.get())) {
1591         int64_t stop;
1592         GstFormat format;
1593         gst_query_parse_buffering_range(query.get(), &format, nullptr, &stop, nullptr);
1594         ASSERT(format == GST_FORMAT_PERCENT);
1595
1596         if (stop != -1)
1597             fillStatus = 100.0 * stop / GST_FORMAT_PERCENT_MAX;
1598     } else if (m_httpResponseTotalSize) {
1599         GST_DEBUG_OBJECT(pipeline(), "[Buffering] Query failed, falling back to network read position estimation");
1600         fillStatus = 100.0 * (static_cast<double>(m_networkReadPosition) / static_cast<double>(m_httpResponseTotalSize));
1601     } else {
1602         GST_DEBUG_OBJECT(pipeline(), "[Buffering] Unable to determine on-disk buffering status");
1603         return;
1604     }
1605
1606     GST_DEBUG_OBJECT(pipeline(), "[Buffering] Download buffer filled up to %f%%", fillStatus);
1607
1608     MediaTime mediaDuration = durationMediaTime();
1609
1610     // Update maxTimeLoaded only if the media duration is
1611     // available. Otherwise we can't compute it.
1612     if (mediaDuration) {
1613         if (fillStatus == 100.0)
1614             m_maxTimeLoaded = mediaDuration;
1615         else
1616             m_maxTimeLoaded = MediaTime(fillStatus * static_cast<double>(toGstUnsigned64Time(mediaDuration)) / 100, GST_SECOND);
1617         GST_DEBUG_OBJECT(pipeline(), "[Buffering] Updated maxTimeLoaded: %s", toString(m_maxTimeLoaded).utf8().data());
1618     }
1619
1620     m_downloadFinished = fillStatus == 100.0;
1621     if (!m_downloadFinished) {
1622         updateStates();
1623         return;
1624     }
1625
1626     // The media is now fully loaded. It will play even if the network
1627     // connection is cut. Buffering is done, so remove the fill source
1628     // from the main loop.
1629     m_fillTimer.stop();
1630     updateStates();
1631 }
1632
1633 MediaTime MediaPlayerPrivateGStreamer::maxMediaTimeSeekable() const
1634 {
1635     GST_TRACE_OBJECT(pipeline(), "errorOccured: %s, isLiveStream: %s", boolForPrinting(m_errorOccured), boolForPrinting(isLiveStream()));
1636     if (m_errorOccured)
1637         return MediaTime::zeroTime();
1638
1639     if (isLiveStream())
1640         return MediaTime::zeroTime();
1641
1642     MediaTime duration = durationMediaTime();
1643     GST_DEBUG_OBJECT(pipeline(), "maxMediaTimeSeekable, duration: %s", toString(duration).utf8().data());
1644     // An infinite duration means this is a live stream.
1645     if (duration.isPositiveInfinite())
1646         return MediaTime::zeroTime();
1647
1648     return duration;
1649 }
1650
1651 MediaTime MediaPlayerPrivateGStreamer::maxTimeLoaded() const
1652 {
1653     if (m_errorOccured)
1654         return MediaTime::zeroTime();
1655
1656     MediaTime loaded = m_maxTimeLoaded;
1657     if (m_isEndReached)
1658         loaded = durationMediaTime();
1659     GST_LOG("maxTimeLoaded: %s", toString(loaded).utf8().data());
1660     return loaded;
1661 }
1662
1663 bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
1664 {
1665     if (m_errorOccured || m_loadingStalled)
1666         return false;
1667
1668     if (WEBKIT_IS_WEB_SRC(m_source.get())) {
1669         GST_LOG_OBJECT(pipeline(), "Last network read position: %" G_GUINT64_FORMAT ", current: %" G_GUINT64_FORMAT, m_readPositionAtLastDidLoadingProgress, m_networkReadPosition);
1670         bool didLoadingProgress = m_readPositionAtLastDidLoadingProgress != m_networkReadPosition;
1671         m_readPositionAtLastDidLoadingProgress = m_networkReadPosition;
1672         return didLoadingProgress;
1673     }
1674
1675     if (UNLIKELY(!m_pipeline || !durationMediaTime() || (!isMediaSource() && !totalBytes())))
1676         return false;
1677
1678     MediaTime currentMaxTimeLoaded = maxTimeLoaded();
1679     bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
1680     m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
1681     GST_LOG_OBJECT(pipeline(), "didLoadingProgress: %s", boolForPrinting(didLoadingProgress));
1682     return didLoadingProgress;
1683 }
1684
1685 unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
1686 {
1687     if (m_errorOccured)
1688         return 0;
1689
1690     if (m_totalBytes)
1691         return m_totalBytes;
1692
1693     if (!m_source)
1694         return 0;
1695
1696     if (isLiveStream())
1697         return 0;
1698
1699     GstFormat fmt = GST_FORMAT_BYTES;
1700     gint64 length = 0;
1701     if (gst_element_query_duration(m_source.get(), fmt, &length)) {
1702         GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
1703         m_totalBytes = static_cast<unsigned long long>(length);
1704         m_isStreaming = !length;
1705         return m_totalBytes;
1706     }
1707
1708     // Fall back to querying the source pads manually.
1709     // See also https://bugzilla.gnome.org/show_bug.cgi?id=638749
1710     GstIterator* iter = gst_element_iterate_src_pads(m_source.get());
1711     bool done = false;
1712     while (!done) {
1713         GValue item = G_VALUE_INIT;
1714         switch (gst_iterator_next(iter, &item)) {
1715         case GST_ITERATOR_OK: {
1716             GstPad* pad = static_cast<GstPad*>(g_value_get_object(&item));
1717             gint64 padLength = 0;
1718             if (gst_pad_query_duration(pad, fmt, &padLength) && padLength > length)
1719                 length = padLength;
1720             break;
1721         }
1722         case GST_ITERATOR_RESYNC:
1723             gst_iterator_resync(iter);
1724             break;
1725         case GST_ITERATOR_ERROR:
1726             FALLTHROUGH;
1727         case GST_ITERATOR_DONE:
1728             done = true;
1729             break;
1730         }
1731
1732         g_value_unset(&item);
1733     }
1734
1735     gst_iterator_free(iter);
1736
1737     GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
1738     m_totalBytes = static_cast<unsigned long long>(length);
1739     m_isStreaming = !length;
1740     return m_totalBytes;
1741 }
1742
1743 void MediaPlayerPrivateGStreamer::sourceSetupCallback(MediaPlayerPrivateGStreamer* player, GstElement* sourceElement)
1744 {
1745     player->sourceSetup(sourceElement);
1746 }
1747
1748 void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GstElement* element, MediaPlayerPrivateGStreamer* player)
1749 {
1750     if (g_strcmp0(G_OBJECT_TYPE_NAME(element), "GstDownloadBuffer"))
1751         return;
1752
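         // uridecodebin just plugged a downloadbuffer element: keep a reference to it, point its
         // temp-template at our own location, watch temp-location so the backing file can be
         // unlinked as soon as it is created, and purge files left over by the previous template.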
1753     player->m_downloadBuffer = element;
1754     g_signal_handlers_disconnect_by_func(bin, reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), player);
1755     g_signal_connect_swapped(element, "notify::temp-location", G_CALLBACK(downloadBufferFileCreatedCallback), player);
1756
1757     GUniqueOutPtr<char> oldDownloadTemplate;
1758     g_object_get(element, "temp-template", &oldDownloadTemplate.outPtr(), nullptr);
1759
1760     GUniquePtr<char> newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, "var", "tmp", "WebKit-Media-XXXXXX", nullptr));
1761     g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr);
1762     GST_DEBUG_OBJECT(player->pipeline(), "Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get());
1763
1764     player->purgeOldDownloadFiles(oldDownloadTemplate.get());
1765 }
1766
1767 void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer* player)
1768 {
1769     ASSERT(player->m_downloadBuffer);
1770
1771     g_signal_handlers_disconnect_by_func(player->m_downloadBuffer.get(), reinterpret_cast<gpointer>(downloadBufferFileCreatedCallback), player);
1772
1773     GUniqueOutPtr<char> downloadFile;
1774     g_object_get(player->m_downloadBuffer.get(), "temp-location", &downloadFile.outPtr(), nullptr);
1775     player->m_downloadBuffer = nullptr;
1776
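         // Unlink the temporary file right after its creation: the element keeps an open file
         // descriptor to it, so on-disk buffering keeps working while no stale file is left behind.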
1777     if (UNLIKELY(!FileSystem::deleteFile(downloadFile.get()))) {
1778         GST_WARNING("Couldn't unlink media temporary file %s after creation", downloadFile.get());
1779         return;
1780     }
1781
1782     GST_DEBUG_OBJECT(player->pipeline(), "Unlinked media temporary file %s after creation", downloadFile.get());
1783 }
1784
1785 void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate)
1786 {
1787     if (!downloadFileTemplate)
1788         return;
1789
1790     GUniquePtr<char> templatePath(g_path_get_dirname(downloadFileTemplate));
1791     GUniquePtr<char> templateFile(g_path_get_basename(downloadFileTemplate));
1792     String templatePattern = String(templateFile.get()).replace("X", "?");
1793
1794     for (auto& filePath : FileSystem::listDirectory(templatePath.get(), templatePattern)) {
1795         if (UNLIKELY(!FileSystem::deleteFile(filePath))) {
1796             GST_WARNING("Couldn't unlink legacy media temporary file: %s", filePath.utf8().data());
1797             continue;
1798         }
1799
1800         GST_TRACE("Unlinked legacy media temporary file: %s", filePath.utf8().data());
1801     }
1802 }
1803
1804 void MediaPlayerPrivateGStreamer::sourceSetup(GstElement* sourceElement)
1805 {
1806     GST_DEBUG_OBJECT(pipeline(), "Source element set-up for %s", GST_ELEMENT_NAME(sourceElement));
1807
1808     if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
1809         g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);
1810
1811     m_source = sourceElement;
1812
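         // When the source is our own webkitwebsrc (HTTP/HTTPS playback), hand it the MediaPlayer
         // so its requests go through WebKit's loader, and watch uridecodebin's element-added
         // signal to catch the downloadbuffer element (see uriDecodeBinElementAddedCallback).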
1813     if (WEBKIT_IS_WEB_SRC(m_source.get())) {
1814         webKitWebSrcSetMediaPlayer(WEBKIT_WEB_SRC(m_source.get()), m_player);
1815         g_signal_connect(GST_ELEMENT_PARENT(m_source.get()), "element-added", G_CALLBACK(uriDecodeBinElementAddedCallback), this);
1816 #if ENABLE(MEDIA_STREAM) && GST_CHECK_VERSION(1, 10, 0)
1817     } else if (WEBKIT_IS_MEDIA_STREAM_SRC(sourceElement)) {
1818         auto stream = m_streamPrivate.get();
1819         ASSERT(stream);
1820         webkitMediaStreamSrcSetStream(WEBKIT_MEDIA_STREAM_SRC(sourceElement), stream);
1821 #endif
1822     }
1823 }
1824
1825 bool MediaPlayerPrivateGStreamer::hasSingleSecurityOrigin() const
1826 {
1827     if (!m_source)
1828         return false;
1829
1830     if (!WEBKIT_IS_WEB_SRC(m_source.get()))
1831         return true;
1832
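         // webkitwebsrc exposes both the originally requested location and the one it ended up at
         // after following redirections. The media has a single security origin only if both share
         // the same scheme, host and port.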
1833     GUniqueOutPtr<char> originalURI, resolvedURI;
1834     g_object_get(m_source.get(), "location", &originalURI.outPtr(), "resolved-location", &resolvedURI.outPtr(), nullptr);
1835     if (!originalURI || !resolvedURI)
1836         return false;
1837     if (!g_strcmp0(originalURI.get(), resolvedURI.get()))
1838         return true;
1839
1840     Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::createFromString(String::fromUTF8(resolvedURI.get())));
1841     Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(String::fromUTF8(originalURI.get())));
1842     return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
1843 }
1844
1845 void MediaPlayerPrivateGStreamer::cancelLoad()
1846 {
1847     if (m_networkState < MediaPlayer::Loading || m_networkState == MediaPlayer::Loaded)
1848         return;
1849
1850     if (m_pipeline)
1851         changePipelineState(GST_STATE_READY);
1852 }
1853
1854 void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
1855 {
1856     if (!m_pipeline || m_errorOccured)
1857         return;
1858
1859     if (m_seeking) {
1860         if (m_seekIsPending)
1861             updateStates();
1862         else {
1863             GST_DEBUG_OBJECT(pipeline(), "[Seek] seeked to %s", toString(m_seekTime).utf8().data());
1864             m_seeking = false;
1865             m_cachedPosition = MediaTime::invalidTime();
1866             if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek.isValid()) {
1867                 seek(m_timeOfOverlappingSeek);
1868                 m_timeOfOverlappingSeek = MediaTime::invalidTime();
1869                 return;
1870             }
1871             m_timeOfOverlappingSeek = MediaTime::invalidTime();
1872
1873             // The pipeline can still have a pending state. In this case a position query will fail.
1874             // Right now we can use m_seekTime as a fallback.
1875             m_canFallBackToLastFinishedSeekPosition = true;
1876             timeChanged();
1877         }
1878     } else
1879         updateStates();
1880 }
1881
1882 void MediaPlayerPrivateGStreamer::updateStates()
1883 {
1884     if (!m_pipeline)
1885         return;
1886
1887     if (m_errorOccured)
1888         return;
1889
1890     MediaPlayer::NetworkState oldNetworkState = m_networkState;
1891     MediaPlayer::ReadyState oldReadyState = m_readyState;
1892     GstState pending;
1893     GstState state;
1894     bool stateReallyChanged = false;
1895
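         // Query the pipeline state with a negligible timeout so this never blocks; an ASYNC
         // return means a state change is still in progress.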
1896     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
1897     if (state != m_currentState) {
1898         m_oldState = m_currentState;
1899         m_currentState = state;
1900         stateReallyChanged = true;
1901     }
1902
1903     bool shouldUpdatePlaybackState = false;
1904     switch (getStateResult) {
1905     case GST_STATE_CHANGE_SUCCESS: {
1906         GST_DEBUG_OBJECT(pipeline(), "State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
1907
1908         // Do nothing if EOS was reached and the state changed to READY, to avoid recreating the player
1909         // in HTMLMediaElement and to let the video 'ended' event be generated properly.
1910         if (m_isEndReached && m_currentState == GST_STATE_READY)
1911             break;
1912
1913         m_resetPipeline = m_currentState <= GST_STATE_READY;
1914
1915         bool didBuffering = m_buffering;
1916
1917         // Update ready and network states.
1918         switch (m_currentState) {
1919         case GST_STATE_NULL:
1920             m_readyState = MediaPlayer::HaveNothing;
1921             m_networkState = MediaPlayer::Empty;
1922             break;
1923         case GST_STATE_READY:
1924             m_readyState = MediaPlayer::HaveMetadata;
1925             m_networkState = MediaPlayer::Empty;
1926             break;
1927         case GST_STATE_PAUSED:
1928         case GST_STATE_PLAYING:
1929             if (m_buffering) {
1930                 if (m_bufferingPercentage == 100) {
1931                     GST_DEBUG_OBJECT(pipeline(), "[Buffering] Complete.");
1932                     m_buffering = false;
1933                     m_readyState = MediaPlayer::HaveEnoughData;
1934                     m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
1935                 } else {
1936                     m_readyState = MediaPlayer::HaveCurrentData;
1937                     m_networkState = MediaPlayer::Loading;
1938                 }
1939             } else if (m_downloadFinished) {
1940                 m_readyState = MediaPlayer::HaveEnoughData;
1941                 m_networkState = MediaPlayer::Loaded;
1942             } else {
1943                 m_readyState = MediaPlayer::HaveFutureData;
1944                 m_networkState = MediaPlayer::Loading;
1945             }
1946
1947             break;
1948         default:
1949             ASSERT_NOT_REACHED();
1950             break;
1951         }
1952
1953         // Sync states where needed.
1954         if (m_currentState == GST_STATE_PAUSED) {
1955             if (!m_volumeAndMuteInitialized) {
1956                 notifyPlayerOfVolumeChange();
1957                 notifyPlayerOfMute();
1958                 m_volumeAndMuteInitialized = true;
1959             }
1960
1961             if (didBuffering && !m_buffering && !m_paused && m_playbackRate) {
1962                 GST_DEBUG_OBJECT(pipeline(), "[Buffering] Restarting playback.");
1963                 changePipelineState(GST_STATE_PLAYING);
1964             }
1965         } else if (m_currentState == GST_STATE_PLAYING) {
1966             m_paused = false;
1967
1968             if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
1969                 GST_DEBUG_OBJECT(pipeline(), "[Buffering] Pausing stream for buffering.");
1970                 changePipelineState(GST_STATE_PAUSED);
1971             }
1972         } else
1973             m_paused = true;
1974
1975         GST_DEBUG_OBJECT(pipeline(), "Old state: %s, new state: %s (requested: %s)", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState), gst_element_state_get_name(m_requestedState));
1976         if (m_requestedState == GST_STATE_PAUSED && m_currentState == GST_STATE_PAUSED) {
1977             shouldUpdatePlaybackState = true;
1978             GST_INFO_OBJECT(pipeline(), "Requested state change to %s was completed", gst_element_state_get_name(m_currentState));
1979         }
1980
1981         // Emit play state change notification only when going to PLAYING so that
1982         // the media element gets a chance to enable its page sleep disabler.
1983         // Emitting this notification in more cases triggers unwanted code paths
1984         // and test timeouts.
1985         if (stateReallyChanged && (m_oldState != m_currentState) && (m_oldState == GST_STATE_PAUSED && m_currentState == GST_STATE_PLAYING)) {
1986             GST_INFO_OBJECT(pipeline(), "Playback state changed from %s to %s. Notifying the media player client", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState));
1987             shouldUpdatePlaybackState = true;
1988         }
1989
1990         break;
1991     }
1992     case GST_STATE_CHANGE_ASYNC:
1993         GST_DEBUG_OBJECT(pipeline(), "Async: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
1994         // Change in progress.
1995         break;
1996     case GST_STATE_CHANGE_FAILURE:
1997         GST_DEBUG_OBJECT(pipeline(), "Failure: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
1998         // Change failed
1999         return;
2000     case GST_STATE_CHANGE_NO_PREROLL:
2001         GST_DEBUG_OBJECT(pipeline(), "No preroll: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
2002
2003         // Live pipelines go to PAUSED without prerolling.
2004         m_isStreaming = true;
2005         setDownloadBuffering();
2006
2007         if (m_currentState == GST_STATE_READY)
2008             m_readyState = MediaPlayer::HaveNothing;
2009         else if (m_currentState == GST_STATE_PAUSED) {
2010             m_readyState = MediaPlayer::HaveEnoughData;
2011             m_paused = true;
2012         } else if (m_currentState == GST_STATE_PLAYING)
2013             m_paused = false;
2014
2015         if (!m_paused && m_playbackRate)
2016             changePipelineState(GST_STATE_PLAYING);
2017
2018         m_networkState = MediaPlayer::Loading;
2019         break;
2020     default:
2021         GST_DEBUG_OBJECT(pipeline(), "Else : %d", getStateResult);
2022         break;
2023     }
2024
2025     m_requestedState = GST_STATE_VOID_PENDING;
2026
2027     if (shouldUpdatePlaybackState)
2028         m_player->playbackStateChanged();
2029
2030     if (m_networkState != oldNetworkState) {
2031         GST_DEBUG_OBJECT(pipeline(), "Network State Changed from %s to %s", convertEnumerationToString(oldNetworkState).utf8().data(), convertEnumerationToString(m_networkState).utf8().data());
2032         m_player->networkStateChanged();
2033     }
2034     if (m_readyState != oldReadyState) {
2035         GST_DEBUG_OBJECT(pipeline(), "Ready State Changed from %s to %s", convertEnumerationToString(oldReadyState).utf8().data(), convertEnumerationToString(m_readyState).utf8().data());
2036         m_player->readyStateChanged();
2037     }
2038
2039     if (getStateResult == GST_STATE_CHANGE_SUCCESS && m_currentState >= GST_STATE_PAUSED) {
2040         updatePlaybackRate();
2041         if (m_seekIsPending) {
2042             GST_DEBUG_OBJECT(pipeline(), "[Seek] committing pending seek to %s", toString(m_seekTime).utf8().data());
2043             m_seekIsPending = false;
2044             m_seeking = doSeek(m_seekTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
2045             if (!m_seeking) {
2046                 m_cachedPosition = MediaTime::invalidTime();
2047                 GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(m_seekTime).utf8().data());
2048             }
2049         }
2050     }
2051 }
2052
2053 bool MediaPlayerPrivateGStreamer::handleSyncMessage(GstMessage* message)
2054 {
2055 #if GST_CHECK_VERSION(1, 10, 0)
2056     if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_STREAM_COLLECTION && !m_isLegacyPlaybin) {
2057         GRefPtr<GstStreamCollection> collection;
2058         gst_message_parse_stream_collection(message, &collection.outPtr());
2059
2060         if (collection) {
2061             m_streamCollection.swap(collection);
2062             m_notifier->notify(MainThreadNotification::StreamCollectionChanged, [this] {
2063                 this->updateTracks();
2064             });
2065         }
2066     }
2067 #endif
2068
2069     return MediaPlayerPrivateGStreamerBase::handleSyncMessage(message);
2070 }
2071
2072 void MediaPlayerPrivateGStreamer::mediaLocationChanged(GstMessage* message)
2073 {
2074     if (m_mediaLocations)
2075         gst_structure_free(m_mediaLocations);
2076
2077     const GstStructure* structure = gst_message_get_structure(message);
2078     if (structure) {
2079         // This structure can contain:
2080         // - both a new-location string and embedded locations structure
2081         // - or only a new-location string.
2082         m_mediaLocations = gst_structure_copy(structure);
2083         const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2084
2085         if (locations)
2086             m_mediaLocationCurrentIndex = static_cast<int>(gst_value_list_get_size(locations)) - 1;
2087
2088         loadNextLocation();
2089     }
2090 }
2091
2092 bool MediaPlayerPrivateGStreamer::loadNextLocation()
2093 {
2094     if (!m_mediaLocations)
2095         return false;
2096
2097     const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2098     const gchar* newLocation = nullptr;
2099
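         // The redirection message either carries a "locations" list, walked from the last entry
         // backwards through m_mediaLocationCurrentIndex, or a single new-location string.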
2100     if (!locations) {
2101         // Fallback on new-location string.
2102         newLocation = gst_structure_get_string(m_mediaLocations, "new-location");
2103         if (!newLocation)
2104             return false;
2105     }
2106
2107     if (!newLocation) {
2108         if (m_mediaLocationCurrentIndex < 0) {
                 gst_structure_free(m_mediaLocations);
2109             m_mediaLocations = nullptr;
2110             return false;
2111         }
2112
2113         const GValue* location = gst_value_list_get_value(locations, m_mediaLocationCurrentIndex);
2114         const GstStructure* structure = gst_value_get_structure(location);
2115
2116         if (!structure) {
2117             m_mediaLocationCurrentIndex--;
2118             return false;
2119         }
2120
2121         newLocation = gst_structure_get_string(structure, "new-location");
2122     }
2123
2124     if (newLocation) {
2125         // Found a candidate. new-location is not always an absolute URL
2126         // though, so it may need to be resolved against the base of the
2127         // current URL.
2128         URL baseUrl = gst_uri_is_valid(newLocation) ? URL() : m_url;
2129         URL newUrl = URL(baseUrl, newLocation);
2130
2131         auto securityOrigin = SecurityOrigin::create(m_url);
2132         if (securityOrigin->canRequest(newUrl)) {
2133             GST_INFO_OBJECT(pipeline(), "New media url: %s", newUrl.string().utf8().data());
2134
2135             // Reset player states.
2136             m_networkState = MediaPlayer::Loading;
2137             m_player->networkStateChanged();
2138             m_readyState = MediaPlayer::HaveNothing;
2139             m_player->readyStateChanged();
2140
2141             // Reset pipeline state.
2142             m_resetPipeline = true;
2143             changePipelineState(GST_STATE_READY);
2144
2145             GstState state;
2146             gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
2147             if (state <= GST_STATE_READY) {
2148                 // Set the new uri and start playing.
2149                 setPlaybinURL(newUrl);
2150                 changePipelineState(GST_STATE_PLAYING);
2151                 return true;
2152             }
2153         } else
2154             GST_INFO_OBJECT(pipeline(), "Not allowed to load new media location: %s", newUrl.string().utf8().data());
2155     }
2156     m_mediaLocationCurrentIndex--;
2157     return false;
2158 }
2159
2160 void MediaPlayerPrivateGStreamer::loadStateChanged()
2161 {
2162     updateStates();
2163 }
2164
2165 void MediaPlayerPrivateGStreamer::timeChanged()
2166 {
2167     updateStates();
2168     m_player->timeChanged();
2169 }
2170
2171 void MediaPlayerPrivateGStreamer::didEnd()
2172 {
2173     GST_INFO_OBJECT(pipeline(), "Playback ended");
2174
2175     // Synchronize position and duration values to avoid confusing the
2176     // HTMLMediaElement. In some cases, reverse playback for instance, the
2177     // position is not necessarily reported as 0.
2178     m_cachedPosition = MediaTime::invalidTime();
2179     MediaTime now = currentMediaTime();
2180     if (now > MediaTime::zeroTime() && !m_seeking) {
2181         m_cachedDuration = now;
2182         m_player->durationChanged();
2183     }
2184
2185     m_isEndReached = true;
2186
2187     if (!m_player->client().mediaPlayerIsLooping()) {
2188         m_paused = true;
2189         changePipelineState(GST_STATE_READY);
2190         m_downloadFinished = false;
2191     }
2192     timeChanged();
2193 }
2194
2195 void MediaPlayerPrivateGStreamer::durationChanged()
2196 {
2197     MediaTime previousDuration = durationMediaTime();
2198     m_cachedDuration = MediaTime::invalidTime();
2199
2200     // Avoid emitting durationchanged in the case where the previous
2201     // duration was 0, because that case is already handled by the
2202     // HTMLMediaElement.
2203     if (previousDuration && durationMediaTime() != previousDuration)
2204         m_player->durationChanged();
2205 }
2206
2207 void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState error)
2208 {
2209     GST_WARNING("Loading failed, error: %d", error);
2210
2211     m_errorOccured = true;
2212     if (m_networkState != error) {
2213         m_networkState = error;
2214         m_player->networkStateChanged();
2215     }
2216     if (m_readyState != MediaPlayer::HaveNothing) {
2217         m_readyState = MediaPlayer::HaveNothing;
2218         m_player->readyStateChanged();
2219     }
2220
2221     // Loading failed, remove ready timer.
2222     m_readyTimerHandler.stop();
2223 }
2224
2225 void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
2226 {
2227     auto& gstRegistryScanner = GStreamerRegistryScanner::singleton();
2228     types = gstRegistryScanner.mimeTypeSet();
2229 }
2230
2231 MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::supportsType(const MediaEngineSupportParameters& parameters)
2232 {
2233     MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
2234 #if ENABLE(MEDIA_SOURCE)
2235     // MediaPlayerPrivateGStreamerMSE is in charge of mediasource playback, not us.
2236     if (parameters.isMediaSource)
2237         return result;
2238 #endif
2239
2240 #if !ENABLE(MEDIA_STREAM) || !GST_CHECK_VERSION(1, 10, 0)
2241     if (parameters.isMediaStream)
2242         return result;
2243 #endif
2244
2245     if (parameters.type.isEmpty())
2246         return result;
2247
2248     GST_DEBUG("Checking mime-type \"%s\"", parameters.type.raw().utf8().data());
2249     auto containerType = parameters.type.containerType();
2250     auto& gstRegistryScanner = GStreamerRegistryScanner::singleton();
2251     if (gstRegistryScanner.isContainerTypeSupported(containerType)) {
2252         // Spec says we should not return "probably" if the codecs string is empty.
2253         Vector<String> codecs = parameters.type.codecs();
2254         result = codecs.isEmpty() ? MediaPlayer::MayBeSupported : (gstRegistryScanner.areAllCodecsSupported(codecs) ? MediaPlayer::IsSupported : MediaPlayer::IsNotSupported);
2255     }
2256
2257     auto finalResult = extendedSupportsType(parameters, result);
2258     GST_DEBUG("Supported: %s", convertEnumerationToString(finalResult).utf8().data());
2259     return finalResult;
2260 }
2261
2262 void MediaPlayerPrivateGStreamer::setDownloadBuffering()
2263 {
2264     if (!m_pipeline)
2265         return;
2266
2267     unsigned flags;
2268     g_object_get(m_pipeline.get(), "flags", &flags, nullptr);
2269
2270     unsigned flagDownload = getGstPlayFlag("download");
2271
2272     // We don't want to stop downloading if we already started it.
2273     if (flags & flagDownload && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline) {
2274         GST_DEBUG_OBJECT(pipeline(), "Download already started, not starting again");
2275         return;
2276     }
2277
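         // On-disk buffering is only worth enabling for non-live streams when preload is auto;
         // the fill timer then polls the buffered range periodically.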
2278     bool shouldDownload = !isLiveStream() && m_preload == MediaPlayer::Auto;
2279     if (shouldDownload) {
2280         GST_INFO_OBJECT(pipeline(), "Enabling on-disk buffering");
2281         g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
2282         m_fillTimer.startRepeating(200_ms);
2283     } else {
2284         GST_INFO_OBJECT(pipeline(), "Disabling on-disk buffering");
2285         g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr);
2286         m_fillTimer.stop();
2287     }
2288 }
2289
2290 void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
2291 {
2292     GST_DEBUG_OBJECT(pipeline(), "Setting preload to %s", convertEnumerationToString(preload).utf8().data());
2293     if (preload == MediaPlayer::Auto && isLiveStream())
2294         return;
2295
2296     m_preload = preload;
2297     setDownloadBuffering();
2298
2299     if (m_delayingLoad && m_preload != MediaPlayer::None) {
2300         m_delayingLoad = false;
2301         commitLoad();
2302     }
2303 }
2304
2305 GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
2306 {
2307     m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr);
2308     if (!m_autoAudioSink) {
2309         GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation");
2310         return nullptr;
2311     }
2312
2313     g_signal_connect_swapped(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);
2314
2315 #if ENABLE(WEB_AUDIO)
2316     GstElement* audioSinkBin = gst_bin_new("audio-sink");
2317     ensureAudioSourceProvider();
2318     m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
2319     return audioSinkBin;
2320 #else
2321     return m_autoAudioSink.get();
2322 #endif
2323 }
2324
2325 GstElement* MediaPlayerPrivateGStreamer::audioSink() const
2326 {
2327     GstElement* sink;
2328     g_object_get(m_pipeline.get(), "audio-sink", &sink, nullptr);
2329     return sink;
2330 }
2331
2332 #if ENABLE(WEB_AUDIO)
2333 void MediaPlayerPrivateGStreamer::ensureAudioSourceProvider()
2334 {
2335     if (!m_audioSourceProvider)
2336         m_audioSourceProvider = std::make_unique<AudioSourceProviderGStreamer>();
2337 }
2338
2339 AudioSourceProvider* MediaPlayerPrivateGStreamer::audioSourceProvider()
2340 {
2341     ensureAudioSourceProvider();
2342     return m_audioSourceProvider.get();
2343 }
2344 #endif
2345
2346 void MediaPlayerPrivateGStreamer::createGSTPlayBin(const gchar* playbinName, const String& pipelineName)
2347 {
2348     if (m_pipeline) {
2349         if (!playbinName) {
2350             GST_INFO_OBJECT(pipeline(), "Keeping same playbin as nothing forced");
2351             return;
2352         }
2353
2354         if (!g_strcmp0(GST_OBJECT_NAME(gst_element_get_factory(m_pipeline.get())), playbinName)) {
2355             GST_INFO_OBJECT(pipeline(), "Already using %s", playbinName);
2356             return;
2357         }
2358
2359         GST_INFO_OBJECT(pipeline(), "Tearing down as we need to use %s now.",
2360             playbinName);
2361         changePipelineState(GST_STATE_NULL);
2362         m_pipeline = nullptr;
2363     }
2364
2365     ASSERT(!m_pipeline);
2366
2367 #if GST_CHECK_VERSION(1, 10, 0)
2368     if (g_getenv("USE_PLAYBIN3"))
2369         playbinName = "playbin3";
2370 #else
2371     playbinName = "playbin";
2372 #endif
2373
2374     if (!playbinName)
2375         playbinName = "playbin";
2376
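         // playbin3 reports tracks through GstStreamCollection and streams-selected messages,
         // whereas legacy playbin uses the video/audio/text-changed signals connected below.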
2377     m_isLegacyPlaybin = !g_strcmp0(playbinName, "playbin");
2378
2379     // gst_element_factory_make() returns a floating reference so
2380     // we should not adopt.
2381     setPipeline(gst_element_factory_make(playbinName,
2382         (pipelineName.isEmpty() ? makeString("play_0x", hex(reinterpret_cast<uintptr_t>(this), Lowercase)) : pipelineName).utf8().data()));
2383     setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get()));
2384
2385     GST_INFO_OBJECT(pipeline(), "Using legacy playbin element: %s", boolForPrinting(m_isLegacyPlaybin));
2386
2387     // Also let other listeners subscribe to (application) messages on this bus.
2388     GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
2389     gst_bus_add_signal_watch_full(bus.get(), RunLoopSourcePriority::RunLoopDispatcher);
2390     g_signal_connect(bus.get(), "message", G_CALLBACK(busMessageCallback), this);
2391
2392     g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);
2393
2394     g_signal_connect(GST_BIN_CAST(m_pipeline.get()), "deep-element-added", G_CALLBACK(+[](GstBin*, GstBin* subBin, GstElement* element, MediaPlayerPrivateGStreamer* player) {
2395         GUniquePtr<char> binName(gst_element_get_name(GST_ELEMENT_CAST(subBin)));
2396         if (!g_str_has_prefix(binName.get(), "decodebin"))
2397             return;
2398
2399         GUniquePtr<char> elementName(gst_element_get_name(element));
2400         player->m_isVideoDecoderVideo4Linux = g_str_has_prefix(elementName.get(), "v4l2");
2401     }), this);
2402
2403     g_signal_connect_swapped(m_pipeline.get(), "source-setup", G_CALLBACK(sourceSetupCallback), this);
2404     if (m_isLegacyPlaybin) {
2405         g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
2406         g_signal_connect_swapped(m_pipeline.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
2407     }
2408
2409 #if ENABLE(VIDEO_TRACK)
2410     if (m_isLegacyPlaybin)
2411         g_signal_connect_swapped(m_pipeline.get(), "text-changed", G_CALLBACK(textChangedCallback), this);
2412
2413     GstElement* textCombiner = webkitTextCombinerNew();
2414     ASSERT(textCombiner);
2415     g_object_set(m_pipeline.get(), "text-stream-combiner", textCombiner, nullptr);
2416
2417     m_textAppSink = webkitTextSinkNew();
2418     ASSERT(m_textAppSink);
2419
2420     m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
2421     ASSERT(m_textAppSinkPad);
2422
2423     GRefPtr<GstCaps> textCaps;
2424     if (webkitGstCheckVersion(1, 14, 0))
2425         textCaps = adoptGRef(gst_caps_new_empty_simple("application/x-subtitle-vtt"));
2426     else
2427         textCaps = adoptGRef(gst_caps_new_empty_simple("text/vtt"));
2428     g_object_set(m_textAppSink.get(), "emit-signals", TRUE, "enable-last-sample", FALSE, "caps", textCaps.get(), nullptr);
2429     g_signal_connect_swapped(m_textAppSink.get(), "new-sample", G_CALLBACK(newTextSampleCallback), this);
2430
2431     g_object_set(m_pipeline.get(), "text-sink", m_textAppSink.get(), nullptr);
2432 #endif
2433
2434     g_object_set(m_pipeline.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);
2435
2436     configurePlaySink();
2437
2438     if (m_preservesPitch) {
2439         GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
2440
2441         if (!scale)
2442             GST_WARNING("Failed to create scaletempo");
2443         else
2444             g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
2445     }
2446
2447     if (!m_renderingCanBeAccelerated) {
2448         // If not using accelerated compositing, let GStreamer handle
2449         // the image-orientation tag.
2450         GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
2451         if (videoFlip) {
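                 // A method of 8 selects videoflip's automatic mode, which rotates the frames
                 // according to the stream's image-orientation tag.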
2452             g_object_set(videoFlip, "method", 8, nullptr);
2453             g_object_set(m_pipeline.get(), "video-filter", videoFlip, nullptr);
2454         } else
2455             GST_WARNING("The videoflip element is missing, video rotation support is now disabled. Please check your gst-plugins-good installation.");
2456     }
2457
2458     GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
2459     if (videoSinkPad)
2460         g_signal_connect_swapped(videoSinkPad.get(), "notify::caps", G_CALLBACK(videoSinkCapsChangedCallback), this);
2461 }
2462
2463 void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
2464 {
2465     GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_pipeline.get()), GST_STATE_PAUSED);
2466     gst_element_post_message(m_pipeline.get(), message);
2467 }
2468
2469 bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
2470 {
2471     if (WEBKIT_IS_WEB_SRC(m_source.get()))
2472         return webKitSrcPassedCORSAccessCheck(WEBKIT_WEB_SRC(m_source.get()));
2473     return false;
2474 }
2475
2476 bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
2477 {
2478     if (isLiveStream())
2479         return false;
2480
2481     if (m_url.isLocalFile())
2482         return true;
2483
2484     if (m_url.protocolIsInHTTPFamily())
2485         return true;
2486
2487     return false;
2488 }
2489
2490 Optional<bool> MediaPlayerPrivateGStreamer::wouldTaintOrigin(const SecurityOrigin& origin) const
2491 {
2492     GST_TRACE_OBJECT(pipeline(), "Checking %u origins", m_origins.size());
2493     for (auto& responseOrigin : m_origins) {
2494         if (!origin.canAccess(*responseOrigin)) {
2495             GST_DEBUG_OBJECT(pipeline(), "Found a response origin the media origin cannot access, tainting");
2496             return true;
2497         }
2498     }
2499     GST_DEBUG_OBJECT(pipeline(), "All response origins are accessible, not tainting");
2500     return false;
2501 }
2502
2503 }
2504
2505 #endif // ENABLE(VIDEO) && USE(GSTREAMER)