Move URL from WebCore to WTF
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamer.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015, 2016 Igalia S.L
7  * Copyright (C) 2014 Cable Television Laboratories, Inc.
8  * Copyright (C) 2015, 2016 Metrological Group B.V.
9  *
10  * This library is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Library General Public
12  * License as published by the Free Software Foundation; either
13  * version 2 of the License, or (at your option) any later version.
14  *
15  * This library is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18  * Library General Public License for more details.
19  *
20  * You should have received a copy of the GNU Library General Public License
21  * aint with this library; see the file COPYING.LIB.  If not, write to
22  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
23  * Boston, MA 02110-1301, USA.
24  */
25
26 #include "config.h"
27 #include "MediaPlayerPrivateGStreamer.h"
28
29 #if ENABLE(VIDEO) && USE(GSTREAMER)
30
31 #include "FileSystem.h"
32 #include "GStreamerCommon.h"
33 #include "HTTPHeaderNames.h"
34 #include "MIMETypeRegistry.h"
35 #include "MediaPlayer.h"
36 #include "MediaPlayerRequestInstallMissingPluginsCallback.h"
37 #include "NotImplemented.h"
38 #include "SecurityOrigin.h"
39 #include "TimeRanges.h"
40 #include "WebKitWebSourceGStreamer.h"
41 #include <glib.h>
42 #include <gst/gst.h>
43 #include <gst/pbutils/missing-plugins.h>
44 #include <limits>
45 #include <wtf/HexNumber.h>
46 #include <wtf/MediaTime.h>
47 #include <wtf/NeverDestroyed.h>
48 #include <wtf/StringPrintStream.h>
49 #include <wtf/URL.h>
50 #include <wtf/WallTime.h>
51 #include <wtf/glib/GUniquePtr.h>
52 #include <wtf/glib/RunLoopSourcePriority.h>
53 #include <wtf/text/CString.h>
54
55 #if ENABLE(MEDIA_STREAM) && GST_CHECK_VERSION(1, 10, 0)
56 #include "GStreamerMediaStreamSource.h"
57 #endif
58
59 #if ENABLE(VIDEO_TRACK)
60 #include "AudioTrackPrivateGStreamer.h"
61 #include "InbandMetadataTextTrackPrivateGStreamer.h"
62 #include "InbandTextTrackPrivateGStreamer.h"
63 #include "TextCombinerGStreamer.h"
64 #include "TextSinkGStreamer.h"
65 #include "VideoTrackPrivateGStreamer.h"
66 #endif
67
68 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
69 #define GST_USE_UNSTABLE_API
70 #include <gst/mpegts/mpegts.h>
71 #undef GST_USE_UNSTABLE_API
72 #endif
73 #include <gst/audio/streamvolume.h>
74
75 #if ENABLE(MEDIA_SOURCE)
76 #include "MediaSource.h"
77 #include "WebKitMediaSourceGStreamer.h"
78 #endif
79
80 #if ENABLE(WEB_AUDIO)
81 #include "AudioSourceProviderGStreamer.h"
82 #endif
83
84 GST_DEBUG_CATEGORY_EXTERN(webkit_media_player_debug);
85 #define GST_CAT_DEFAULT webkit_media_player_debug
86
87
88 namespace WebCore {
89 using namespace std;
90
// Trampoline for the GstBus "message" signal: forwards every bus message to
// the player instance that registered the watch.
static void busMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
{
    player->handleMessage(message);
}
95
// C-style trampoline used as a GObject signal callback: forwards to the
// member function that tags the audio sink with stream properties.
void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
{
    player->setAudioStreamProperties(object);
}
100
// Tags a PulseAudio sink with a "media.role" stream property ("video" or
// "music") so the audio daemon can apply per-role policies.
void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
{
    // Only PulseAudio sinks understand "stream-properties"; ignore any other
    // element type handed to this callback.
    if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
        return;

    const char* role = m_player->client().mediaPlayerIsVideo() ? "video" : "music";
    GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr);
    g_object_set(object, "stream-properties", structure, nullptr);
    // g_object_set() copies the structure, so we still own (and must free) it.
    gst_structure_free(structure);
    GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
    GST_DEBUG_OBJECT(pipeline(), "Set media.role as %s at %s", role, elementName.get());
}
113
// Registers this engine with the global MediaPlayer machinery, but only when
// GStreamer is usable on this system (see isAvailable()).
void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar)
{
    MediaPlayerPrivateGStreamerBase::initializeDebugCategory();
    if (isAvailable()) {
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamer>(player); },
            getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
    }
}
122
123 bool MediaPlayerPrivateGStreamer::isAvailable()
124 {
125     if (!initializeGStreamerAndRegisterWebKitElements())
126         return false;
127
128     GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
129     return factory;
130 }
131
// Initializes all playback state to idle defaults. The GStreamer pipeline
// itself is created lazily in load()/loadFull(), not here.
MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
    : MediaPlayerPrivateGStreamerBase(player)
    , m_buffering(false)
    , m_bufferingPercentage(0)
    , m_cachedPosition(MediaTime::invalidTime())
    , m_canFallBackToLastFinishedSeekPosition(false)
    , m_changingRate(false)
    , m_downloadFinished(false)
    , m_errorOccured(false)
    , m_isEndReached(false)
    , m_isStreaming(false)
    , m_durationAtEOS(MediaTime::invalidTime())
    , m_paused(true)
    , m_playbackRate(1)
    , m_requestedState(GST_STATE_VOID_PENDING)
    , m_resetPipeline(false)
    , m_seeking(false)
    , m_seekIsPending(false)
    , m_seekTime(MediaTime::invalidTime())
    , m_source(nullptr)
    , m_volumeAndMuteInitialized(false)
    , m_mediaLocations(nullptr)
    , m_mediaLocationCurrentIndex(0)
    , m_playbackRatePause(false)
    , m_timeOfOverlappingSeek(MediaTime::invalidTime())
    , m_lastPlaybackRate(1)
    , m_fillTimer(*this, &MediaPlayerPrivateGStreamer::fillTimerFired)
    , m_maxTimeLoaded(MediaTime::zeroTime())
    , m_preload(player->preload())
    , m_delayingLoad(false)
    , m_maxTimeLoadedAtLastDidLoadingProgress(MediaTime::zeroTime())
    , m_hasVideo(false)
    , m_hasAudio(false)
    , m_readyTimerHandler(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::readyTimerFired)
    , m_totalBytes(0)
    , m_preservesPitch(false)
{
#if USE(GLIB)
    // Run the READY-state watchdog at idle priority so it never competes with
    // playback work on the main loop.
    m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
#endif
}
173
// Tears the player down: detaches track wrappers, stops timers, and removes
// every signal handler that carries a pointer to this (soon dead) object so
// no callback can fire into freed memory.
MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
{
    GST_DEBUG_OBJECT(pipeline(), "Disposing player");

#if ENABLE(VIDEO_TRACK)
    // Disconnect every track wrapper from its underlying GStreamer stream.
    for (auto& track : m_audioTracks.values())
        track->disconnect();

    for (auto& track : m_textTracks.values())
        track->disconnect();

    for (auto& track : m_videoTracks.values())
        track->disconnect();
#endif
    if (m_fillTimer.isActive())
        m_fillTimer.stop();

    if (m_mediaLocations) {
        gst_structure_free(m_mediaLocations);
        m_mediaLocations = nullptr;
    }

    // If our WebKitWebSrc is still parented (by uridecodebin), drop the
    // element-added callback registered on the parent bin.
    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    if (m_autoAudioSink) {
        g_signal_handlers_disconnect_by_func(G_OBJECT(m_autoAudioSink.get()),
            reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);
    }

    m_readyTimerHandler.stop();
    // Invalidate pending missing-plugin installation callbacks so their
    // completion cannot reach this destroyed player.
    for (auto& missingPluginCallback : m_missingPluginCallbacks) {
        if (missingPluginCallback)
            missingPluginCallback->invalidate();
    }
    m_missingPluginCallbacks.clear();

    if (m_videoSink) {
        GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
        g_signal_handlers_disconnect_matched(videoSinkPad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
    }

    // Remove the bus watch, its sync handler, and any remaining handlers on
    // the pipeline itself that reference this player.
    if (m_pipeline) {
        GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
        ASSERT(bus);
        g_signal_handlers_disconnect_by_func(bus.get(), gpointer(busMessageCallback), this);
        gst_bus_remove_signal_watch(bus.get());
        gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
        g_signal_handlers_disconnect_matched(m_pipeline.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
    }
}
225
226 static void convertToInternalProtocol(URL& url)
227 {
228     if (url.protocolIsInHTTPFamily() || url.protocolIsBlob())
229         url.setProtocol("webkit+" + url.protocol());
230 }
231
232 void MediaPlayerPrivateGStreamer::setPlaybinURL(const URL& url)
233 {
234     // Clean out everything after file:// url path.
235     String cleanURLString(url.string());
236     if (url.isLocalFile())
237         cleanURLString = cleanURLString.substring(0, url.pathEnd());
238
239     m_url = URL(URL(), cleanURLString);
240     convertToInternalProtocol(m_url);
241
242     GST_INFO_OBJECT(pipeline(), "Load %s", m_url.string().utf8().data());
243     g_object_set(m_pipeline.get(), "uri", m_url.string().utf8().data(), nullptr);
244 }
245
// MediaPlayerPrivate entry point: load a plain URL with the default playbin
// element and an auto-generated pipeline name.
void MediaPlayerPrivateGStreamer::load(const String& urlString)
{
    loadFull(urlString, nullptr, String());
}
250
// Recursively sets the "sync" property on |element|, or on every sink inside
// it when |element| is a bin. Used to toggle clock synchronization for all
// sinks of the pipeline at once.
static void setSyncOnClock(GstElement *element, bool sync)
{
    if (!GST_IS_BIN(element)) {
        g_object_set(element, "sync", sync, NULL);
        return;
    }

    // Iterate the bin's sinks; gst_iterator_foreach can report RESYNC if the
    // bin changed mid-iteration, in which case we resync and retry.
    GstIterator* it = gst_bin_iterate_sinks(GST_BIN(element));
    while (gst_iterator_foreach(it, (GstIteratorForeachFunction)([](const GValue* item, void* syncPtr) {
        bool* sync = static_cast<bool*>(syncPtr);
        setSyncOnClock(GST_ELEMENT(g_value_get_object(item)), *sync);
    }), &sync) == GST_ITERATOR_RESYNC)
        gst_iterator_resync(it);
    gst_iterator_free(it);
}
266
// Enables or disables clock synchronization on both the video and audio
// sinks (disabled e.g. for MediaStream playback, see load(MediaStreamPrivate&)).
void MediaPlayerPrivateGStreamer::syncOnClock(bool sync)
{
    setSyncOnClock(videoSink(), sync);
    setSyncOnClock(audioSink(), sync);
}
272
// Shared implementation behind all load() overloads: creates the pipeline on
// first use, sets the URI, resets network/ready state, and either starts the
// load immediately or defers it when preload is None.
void MediaPlayerPrivateGStreamer::loadFull(const String& urlString, const gchar* playbinName,
    const String& pipelineName)
{
    // FIXME: This method is still called even if supportsType() returned
    // IsNotSupported. This would deserve more investigation but meanwhile make
    // sure we don't ever try to play animated gif assets.
    if (m_player->contentMIMEType() == "image/gif") {
        loadingFailed(MediaPlayer::FormatError);
        return;
    }

    URL url(URL(), urlString);
    if (url.protocolIsAbout())
        return;

    // Lazily create the playbin. MSE always uses the legacy "playbin"
    // regardless of the requested playbinName.
    if (!m_pipeline)
        createGSTPlayBin(isMediaSource() ? "playbin" : playbinName, pipelineName);
    syncOnClock(true);
    if (m_fillTimer.isActive())
        m_fillTimer.stop();

    ASSERT(m_pipeline);

    setPlaybinURL(url);

    GST_DEBUG_OBJECT(pipeline(), "preload: %s", convertEnumerationToString(m_preload).utf8().data());
    if (m_preload == MediaPlayer::None) {
        GST_INFO_OBJECT(pipeline(), "Delaying load.");
        m_delayingLoad = true;
    }

    // Reset network and ready states. Those will be set properly once
    // the pipeline pre-rolled.
    m_networkState = MediaPlayer::Loading;
    m_player->networkStateChanged();
    m_readyState = MediaPlayer::HaveNothing;
    m_player->readyStateChanged();
    m_volumeAndMuteInitialized = false;
    m_durationAtEOS = MediaTime::invalidTime();
    m_hasTaintedOrigin = std::nullopt;

    if (!m_delayingLoad)
        commitLoad();
}
317
#if ENABLE(MEDIA_SOURCE)
// MSE is handled by a dedicated MediaPlayerPrivate subclass, not this one.
void MediaPlayerPrivateGStreamer::load(const String&, MediaSourcePrivateClient*)
{
    // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
}
#endif
326
#if ENABLE(MEDIA_STREAM)
// Loads a MediaStream through a mediastream:// URI handled by playbin3.
// Clock sync is disabled because live capture frames should be rendered as
// they arrive. Requires GStreamer >= 1.10; otherwise fail so MediaPlayer can
// fall back to another engine.
void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate& stream)
{
#if GST_CHECK_VERSION(1, 10, 0)
    m_streamPrivate = &stream;
    auto pipelineName = String::format("mediastream_%s_%p",
        (stream.hasCaptureVideoSource() || stream.hasCaptureAudioSource()) ? "Local" : "Remote", this);

    loadFull(String("mediastream://") + stream.id(), "playbin3", pipelineName);
    syncOnClock(false);

#if USE(GSTREAMER_GL)
    ensureGLVideoSinkContext();
#endif
    m_player->play();
#else
    // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
    notImplemented();
#endif
}
#endif
350
// Actually starts loading: moves the pipeline to PAUSED (pre-roll), then
// configures download buffering and refreshes the reported states.
void MediaPlayerPrivateGStreamer::commitLoad()
{
    ASSERT(!m_delayingLoad);
    GST_DEBUG_OBJECT(pipeline(), "Committing load.");

    // GStreamer needs to have the pipeline set to a paused state to
    // start providing anything useful.
    changePipelineState(GST_STATE_PAUSED);

    setDownloadBuffering();
    updateStates();
}
363
// Queries the current playback position from the pipeline, with a short-lived
// cache to avoid hammering GStreamer on every timeupdate.
MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
{
    // While seeking past EOS the pipeline position is meaningless; report the
    // seek target instead.
    if (m_isEndReached && m_seeking)
        return m_seekTime;

    // This constant should remain lower than HTMLMediaElement's maxTimeupdateEventFrequency.
    static const Seconds positionCacheThreshold = 200_ms;
    Seconds now = WTF::WallTime::now().secondsSinceEpoch();
    if (m_lastQueryTime && (now - m_lastQueryTime.value()) < positionCacheThreshold && m_cachedPosition.isValid())
        return m_cachedPosition;

    m_lastQueryTime = now;

    // Position is only available if no async state change is going on and the state is either paused or playing.
    gint64 position = GST_CLOCK_TIME_NONE;
    GstQuery* query = gst_query_new_position(GST_FORMAT_TIME);
    if (gst_element_query(m_pipeline.get(), query))
        gst_query_parse_position(query, 0, &position);
    gst_query_unref(query);

    GST_TRACE_OBJECT(pipeline(), "Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));

    // If the query failed, fall back to the last finished seek position when
    // that is known to still be accurate; otherwise report zero.
    MediaTime playbackPosition = MediaTime::zeroTime();
    GstClockTime gstreamerPosition = static_cast<GstClockTime>(position);
    if (GST_CLOCK_TIME_IS_VALID(gstreamerPosition))
        playbackPosition = MediaTime(gstreamerPosition, GST_SECOND);
    else if (m_canFallBackToLastFinishedSeekPosition)
        playbackPosition = m_seekTime;

    m_cachedPosition = playbackPosition;
    return playbackPosition;
}
396
// Watchdog armed by changePipelineState(): if the pipeline sits in READY for
// too long, drop to NULL to release decoder/hardware resources.
void MediaPlayerPrivateGStreamer::readyTimerFired()
{
    GST_DEBUG_OBJECT(pipeline(), "In READY for too long. Releasing pipeline resources.");
    changePipelineState(GST_STATE_NULL);
}
402
// Requests a pipeline state change, skipping it when the target state is
// already current or pending. Returns false only on an immediate synchronous
// failure. Also arms/disarms the READY-state watchdog timer.
bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
{
    ASSERT(m_pipeline);

    GstState currentState;
    GstState pending;

    gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
    // Already there (or on the way there): report success without re-issuing
    // the state change.
    if (currentState == newState || pending == newState) {
        GST_DEBUG_OBJECT(pipeline(), "Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
            gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
        return true;
    }

    GST_DEBUG_OBJECT(pipeline(), "Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState),
        gst_element_state_get_name(currentState), gst_element_state_get_name(pending));

#if USE(GSTREAMER_GL)
    // The GL context must be shared with the video sink before pre-roll.
    if (currentState == GST_STATE_READY && newState == GST_STATE_PAUSED)
        ensureGLVideoSinkContext();
#endif

    GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState);
    GstState pausedOrPlaying = newState == GST_STATE_PLAYING ? GST_STATE_PAUSED : GST_STATE_PLAYING;
    if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE)
        return false;

    // Create a timer when entering the READY state so that we can free resources
    // if we stay for too long on READY.
    // Also lets remove the timer if we request a state change for any state other than READY.
    // See also https://bugs.webkit.org/show_bug.cgi?id=117354
    if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
        // Max interval in seconds to stay in the READY state on manual
        // state change requests.
        static const Seconds readyStateTimerDelay { 1_min };
        m_readyTimerHandler.startOneShot(readyStateTimerDelay);
    } else if (newState != GST_STATE_READY)
        m_readyTimerHandler.stop();

    return true;
}
444
445 void MediaPlayerPrivateGStreamer::prepareToPlay()
446 {
447     GST_DEBUG_OBJECT(pipeline(), "Prepare to play");
448     m_preload = MediaPlayer::Auto;
449     if (m_delayingLoad) {
450         m_delayingLoad = false;
451         commitLoad();
452     }
453 }
454
// Starts (or resumes) playback by moving the pipeline to PLAYING. A zero
// playback rate cannot be expressed as a GStreamer seek, so it is simulated
// by staying paused until the rate becomes non-zero again.
void MediaPlayerPrivateGStreamer::play()
{
    if (!m_playbackRate) {
        m_playbackRatePause = true;
        return;
    }

    if (changePipelineState(GST_STATE_PLAYING)) {
        m_isEndReached = false;
        m_delayingLoad = false;
        m_preload = MediaPlayer::Auto;
        setDownloadBuffering();
        GST_INFO_OBJECT(pipeline(), "Play");
    } else
        loadingFailed(MediaPlayer::Empty);
}
471
// Pauses playback. If the pipeline is already at (or heading to) a state at
// or below PAUSED, there is nothing to do.
void MediaPlayerPrivateGStreamer::pause()
{
    m_playbackRatePause = false;
    GstState currentState, pendingState;
    gst_element_get_state(m_pipeline.get(), &currentState, &pendingState, 0);
    if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
        return;

    if (changePipelineState(GST_STATE_PAUSED))
        GST_INFO_OBJECT(pipeline(), "Pause");
    else
        loadingFailed(MediaPlayer::Empty);
}
485
// Reports the media duration. Returns the cached EOS duration when known,
// invalid time on error, and positive infinity while the duration cannot yet
// be queried (not pre-rolled, or the query failed — e.g. live streams).
MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
{
    if (!m_pipeline || m_errorOccured)
        return MediaTime::invalidTime();

    if (m_durationAtEOS.isValid())
        return m_durationAtEOS;

    // The duration query would fail on a not-prerolled pipeline.
    if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
        return MediaTime::positiveInfiniteTime();

    gint64 timeLength = 0;

    if (!gst_element_query_duration(m_pipeline.get(), GST_FORMAT_TIME, &timeLength) || !GST_CLOCK_TIME_IS_VALID(timeLength)) {
        GST_DEBUG_OBJECT(pipeline(), "Time duration query failed for %s", m_url.string().utf8().data());
        return MediaTime::positiveInfiniteTime();
    }

    GST_LOG("Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(timeLength));

    return MediaTime(timeLength, GST_SECOND);
    // FIXME: handle 3.14.9.5 properly
}
510
511 MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
512 {
513     if (!m_pipeline || m_errorOccured)
514         return MediaTime::invalidTime();
515
516     if (m_seeking)
517         return m_seekTime;
518
519     return playbackPosition();
520 }
521
// Seeks to |mediaTime|, clamped to the media duration. The seek may be
// performed immediately, deferred until an in-flight async state change
// completes (m_seekIsPending), or recorded as an overlapping seek when one is
// already in progress. Live streams cannot seek.
void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime)
{
    if (!m_pipeline)
        return;

    if (m_errorOccured)
        return;

    GST_INFO_OBJECT(pipeline(), "[Seek] seek attempt to %s", toString(mediaTime).utf8().data());

    // Avoid useless seeking.
    if (mediaTime == currentMediaTime())
        return;

    MediaTime time = std::min(mediaTime, durationMediaTime());

    if (isLiveStream())
        return;

    GST_INFO_OBJECT(pipeline(), "[Seek] seeking to %s", toString(time).utf8().data());

    // A seek arriving while another is running: remember it so it can be
    // replayed once the current one finishes.
    if (m_seeking) {
        m_timeOfOverlappingSeek = time;
        if (m_seekIsPending) {
            m_seekTime = time;
            return;
        }
    }

    GstState state;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
    if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
        GST_DEBUG_OBJECT(pipeline(), "[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
        return;
    }
    // Not yet pre-rolled (or at EOS): defer the actual gst seek until the
    // pipeline reaches PAUSED; at EOS the pipeline is additionally reset.
    if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
        m_seekIsPending = true;
        if (m_isEndReached) {
            GST_DEBUG_OBJECT(pipeline(), "[Seek] reset pipeline");
            m_resetPipeline = true;
            if (!changePipelineState(GST_STATE_PAUSED))
                loadingFailed(MediaPlayer::Empty);
        }
    } else {
        // We can seek now.
        if (!doSeek(time, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
            GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(time).utf8().data());
            return;
        }
    }

    m_seeking = true;
    m_seekTime = time;
    m_isEndReached = false;
}
577
// Issues the actual GStreamer seek. For forward rates the segment starts at
// |position|; for reverse rates the segment is [0, position] played
// backwards. Returns false when gst_element_seek() fails.
bool MediaPlayerPrivateGStreamer::doSeek(const MediaTime& position, float rate, GstSeekFlags seekType)
{
    // Default values for rate >= 0.
    MediaTime startTime = position, endTime = MediaTime::invalidTime();

    // TODO: Should do more than that, need to notify the media source
    // and probably flush the pipeline at least.
    if (isMediaSource())
        return true;

    if (rate < 0) {
        startTime = MediaTime::zeroTime();
        // If we are at beginning of media, start from the end to
        // avoid immediate EOS.
        if (position < MediaTime::zeroTime())
            endTime = durationMediaTime();
        else
            endTime = position;
    }

    // GStreamer refuses a zero rate; a rate of 1.0 with the pipeline paused
    // achieves the same effect.
    if (!rate)
        rate = 1.0;

    return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType,
        GST_SEEK_TYPE_SET, toGstClockTime(startTime), GST_SEEK_TYPE_SET, toGstClockTime(endTime));
}
604
// Applies a pending playback-rate change by re-issuing a flushing seek at the
// current position with the new rate. Extreme/negative rates mute audio
// unless pitch preservation is active.
void MediaPlayerPrivateGStreamer::updatePlaybackRate()
{
    if (!m_changingRate)
        return;

    GST_INFO_OBJECT(pipeline(), "Set Rate to %f", m_playbackRate);

    // Mute the sound if the playback rate is negative or too extreme and audio pitch is not adjusted.
    bool mute = m_playbackRate <= 0 || (!m_preservesPitch && (m_playbackRate < 0.8 || m_playbackRate > 2));

    GST_INFO_OBJECT(pipeline(), mute ? "Need to mute audio" : "Do not need to mute audio");

    if (doSeek(playbackPosition(), m_playbackRate, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH))) {
        g_object_set(m_pipeline.get(), "mute", mute, nullptr);
        m_lastPlaybackRate = m_playbackRate;
    } else {
        // Seek failed: roll back to the last rate that worked.
        // NOTE(review): the log below prints the restored rate, not the rate
        // that failed — arguably it should log the requested rate instead.
        m_playbackRate = m_lastPlaybackRate;
        GST_ERROR("Set rate to %f failed", m_playbackRate);
    }

    // Leaving the simulated rate-0 pause: resume the pipeline if needed.
    if (m_playbackRatePause) {
        GstState state;
        GstState pending;

        gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
        if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
            changePipelineState(GST_STATE_PLAYING);
        m_playbackRatePause = false;
    }

    m_changingRate = false;
    m_player->rateChanged();
}
638
// Reports whether playback is paused, derived from the pipeline state with
// two overrides: EOS always counts as paused, and the simulated rate-0 pause
// reports not-paused.
bool MediaPlayerPrivateGStreamer::paused() const
{
    if (m_isEndReached) {
        GST_DEBUG_OBJECT(pipeline(), "Ignoring pause at EOS");
        return true;
    }

    // NOTE(review): the pipeline really is PAUSED here, but we report false so
    // callers treat rate-0 playback as still "playing" — confirm this matches
    // HTMLMediaElement's expectations.
    if (m_playbackRatePause) {
        GST_DEBUG_OBJECT(pipeline(), "Playback rate is 0, simulating PAUSED state");
        return false;
    }

    GstState state;
    gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
    bool paused = state <= GST_STATE_PAUSED;
    GST_DEBUG_OBJECT(pipeline(), "Paused: %s", toString(paused).utf8().data());
    return paused;
}
657
// True while a seek (issued, pending, or overlapping) has not completed yet.
bool MediaPlayerPrivateGStreamer::seeking() const
{
    return m_seeking;
}
662
663 #if GST_CHECK_VERSION(1, 10, 0)
// Removes each track in |tracks| from the MediaPlayer via |method|, then
// empties the map. Scoped to clearTracks() below (undefined right after).
#define CLEAR_TRACKS(tracks, method) \
    for (auto& track : tracks.values())\
        method(*track);\
    tracks.clear();

// Drops every audio/video/text track wrapper; called when a new stream
// collection replaces the previous one.
void MediaPlayerPrivateGStreamer::clearTracks()
{
#if ENABLE(VIDEO_TRACK)
    CLEAR_TRACKS(m_audioTracks, m_player->removeAudioTrack);
    CLEAR_TRACKS(m_videoTracks, m_player->removeVideoTrack);
    CLEAR_TRACKS(m_textTracks, m_player->removeTextTrack);
#endif // ENABLE(VIDEO_TRACK)
}
#undef CLEAR_TRACKS
678
679 #if ENABLE(VIDEO_TRACK)
// Records that a stream of the given kind exists (m_hasAudio/m_hasVideo) and,
// outside MSE, wraps GstStream |stream| (index |i|, both from the enclosing
// scope in updateTracks()) in a Type##TrackPrivateGStreamer, registers it with
// the player, and remembers its stream id when GStreamer flags it as the
// currently selected one.
#define CREATE_TRACK(type, Type) \
    m_has##Type = true; \
    if (!useMediaSource) {\
        RefPtr<Type##TrackPrivateGStreamer> track = Type##TrackPrivateGStreamer::create(makeWeakPtr(*this), i, stream); \
        m_##type##Tracks.add(track->id(), track); \
        m_player->add##Type##Track(*track);\
        if (gst_stream_get_stream_flags(stream.get()) & GST_STREAM_FLAG_SELECT) {                                    \
            m_current##Type##StreamId = String(gst_stream_get_stream_id(stream.get()));                              \
        }                                                                                                            \
    }
690
// Natural (intrinsic) video size. For playbin3 MediaStream playback the size
// is taken from the selected video stream's WebKit-specific width/height
// tags when present; otherwise defer to the base class (caps-derived size).
FloatSize MediaPlayerPrivateGStreamer::naturalSize() const
{
#if ENABLE(MEDIA_STREAM)
    if (!m_isLegacyPlaybin && !m_currentVideoStreamId.isEmpty()) {
        RefPtr<VideoTrackPrivateGStreamer> videoTrack = m_videoTracks.get(m_currentVideoStreamId);

        if (videoTrack) {
            auto tags = adoptGRef(gst_stream_get_tags(videoTrack->stream()));
            gint width, height;

            if (tags && gst_tag_list_get_int(tags.get(), WEBKIT_MEDIA_TRACK_TAG_WIDTH, &width) && gst_tag_list_get_int(tags.get(), WEBKIT_MEDIA_TRACK_TAG_HEIGHT, &height))
                return FloatSize(width, height);
        }
    }
#endif // ENABLE(MEDIA_STREAM)

    return MediaPlayerPrivateGStreamerBase::naturalSize();
}
709 #else
// Track-model-less fallback (VIDEO_TRACK disabled): only record that a stream
// of the given kind exists. Must match the two-argument call sites in
// updateTracks(), e.g. CREATE_TRACK(audio, Audio) -> m_hasAudio = true.
// The previous definition was ill-formed: it pasted a trailing "##", used
// "Type" which was not a macro parameter, and declared five parameters while
// every call site passes two.
#define CREATE_TRACK(type, Type) m_has##Type = true;
711 #endif // ENABLE(VIDEO_TRACK)
712
// Rebuilds the track lists from the current GstStreamCollection (playbin3
// path only). Existing tracks are dropped first because a new collection
// fully replaces the old one.
void MediaPlayerPrivateGStreamer::updateTracks()
{
    ASSERT(!m_isLegacyPlaybin);

    bool useMediaSource = isMediaSource();
    unsigned length = gst_stream_collection_get_size(m_streamCollection.get());

    bool oldHasAudio = m_hasAudio;
    bool oldHasVideo = m_hasVideo;
    // New stream collections override previous ones.
    clearTracks();
    unsigned textTrackIndex = 0;
    for (unsigned i = 0; i < length; i++) {
        GRefPtr<GstStream> stream = gst_stream_collection_get_stream(m_streamCollection.get(), i);
        String streamId(gst_stream_get_stream_id(stream.get()));
        GstStreamType type = gst_stream_get_stream_type(stream.get());

        GST_DEBUG_OBJECT(pipeline(), "Inspecting %s track with ID %s", gst_stream_type_get_name(type), streamId.utf8().data());
        // CREATE_TRACK reads `i`, `stream` and `useMediaSource` from this scope.
        if (type & GST_STREAM_TYPE_AUDIO) {
            CREATE_TRACK(audio, Audio)
        } else if (type & GST_STREAM_TYPE_VIDEO) {
            CREATE_TRACK(video, Video)
        } else if (type & GST_STREAM_TYPE_TEXT && !useMediaSource) {
#if ENABLE(VIDEO_TRACK)
            RefPtr<InbandTextTrackPrivateGStreamer> track = InbandTextTrackPrivateGStreamer::create(textTrackIndex++, stream);
            m_textTracks.add(streamId, track);
            m_player->addTextTrack(*track);
#endif
        } else
            GST_WARNING("Unknown track type found for stream %s", streamId.utf8().data());
    }

    // Notify the player about any audio/video availability changes.
    if ((oldHasVideo != m_hasVideo) || (oldHasAudio != m_hasAudio))
        m_player->characteristicChanged();

    if (m_hasVideo)
        m_player->sizeChanged();

    m_player->client().mediaPlayerEngineUpdated(m_player);
}
753 #endif // GST_CHECK_VERSION(1, 10, 0)
754
755 void MediaPlayerPrivateGStreamer::enableTrack(TrackPrivateBaseGStreamer::TrackType trackType, unsigned index)
756 {
757     // FIXME: Remove isMediaSource() test below when fixing https://bugs.webkit.org/show_bug.cgi?id=182531.
758     if (isMediaSource()) {
759         GST_FIXME_OBJECT(m_pipeline.get(), "Audio/Video/Text track switching is not yet supported by the MSE backend.");
760         return;
761     }
762
763     const char* propertyName;
764     const char* trackTypeAsString;
765     Vector<String> selectedStreams;
766     String selectedStreamId;
767
768 #if GST_CHECK_VERSION(1, 10, 0)
769     GstStream* stream = nullptr;
770
771     if (!m_isLegacyPlaybin) {
772         stream = gst_stream_collection_get_stream(m_streamCollection.get(), index);
773         if (!stream) {
774             GST_WARNING_OBJECT(pipeline(), "No stream to select at index %u", index);
775             return;
776         }
777         selectedStreamId = String::fromUTF8(gst_stream_get_stream_id(stream));
778         selectedStreams.append(selectedStreamId);
779     }
780 #endif // GST_CHECK_VERSION(1,0,0)
781
782     switch (trackType) {
783     case TrackPrivateBaseGStreamer::TrackType::Audio:
784         propertyName = "current-audio";
785         trackTypeAsString = "audio";
786         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentAudioStreamId) {
787             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
788             return;
789         }
790
791         if (!m_currentTextStreamId.isEmpty())
792             selectedStreams.append(m_currentTextStreamId);
793         if (!m_currentVideoStreamId.isEmpty())
794             selectedStreams.append(m_currentVideoStreamId);
795         break;
796     case TrackPrivateBaseGStreamer::TrackType::Video:
797         propertyName = "current-video";
798         trackTypeAsString = "video";
799         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentVideoStreamId) {
800             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
801             return;
802         }
803
804         if (!m_currentAudioStreamId.isEmpty())
805             selectedStreams.append(m_currentAudioStreamId);
806         if (!m_currentTextStreamId.isEmpty())
807             selectedStreams.append(m_currentTextStreamId);
808         break;
809     case TrackPrivateBaseGStreamer::TrackType::Text:
810         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentTextStreamId) {
811             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
812             return;
813         }
814
815         propertyName = "current-text";
816         trackTypeAsString = "text";
817         if (!m_currentAudioStreamId.isEmpty())
818             selectedStreams.append(m_currentAudioStreamId);
819         if (!m_currentVideoStreamId.isEmpty())
820             selectedStreams.append(m_currentVideoStreamId);
821         break;
822     case TrackPrivateBaseGStreamer::TrackType::Unknown:
823     default:
824         ASSERT_NOT_REACHED();
825     }
826
827     GST_INFO_OBJECT(pipeline(), "Enabling %s track with index: %u", trackTypeAsString, index);
828     if (m_isLegacyPlaybin)
829         g_object_set(m_pipeline.get(), propertyName, index, nullptr);
830 #if GST_CHECK_VERSION(1, 10, 0)
831     else {
832         GList* selectedStreamsList = nullptr;
833
834         for (const auto& streamId : selectedStreams)
835             selectedStreamsList = g_list_append(selectedStreamsList, g_strdup(streamId.utf8().data()));
836
837         // TODO: MSE GstStream API support: https://bugs.webkit.org/show_bug.cgi?id=182531
838         gst_element_send_event(m_pipeline.get(), gst_event_new_select_streams(selectedStreamsList));
839         g_list_free_full(selectedStreamsList, reinterpret_cast<GDestroyNotify>(g_free));
840     }
841 #endif
842 }
843
844 void MediaPlayerPrivateGStreamer::videoChangedCallback(MediaPlayerPrivateGStreamer* player)
845 {
846     player->m_notifier->notify(MainThreadNotification::VideoChanged, [player] {
847         player->notifyPlayerOfVideo();
848     });
849 }
850
// Main-thread handler for "video-changed": refreshes m_hasVideo from playbin's
// "n-video" property and rebuilds the video track list via "get-video-pad".
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    // This per-pad notification path is only used by legacy playbin2 or MSE;
    // playbin3 reports tracks through GST_MESSAGE_STREAMS_SELECTED instead.
    ASSERT(m_isLegacyPlaybin || isMediaSource());

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-video", &numTracks, nullptr);

    GST_INFO_OBJECT(pipeline(), "Media has %d video tracks", numTracks);

    // Only signal a characteristic change when the has-video bit actually flips.
    bool oldHasVideo = m_hasVideo;
    m_hasVideo = numTracks > 0;
    if (oldHasVideo != m_hasVideo)
        m_player->characteristicChanged();

    if (m_hasVideo)
        m_player->sizeChanged();

    if (useMediaSource) {
        GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
        m_player->client().mediaPlayerEngineUpdated(m_player);
        return;
    }

#if ENABLE(VIDEO_TRACK)
    Vector<String> validVideoStreams;
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        // Synthetic stream ID ("V<index>") used as the key in m_videoTracks.
        String streamId = "V" + String::number(i);
        validVideoStreams.append(streamId);
        if (i < static_cast<gint>(m_videoTracks.size())) {
            RefPtr<VideoTrackPrivateGStreamer> existingTrack = m_videoTracks.get(streamId);
            if (existingTrack) {
                existingTrack->setIndex(i);
                // Same pad as before: the existing track is still valid, keep it.
                if (existingTrack->pad() == pad)
                    continue;
            }
        }

        RefPtr<VideoTrackPrivateGStreamer> track = VideoTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
        ASSERT(streamId == track->id());
        m_videoTracks.add(streamId, track);
        m_player->addVideoTrack(*track);
    }

    // Drop cached tracks whose index is no longer reported by playbin.
    purgeInvalidVideoTracks(validVideoStreams);
#endif

    m_player->client().mediaPlayerEngineUpdated(m_player);
}
908
909 void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
910 {
911     player->m_notifier->notify(MainThreadNotification::VideoCapsChanged, [player] {
912         player->notifyPlayerOfVideoCaps();
913     });
914 }
915
// Main-thread handler for video caps changes: invalidates the cached video
// size (presumably recomputed lazily from the sink's new caps — confirm with
// the naturalSize() implementation) and notifies the client.
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideoCaps()
{
    m_videoSize = IntSize();
    m_player->client().mediaPlayerEngineUpdated(m_player);
}
921
922 void MediaPlayerPrivateGStreamer::audioChangedCallback(MediaPlayerPrivateGStreamer* player)
923 {
924     player->m_notifier->notify(MainThreadNotification::AudioChanged, [player] {
925         player->notifyPlayerOfAudio();
926     });
927 }
928
// Main-thread handler for "audio-changed": refreshes m_hasAudio from playbin's
// "n-audio" property and rebuilds the audio track list via "get-audio-pad".
// Mirrors notifyPlayerOfVideo(), minus the size-change notification.
void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    // This per-pad notification path is only used by legacy playbin2 or MSE.
    ASSERT(m_isLegacyPlaybin || isMediaSource());

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-audio", &numTracks, nullptr);

    GST_INFO_OBJECT(pipeline(), "Media has %d audio tracks", numTracks);
    // Only signal a characteristic change when the has-audio bit actually flips.
    bool oldHasAudio = m_hasAudio;
    m_hasAudio = numTracks > 0;
    if (oldHasAudio != m_hasAudio)
        m_player->characteristicChanged();

    if (useMediaSource) {
        GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
        m_player->client().mediaPlayerEngineUpdated(m_player);
        return;
    }

#if ENABLE(VIDEO_TRACK)
    Vector<String> validAudioStreams;
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        // Synthetic stream ID ("A<index>") used as the key in m_audioTracks.
        String streamId = "A" + String::number(i);
        validAudioStreams.append(streamId);
        if (i < static_cast<gint>(m_audioTracks.size())) {
            RefPtr<AudioTrackPrivateGStreamer> existingTrack = m_audioTracks.get(streamId);
            if (existingTrack) {
                existingTrack->setIndex(i);
                // Same pad as before: the existing track is still valid, keep it.
                if (existingTrack->pad() == pad)
                    continue;
            }
        }

        RefPtr<AudioTrackPrivateGStreamer> track = AudioTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
        ASSERT(streamId == track->id());
        m_audioTracks.add(streamId, track);
        m_player->addAudioTrack(*track);
    }

    // Drop cached tracks whose index is no longer reported by playbin.
    purgeInvalidAudioTracks(validAudioStreams);
#endif

    m_player->client().mediaPlayerEngineUpdated(m_player);
}
982
983 #if ENABLE(VIDEO_TRACK)
984 void MediaPlayerPrivateGStreamer::textChangedCallback(MediaPlayerPrivateGStreamer* player)
985 {
986     player->m_notifier->notify(MainThreadNotification::TextChanged, [player] {
987         player->notifyPlayerOfText();
988     });
989 }
990
// Main-thread handler for "text-changed": rebuilds the in-band text track list
// from playbin's "n-text" property and "get-text-pad" signal.
void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    // This per-pad notification path is only used by legacy playbin2 or MSE.
    ASSERT(m_isLegacyPlaybin || isMediaSource());

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-text", &numTracks, nullptr);

    GST_INFO_OBJECT(pipeline(), "Media has %d text tracks", numTracks);

    if (useMediaSource) {
        GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
        return;
    }

    Vector<String> validTextStreams;
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        // We can't assume the pad has a sticky event here like implemented in
        // InbandTextTrackPrivateGStreamer because it might be emitted after the
        // track was created. So fallback to a dummy stream ID like in the Audio
        // and Video tracks.
        String streamId = "T" + String::number(i);

        validTextStreams.append(streamId);
        if (i < static_cast<gint>(m_textTracks.size())) {
            RefPtr<InbandTextTrackPrivateGStreamer> existingTrack = m_textTracks.get(streamId);
            if (existingTrack) {
                existingTrack->setIndex(i);
                // Same pad as before: the existing track is still valid, keep it.
                if (existingTrack->pad() == pad)
                    continue;
            }
        }

        RefPtr<InbandTextTrackPrivateGStreamer> track = InbandTextTrackPrivateGStreamer::create(i, pad);
        m_textTracks.add(streamId, track);
        m_player->addTextTrack(*track);
    }

    // Drop cached tracks whose index is no longer reported by playbin.
    purgeInvalidTextTracks(validTextStreams);
}
1039
// Static trampoline for the text appsink's "new-sample" signal: forwards to
// the instance and always reports success to the sink.
GstFlowReturn MediaPlayerPrivateGStreamer::newTextSampleCallback(MediaPlayerPrivateGStreamer* player)
{
    player->newTextSample();
    return GST_FLOW_OK;
}
1045
1046 void MediaPlayerPrivateGStreamer::newTextSample()
1047 {
1048     if (!m_textAppSink)
1049         return;
1050
1051     GRefPtr<GstEvent> streamStartEvent = adoptGRef(
1052         gst_pad_get_sticky_event(m_textAppSinkPad.get(), GST_EVENT_STREAM_START, 0));
1053
1054     GRefPtr<GstSample> sample;
1055     g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr);
1056     ASSERT(sample);
1057
1058     if (streamStartEvent) {
1059         bool found = FALSE;
1060         const gchar* id;
1061         gst_event_parse_stream_start(streamStartEvent.get(), &id);
1062         for (auto& track : m_textTracks.values()) {
1063             if (!strcmp(track->streamId().utf8().data(), id)) {
1064                 track->handleSample(sample);
1065                 found = true;
1066                 break;
1067             }
1068         }
1069         if (!found)
1070             GST_WARNING("Got sample with unknown stream ID %s.", id);
1071     } else
1072         GST_WARNING("Unable to handle sample with no stream start event.");
1073 }
1074 #endif
1075
// MediaPlayer interface: request a new playback rate. The rate is clamped to
// [-20, 20], rate 0 is implemented as a pause (remembered in
// m_playbackRatePause), and the pipeline is only updated when it sits in a
// stable PLAYING/PAUSED state.
void MediaPlayerPrivateGStreamer::setRate(float rate)
{
    // Higher rate causes crash.
    rate = clampTo(rate, -20.0, 20.0);

    // Avoid useless playback rate update.
    if (m_playbackRate == rate) {
        // and make sure that upper layers were notified if rate was set

        if (!m_changingRate && m_player->rate() != m_playbackRate)
            m_player->rateChanged();
        return;
    }

    if (isLiveStream()) {
        // notify upper layers that we cannot handle passed rate.
        m_changingRate = false;
        m_player->rateChanged();
        return;
    }

    GstState state;
    GstState pending;

    // Record the requested rate before touching the pipeline; m_changingRate
    // suppresses the redundant-notification path above while the change is
    // in flight.
    m_playbackRate = rate;
    m_changingRate = true;

    gst_element_get_state(m_pipeline.get(), &state, &pending, 0);

    // Rate 0 means "pause": move to PAUSED unless we are already (heading) there.
    if (!rate) {
        m_changingRate = false;
        m_playbackRatePause = true;
        if (state != GST_STATE_PAUSED && pending != GST_STATE_PAUSED)
            changePipelineState(GST_STATE_PAUSED);
        return;
    }

    // NOTE(review): while a state change is pending only m_playbackRate is
    // recorded here — presumably the rate is applied later elsewhere; confirm.
    if ((state != GST_STATE_PLAYING && state != GST_STATE_PAUSED)
        || (pending == GST_STATE_PAUSED))
        return;

    updatePlaybackRate();
}
1119
// Reports the rate most recently recorded by setRate() (which stores the
// clamped requested rate before the pipeline update completes).
double MediaPlayerPrivateGStreamer::rate() const
{
    return m_playbackRate;
}
1124
// Records whether pitch should be preserved on rate changes; the flag is only
// stored here (consumers elsewhere read m_preservesPitch).
void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
{
    m_preservesPitch = preservesPitch;
}
1129
// Returns the buffered time ranges, derived from a GST_FORMAT_PERCENT
// buffering query on the pipeline and scaled against the media duration.
// Returns an empty range set after an error, for live streams, or when the
// duration is unknown/infinite (percent ranges would be meaningless).
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
{
    auto timeRanges = std::make_unique<PlatformTimeRanges>();
    if (m_errorOccured || isLiveStream())
        return timeRanges;

    MediaTime mediaDuration = durationMediaTime();
    if (!mediaDuration || mediaDuration.isPositiveInfinite())
        return timeRanges;

    GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);

    if (!gst_element_query(m_pipeline.get(), query)) {
        gst_query_unref(query);
        return timeRanges;
    }

    // Convert each percent-based buffering range into an absolute time range.
    guint numBufferingRanges = gst_query_get_n_buffering_ranges(query);
    for (guint index = 0; index < numBufferingRanges; index++) {
        gint64 rangeStart = 0, rangeStop = 0;
        if (gst_query_parse_nth_buffering_range(query, index, &rangeStart, &rangeStop)) {
            uint64_t startTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStart, GST_FORMAT_PERCENT_MAX);
            uint64_t stopTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStop, GST_FORMAT_PERCENT_MAX);
            timeRanges->add(MediaTime(startTime, GST_SECOND), MediaTime(stopTime, GST_SECOND));
        }
    }

    // Fallback to the more general maxTimeLoaded() if no range has
    // been found.
    if (!timeRanges->length()) {
        MediaTime loaded = maxTimeLoaded();
        if (loaded.isValid() && loaded)
            timeRanges->add(MediaTime::zeroTime(), loaded);
    }

    gst_query_unref(query);

    return timeRanges;
}
1169
// Central dispatcher for messages posted on the pipeline bus. Translates
// asynchronous pipeline events (errors, EOS, state changes, buffering,
// missing plugins, tags, stream selection, ...) into MediaPlayer state
// updates and client notifications.
void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
{
    GUniqueOutPtr<GError> err;
    GUniqueOutPtr<gchar> debug;
    MediaPlayer::NetworkState error;
    bool issueError = true;
    bool attemptNextLocation = false;
    const GstStructure* structure = gst_message_get_structure(message);
    GstState requestedState, currentState;

    m_canFallBackToLastFinishedSeekPosition = false;

    if (structure) {
        const gchar* messageTypeName = gst_structure_get_name(structure);

        // Redirect messages are sent from elements, like qtdemux, to
        // notify of the new location(s) of the media.
        if (!g_strcmp0(messageTypeName, "redirect")) {
            mediaLocationChanged(message);
            return;
        }
    }

    // We ignore state changes from internal elements. They are forwarded to playbin2 anyway.
    bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get());

    GST_LOG("Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_ERROR:
        // Ignore errors while the pipeline is being reset, while a missing
        // plugin installation is in flight, or when an error was already issued.
        if (m_resetPipeline || !m_missingPluginCallbacks.isEmpty() || m_errorOccured)
            break;
        gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
        GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());

        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");

        // Map the GError domain/code onto a MediaPlayer::NetworkState.
        error = MediaPlayer::Empty;
        if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_CODEC_NOT_FOUND)
            || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE)
            || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED)
            || g_error_matches(err.get(), GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)
            || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
            error = MediaPlayer::FormatError;
        else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
            // Let the mediaPlayerClient handle the stream error, in
            // this case the HTMLMediaElement will emit a stalled
            // event.
            GST_ERROR("Decode error, let the Media element emit a stalled event.");
            m_loadingStalled = true;
            break;
        } else if (err->domain == GST_STREAM_ERROR) {
            error = MediaPlayer::DecodeError;
            attemptNextLocation = true;
        } else if (err->domain == GST_RESOURCE_ERROR)
            error = MediaPlayer::NetworkError;

        // For stream errors, try any alternate media location first; only
        // surface the error to the client if that fails.
        if (attemptNextLocation)
            issueError = !loadNextLocation();
        if (issueError) {
            m_errorOccured = true;
            if (m_networkState != error) {
                m_networkState = error;
                m_player->networkStateChanged();
            }
        }
        break;
    case GST_MESSAGE_EOS:
        didEnd();
        break;
    case GST_MESSAGE_ASYNC_DONE:
        if (!messageSourceIsPlaybin || m_delayingLoad)
            break;
        asyncStateChangeDone();
        break;
    case GST_MESSAGE_STATE_CHANGED: {
        if (!messageSourceIsPlaybin || m_delayingLoad)
            break;
        updateStates();

        // Construct a filename for the graphviz dot file output.
        GstState newState;
        gst_message_parse_state_changed(message, &currentState, &newState, nullptr);
        CString dotFileName = String::format("%s.%s_%s", GST_OBJECT_NAME(m_pipeline.get()),
            gst_element_state_get_name(currentState), gst_element_state_get_name(newState)).utf8();
        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());

        break;
    }
    case GST_MESSAGE_BUFFERING:
        processBufferingStats(message);
        break;
    case GST_MESSAGE_DURATION_CHANGED:
        // Duration in MSE is managed by MediaSource, SourceBuffer and AppendPipeline.
        if (messageSourceIsPlaybin && !isMediaSource())
            durationChanged();
        break;
    case GST_MESSAGE_REQUEST_STATE:
        // Honor downward state requests from elements (e.g. to READY/PAUSED).
        gst_message_parse_request_state(message, &requestedState);
        gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND);
        if (requestedState < currentState) {
            GST_INFO_OBJECT(pipeline(), "Element %s requested state change to %s", GST_MESSAGE_SRC_NAME(message),
                gst_element_state_get_name(requestedState));
            m_requestedState = requestedState;
            if (!changePipelineState(requestedState))
                loadingFailed(MediaPlayer::Empty);
        }
        break;
    case GST_MESSAGE_CLOCK_LOST:
        // This can only happen in PLAYING state and we should just
        // get a new clock by moving back to PAUSED and then to
        // PLAYING again.
        // This can happen if the stream that ends in a sink that
        // provides the current clock disappears, for example if
        // the audio sink provides the clock and the audio stream
        // is disabled. It also happens relatively often with
        // HTTP adaptive streams when switching between different
        // variants of a stream.
        gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
        gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
        break;
    case GST_MESSAGE_LATENCY:
        // Recalculate the latency, we don't need any special handling
        // here other than the GStreamer default.
        // This can happen if the latency of live elements changes, or
        // for one reason or another a new live element is added or
        // removed from the pipeline.
        gst_bin_recalculate_latency(GST_BIN(m_pipeline.get()));
        break;
    case GST_MESSAGE_ELEMENT:
        // Element-specific messages: missing plugins, MPEG-TS sections,
        // EME key notifications, HTTP headers and adaptive-streaming stats.
        if (gst_is_missing_plugin_message(message)) {
            if (gst_install_plugins_supported()) {
                RefPtr<MediaPlayerRequestInstallMissingPluginsCallback> missingPluginCallback = MediaPlayerRequestInstallMissingPluginsCallback::create([weakThis = makeWeakPtr(*this)](uint32_t result, MediaPlayerRequestInstallMissingPluginsCallback& missingPluginCallback) {
                    if (!weakThis) {
                        GST_INFO("got missing pluging installation callback in destroyed player with result %u", result);
                        return;
                    }

                    GST_DEBUG("got missing plugin installation callback with result %u", result);
                    RefPtr<MediaPlayerRequestInstallMissingPluginsCallback> protectedMissingPluginCallback = &missingPluginCallback;
                    weakThis->m_missingPluginCallbacks.removeFirst(protectedMissingPluginCallback);
                    if (result != GST_INSTALL_PLUGINS_SUCCESS)
                        return;

                    // Plugin installed: cycle READY -> PAUSED so the new
                    // element is picked up.
                    weakThis->changePipelineState(GST_STATE_READY);
                    weakThis->changePipelineState(GST_STATE_PAUSED);
                });
                m_missingPluginCallbacks.append(missingPluginCallback);
                GUniquePtr<char> detail(gst_missing_plugin_message_get_installer_detail(message));
                GUniquePtr<char> description(gst_missing_plugin_message_get_description(message));
                m_player->client().requestInstallMissingPlugins(String::fromUTF8(detail.get()), String::fromUTF8(description.get()), *missingPluginCallback);
            }
        }
#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
        else if (GstMpegtsSection* section = gst_message_parse_mpegts_section(message)) {
            processMpegTsSection(section);
            gst_mpegts_section_unref(section);
        }
#endif
#if ENABLE(ENCRYPTED_MEDIA)
        else if (gst_structure_has_name(structure, "drm-waiting-for-key")) {
            GST_DEBUG_OBJECT(pipeline(), "drm-waiting-for-key message from %s", GST_MESSAGE_SRC_NAME(message));
            setWaitingForKey(true);
            // FIXME: The decryptors should be able to attempt to decrypt after being created and linked in a pipeline but currently they are not and current
            // architecture does not make this very easy. Fortunately, the arch will change soon and it does not pay off to fix this now with something that could be
            // more convoluted. In the meantime, force attempt to decrypt when they get blocked.
            attemptToDecryptWithLocalInstance();
        } else if (gst_structure_has_name(structure, "drm-key-received")) {
            GST_DEBUG_OBJECT(pipeline(), "drm-key-received message from %s", GST_MESSAGE_SRC_NAME(message));
            setWaitingForKey(false);
        }
#endif
        else if (gst_structure_has_name(structure, "http-headers")) {
            // A missing Content-Length response header is treated as a live
            // stream, which disables on-disk buffering.
            GstStructure* responseHeaders;
            if (gst_structure_get(structure, "response-headers", GST_TYPE_STRUCTURE, &responseHeaders, nullptr)) {
                if (!gst_structure_has_field(responseHeaders, httpHeaderNameString(HTTPHeaderName::ContentLength).utf8().data())) {
                    GST_INFO_OBJECT(pipeline(), "Live stream detected. Disabling on-disk buffering");
                    m_isStreaming = true;
                    setDownloadBuffering();
                }
                gst_structure_free(responseHeaders);
            }
        } else if (gst_structure_has_name(structure, "adaptive-streaming-statistics")) {
            if (WEBKIT_IS_WEB_SRC(m_source.get()))
                if (const char* uri = gst_structure_get_string(structure, "uri"))
                    m_hasTaintedOrigin = webKitSrcWouldTaintOrigin(WEBKIT_WEB_SRC(m_source.get()), SecurityOrigin::create(URL(URL(), uri)));
        } else
            GST_DEBUG_OBJECT(pipeline(), "Unhandled element message: %" GST_PTR_FORMAT, structure);
        break;
#if ENABLE(VIDEO_TRACK)
    case GST_MESSAGE_TOC:
        processTableOfContents(message);
        break;
#endif
    case GST_MESSAGE_TAG: {
        // Only the image-orientation tag is consumed; it rotates the video source.
        GstTagList* tags = nullptr;
        GUniqueOutPtr<gchar> tag;
        gst_message_parse_tag(message, &tags);
        if (gst_tag_list_get_string(tags, GST_TAG_IMAGE_ORIENTATION, &tag.outPtr())) {
            if (!g_strcmp0(tag.get(), "rotate-90"))
                setVideoSourceOrientation(ImageOrientation(OriginRightTop));
            else if (!g_strcmp0(tag.get(), "rotate-180"))
                setVideoSourceOrientation(ImageOrientation(OriginBottomRight));
            else if (!g_strcmp0(tag.get(), "rotate-270"))
                setVideoSourceOrientation(ImageOrientation(OriginLeftBottom));
        }
        gst_tag_list_unref(tags);
        break;
    }
#if GST_CHECK_VERSION(1, 10, 0)
    case GST_MESSAGE_STREAMS_SELECTED: {
        // playbin3 path: refresh the current audio/video/text stream IDs from
        // the selected stream collection.
        GRefPtr<GstStreamCollection> collection;
        gst_message_parse_streams_selected(message, &collection.outPtr());

        if (!collection)
            break;

        m_streamCollection.swap(collection);
        m_currentAudioStreamId = "";
        m_currentVideoStreamId = "";
        m_currentTextStreamId = "";

        unsigned length = gst_message_streams_selected_get_size(message);
        for (unsigned i = 0; i < length; i++) {
            GRefPtr<GstStream> stream = gst_message_streams_selected_get_stream(message, i);
            if (!stream)
                continue;

            GstStreamType type = gst_stream_get_stream_type(stream.get());
            String streamId(gst_stream_get_stream_id(stream.get()));

            GST_DEBUG_OBJECT(pipeline(), "Selecting %s track with ID: %s", gst_stream_type_get_name(type), streamId.utf8().data());
            // Playbin3 can send more than one selected stream of the same type
            // but there's no priority or ordering system in place, so we assume
            // the selected stream is the last one as reported by playbin3.
            if (type & GST_STREAM_TYPE_AUDIO) {
                m_currentAudioStreamId = streamId;
                auto track = m_audioTracks.get(m_currentAudioStreamId);
                ASSERT(track);
                track->markAsActive();
            } else if (type & GST_STREAM_TYPE_VIDEO) {
                m_currentVideoStreamId = streamId;
                auto track = m_videoTracks.get(m_currentVideoStreamId);
                ASSERT(track);
                track->markAsActive();
            } else if (type & GST_STREAM_TYPE_TEXT)
                m_currentTextStreamId = streamId;
            else
                GST_WARNING("Unknown stream type with stream-id %s", streamId.utf8().data());
        }
        break;
    }
#endif
    default:
        GST_DEBUG_OBJECT(pipeline(), "Unhandled GStreamer message type: %s", GST_MESSAGE_TYPE_NAME(message));
        break;
    }
}
1427
// Handles GST_MESSAGE_BUFFERING: records the buffering flag and percentage,
// and re-evaluates the ready/network states once buffering reaches 100%.
void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message)
{
    m_buffering = true;
    gst_message_parse_buffering(message, &m_bufferingPercentage);

    GST_DEBUG_OBJECT(pipeline(), "[Buffering] Buffering: %d%%.", m_bufferingPercentage);

    if (m_bufferingPercentage == 100)
        updateStates();
}
1438
1439 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
// Handles an MPEG-TS section message. PMT sections rebuild the metadata text
// track list; any other section is delivered as a data cue on the track
// matching its PID (if one exists).
void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section)
{
    ASSERT(section);

    if (section->section_type == GST_MPEGTS_SECTION_PMT) {
        const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
        m_metadataTracks.clear();
        for (guint i = 0; i < pmt->streams->len; ++i) {
            const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
            // 0x05 = private sections; >= 0x80 = user-private stream types.
            if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
                AtomicString pid = String::number(stream->pid);
                RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = InbandMetadataTextTrackPrivateGStreamer::create(
                    InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data, pid);

                // 4.7.10.12.2 Sourcing in-band text tracks
                // If the new text track's kind is metadata, then set the text track in-band metadata track dispatch
                // type as follows, based on the type of the media resource:
                // Let stream type be the value of the "stream_type" field describing the text track's type in the
                // file's program map section, interpreted as an 8-bit unsigned integer. Let length be the value of
                // the "ES_info_length" field for the track in the same part of the program map section, interpreted
                // as an integer as defined by the MPEG-2 specification. Let descriptor bytes be the length bytes
                // following the "ES_info_length" field. The text track in-band metadata track dispatch type must be
                // set to the concatenation of the stream type byte and the zero or more descriptor bytes bytes,
                // expressed in hexadecimal using uppercase ASCII hex digits.
                String inbandMetadataTrackDispatchType;
                appendUnsignedAsHexFixedSize(stream->stream_type, inbandMetadataTrackDispatchType, 2);
                for (guint j = 0; j < stream->descriptors->len; ++j) {
                    const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
                    for (guint k = 0; k < descriptor->length; ++k)
                        appendByteAsHex(descriptor->data[k], inbandMetadataTrackDispatchType);
                }
                track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType);

                m_metadataTracks.add(pid, track);
                m_player->addTextTrack(*track);
            }
        }
    } else {
        // Non-PMT section: hand the raw section bytes to the matching track
        // as a zero-duration data cue at the current playback position.
        AtomicString pid = String::number(section->pid);
        RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = m_metadataTracks.get(pid);
        if (!track)
            return;

        GRefPtr<GBytes> data = gst_mpegts_section_get_data(section);
        gsize size;
        const void* bytes = g_bytes_get_data(data.get(), &size);

        track->addDataCue(currentMediaTime(), currentMediaTime(), bytes, size);
    }
}
1490 #endif
1491
1492 #if ENABLE(VIDEO_TRACK)
// Handles GST_MESSAGE_TOC: replaces any previous chapters track with a fresh
// one and converts every TOC entry (recursively) into chapter cues.
void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
{
    if (m_chaptersTrack)
        m_player->removeTextTrack(*m_chaptersTrack);

    m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Chapters, InbandTextTrackPrivate::Generic);
    m_player->addTextTrack(*m_chaptersTrack);

    GRefPtr<GstToc> toc;
    gboolean updated;
    gst_message_parse_toc(message, &toc.outPtr(), &updated);
    ASSERT(toc);

    for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next)
        processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
}
1509
1510 void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry)
1511 {
1512     ASSERT(entry);
1513
1514     auto cue = GenericCueData::create();
1515
1516     gint64 start = -1, stop = -1;
1517     gst_toc_entry_get_start_stop_times(entry, &start, &stop);
1518     if (start != -1)
1519         cue->setStartTime(MediaTime(start, GST_SECOND));
1520     if (stop != -1)
1521         cue->setEndTime(MediaTime(stop, GST_SECOND));
1522
1523     GstTagList* tags = gst_toc_entry_get_tags(entry);
1524     if (tags) {
1525         gchar* title = nullptr;
1526         gst_tag_list_get_string(tags, GST_TAG_TITLE, &title);
1527         if (title) {
1528             cue->setContent(title);
1529             g_free(title);
1530         }
1531     }
1532
1533     m_chaptersTrack->addGenericCue(cue);
1534
1535     for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next)
1536         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1537 }
1538
1539 void MediaPlayerPrivateGStreamer::purgeInvalidAudioTracks(Vector<String> validTrackIds)
1540 {
1541     m_audioTracks.removeIf([validTrackIds](auto& keyAndValue) {
1542         return !validTrackIds.contains(keyAndValue.key);
1543     });
1544 }
1545
1546 void MediaPlayerPrivateGStreamer::purgeInvalidVideoTracks(Vector<String> validTrackIds)
1547 {
1548     m_videoTracks.removeIf([validTrackIds](auto& keyAndValue) {
1549         return !validTrackIds.contains(keyAndValue.key);
1550     });
1551 }
1552
1553 void MediaPlayerPrivateGStreamer::purgeInvalidTextTracks(Vector<String> validTrackIds)
1554 {
1555     m_textTracks.removeIf([validTrackIds](auto& keyAndValue) {
1556         return !validTrackIds.contains(keyAndValue.key);
1557     });
1558 }
1559 #endif
1560
1561 static gint findHLSQueue(gconstpointer a, gconstpointer)
1562 {
1563     GValue* item = static_cast<GValue*>(const_cast<gpointer>(a));
1564     GstElement* element = GST_ELEMENT(g_value_get_object(item));
1565     if (g_str_has_prefix(GST_ELEMENT_NAME(element), "queue")) {
1566         GstElement* parent = GST_ELEMENT(GST_ELEMENT_PARENT(element));
1567         if (!GST_IS_OBJECT(parent))
1568             return 1;
1569
1570         if (g_str_has_prefix(GST_ELEMENT_NAME(GST_ELEMENT_PARENT(parent)), "hlsdemux"))
1571             return 0;
1572     }
1573
1574     return 1;
1575 }
1576
1577 static bool isHLSProgressing(GstElement* playbin, GstQuery* query)
1578 {
1579     GValue item = { };
1580     GstIterator* binIterator = gst_bin_iterate_recurse(GST_BIN(playbin));
1581     bool foundHLSQueue = gst_iterator_find_custom(binIterator, reinterpret_cast<GCompareFunc>(findHLSQueue), &item, nullptr);
1582     gst_iterator_free(binIterator);
1583
1584     if (!foundHLSQueue)
1585         return false;
1586
1587     GstElement* queueElement = GST_ELEMENT(g_value_get_object(&item));
1588     bool queryResult = gst_element_query(queueElement, query);
1589     g_value_unset(&item);
1590
1591     return queryResult;
1592 }
1593
// Timer callback polling the pipeline's download-buffering progress. Keeps
// m_maxTimeLoaded and m_downloadFinished up to date, and stops the timer once
// the full media has been downloaded.
void MediaPlayerPrivateGStreamer::fillTimerFired()
{
    GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);

    if (G_UNLIKELY(!gst_element_query(m_pipeline.get(), query))) {
        // This query always fails for live pipelines. In the case of HLS, try and find
        // the queue inside the HLS element to get a proxy measure of progress. Note
        // that the percentage value is rather meaningless as used below.
        // This is a hack, see https://bugs.webkit.org/show_bug.cgi?id=141469.
        if (!isHLSProgressing(m_pipeline.get(), query)) {
            gst_query_unref(query);
            return;
        }
    }

    gint64 start, stop;
    gdouble fillStatus = 100.0;

    gst_query_parse_buffering_range(query, nullptr, &start, &stop, nullptr);
    gst_query_unref(query);

    // stop is in GST_FORMAT_PERCENT units; -1 means unknown, in which case we
    // keep the 100% default.
    if (stop != -1)
        fillStatus = 100.0 * stop / GST_FORMAT_PERCENT_MAX;

    GST_DEBUG_OBJECT(pipeline(), "[Buffering] Download buffer filled up to %f%%", fillStatus);

    MediaTime mediaDuration = durationMediaTime();

    // Update maxTimeLoaded only if the media duration is
    // available. Otherwise we can't compute it.
    if (mediaDuration) {
        if (fillStatus == 100.0)
            m_maxTimeLoaded = mediaDuration;
        else
            m_maxTimeLoaded = MediaTime(fillStatus * static_cast<double>(toGstUnsigned64Time(mediaDuration)) / 100, GST_SECOND);
        GST_DEBUG_OBJECT(pipeline(), "[Buffering] Updated maxTimeLoaded: %s", toString(m_maxTimeLoaded).utf8().data());
    }

    m_downloadFinished = fillStatus == 100.0;
    if (!m_downloadFinished) {
        updateStates();
        return;
    }

    // Media is now fully loaded. It will play even if network
    // connection is cut. Buffering is done, remove the fill source
    // from the main loop.
    m_fillTimer.stop();
    updateStates();
}
1644
1645 MediaTime MediaPlayerPrivateGStreamer::maxMediaTimeSeekable() const
1646 {
1647     if (m_errorOccured)
1648         return MediaTime::zeroTime();
1649
1650     MediaTime duration = durationMediaTime();
1651     GST_DEBUG_OBJECT(pipeline(), "maxMediaTimeSeekable, duration: %s", toString(duration).utf8().data());
1652     // infinite duration means live stream
1653     if (duration.isPositiveInfinite())
1654         return MediaTime::zeroTime();
1655
1656     return duration;
1657 }
1658
1659 MediaTime MediaPlayerPrivateGStreamer::maxTimeLoaded() const
1660 {
1661     if (m_errorOccured)
1662         return MediaTime::zeroTime();
1663
1664     MediaTime loaded = m_maxTimeLoaded;
1665     if (m_isEndReached)
1666         loaded = durationMediaTime();
1667     GST_LOG("maxTimeLoaded: %s", toString(loaded).utf8().data());
1668     return loaded;
1669 }
1670
// Reports whether loading has progressed since the previous call. Although
// const, it records the last observed maxTimeLoaded in
// m_maxTimeLoadedAtLastDidLoadingProgress (presumably a mutable member —
// declared outside this view) for the next comparison.
bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
{
    if (m_errorOccured || m_loadingStalled)
        return false;

    // Live streams always report progress.
    if (isLiveStream())
        return true;

    if (UNLIKELY(!m_pipeline || !durationMediaTime() || (!isMediaSource() && !totalBytes())))
        return false;

    MediaTime currentMaxTimeLoaded = maxTimeLoaded();
    bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
    m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
    GST_LOG("didLoadingProgress: %s", toString(didLoadingProgress).utf8().data());
    return didLoadingProgress;
}
1688
// Returns the total size of the media in bytes, caching the result in
// m_totalBytes. Returns 0 on error, before a source exists, or for live
// streams. As a side effect m_isStreaming is set when the queried length is 0.
unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
{
    if (m_errorOccured)
        return 0;

    // Return the cached value when a previous query succeeded.
    if (m_totalBytes)
        return m_totalBytes;

    if (!m_source)
        return 0;

    if (isLiveStream())
        return 0;

    // First try a duration query in bytes on the source element itself.
    GstFormat fmt = GST_FORMAT_BYTES;
    gint64 length = 0;
    if (gst_element_query_duration(m_source.get(), fmt, &length)) {
        GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
        m_totalBytes = static_cast<unsigned long long>(length);
        m_isStreaming = !length;
        return m_totalBytes;
    }

    // Fall back to querying the source pads manually.
    // See also https://bugzilla.gnome.org/show_bug.cgi?id=638749
    GstIterator* iter = gst_element_iterate_src_pads(m_source.get());
    bool done = false;
    while (!done) {
        GValue item = G_VALUE_INIT;
        switch (gst_iterator_next(iter, &item)) {
        case GST_ITERATOR_OK: {
            // Keep the largest duration reported by any source pad.
            GstPad* pad = static_cast<GstPad*>(g_value_get_object(&item));
            gint64 padLength = 0;
            if (gst_pad_query_duration(pad, fmt, &padLength) && padLength > length)
                length = padLength;
            break;
        }
        case GST_ITERATOR_RESYNC:
            gst_iterator_resync(iter);
            break;
        case GST_ITERATOR_ERROR:
            FALLTHROUGH;
        case GST_ITERATOR_DONE:
            done = true;
            break;
        }

        g_value_unset(&item);
    }

    gst_iterator_free(iter);

    GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
    m_totalBytes = static_cast<unsigned long long>(length);
    m_isStreaming = !length;
    return m_totalBytes;
}
1746
// Thunk for playbin's "source-setup" signal: forwards to the instance method.
void MediaPlayerPrivateGStreamer::sourceSetupCallback(MediaPlayerPrivateGStreamer* player, GstElement* sourceElement)
{
    player->sourceSetup(sourceElement);
}
1751
// Called whenever uridecodebin adds an element. We only care about the
// GstDownloadBuffer: once it appears we remember it, watch for its temporary
// file, point its temp-template at /var/tmp and purge leftovers matching the
// previous template.
void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GstElement* element, MediaPlayerPrivateGStreamer* player)
{
    // Ignore every element that is not a GstDownloadBuffer (g_strcmp0 returns
    // 0 on equality).
    if (g_strcmp0(G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(G_OBJECT(element))), "GstDownloadBuffer"))
        return;

    player->m_downloadBuffer = element;
    // Only one download buffer is expected, so stop listening for element-added.
    g_signal_handlers_disconnect_by_func(bin, reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), player);
    g_signal_connect_swapped(element, "notify::temp-location", G_CALLBACK(downloadBufferFileCreatedCallback), player);

    GUniqueOutPtr<char> oldDownloadTemplate;
    g_object_get(element, "temp-template", &oldDownloadTemplate.outPtr(), nullptr);

    GUniquePtr<char> newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, "var", "tmp", "WebKit-Media-XXXXXX", nullptr));
    g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr);
    GST_DEBUG_OBJECT(player->pipeline(), "Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get());

    // Remove stale temporary files left over from the previous template.
    player->purgeOldDownloadFiles(oldDownloadTemplate.get());
}
1770
// Fired (via notify::temp-location) once the download buffer has created its
// temporary file. The file path is unlinked immediately after creation.
// NOTE(review): presumably the element keeps an open descriptor so playback is
// unaffected while the on-disk entry disappears — confirm against the
// GstDownloadBuffer documentation.
void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer* player)
{
    ASSERT(player->m_downloadBuffer);

    // One-shot: the notification is only needed until the file exists.
    g_signal_handlers_disconnect_by_func(player->m_downloadBuffer.get(), reinterpret_cast<gpointer>(downloadBufferFileCreatedCallback), player);

    GUniqueOutPtr<char> downloadFile;
    g_object_get(player->m_downloadBuffer.get(), "temp-location", &downloadFile.outPtr(), nullptr);
    player->m_downloadBuffer = nullptr;

    if (UNLIKELY(!FileSystem::deleteFile(downloadFile.get()))) {
        GST_WARNING("Couldn't unlink media temporary file %s after creation", downloadFile.get());
        return;
    }

    GST_DEBUG_OBJECT(player->pipeline(), "Unlinked media temporary file %s after creation", downloadFile.get());
}
1788
1789 void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate)
1790 {
1791     if (!downloadFileTemplate)
1792         return;
1793
1794     GUniquePtr<char> templatePath(g_path_get_dirname(downloadFileTemplate));
1795     GUniquePtr<char> templateFile(g_path_get_basename(downloadFileTemplate));
1796     String templatePattern = String(templateFile.get()).replace("X", "?");
1797
1798     for (auto& filePath : FileSystem::listDirectory(templatePath.get(), templatePattern)) {
1799         if (UNLIKELY(!FileSystem::deleteFile(filePath))) {
1800             GST_WARNING("Couldn't unlink legacy media temporary file: %s", filePath.utf8().data());
1801             continue;
1802         }
1803
1804         GST_TRACE("Unlinked legacy media temporary file: %s", filePath.utf8().data());
1805     }
1806 }
1807
// Invoked when playbin notifies us of its source element. Hooks the WebKit
// source (or, with MEDIA_STREAM builds, the mediastream source) up with this
// player instance.
void MediaPlayerPrivateGStreamer::sourceSetup(GstElement* sourceElement)
{
    GST_DEBUG_OBJECT(pipeline(), "Source element set-up for %s", GST_ELEMENT_NAME(sourceElement));

    // Drop the element-added handler installed for the previous source, if any.
    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    m_source = sourceElement;

    if (WEBKIT_IS_WEB_SRC(m_source.get())) {
        webKitWebSrcSetMediaPlayer(WEBKIT_WEB_SRC(m_source.get()), m_player);
        // Watch the source's parent bin so the download buffer can be
        // reconfigured as soon as it is added.
        g_signal_connect(GST_ELEMENT_PARENT(m_source.get()), "element-added", G_CALLBACK(uriDecodeBinElementAddedCallback), this);
#if ENABLE(MEDIA_STREAM) && GST_CHECK_VERSION(1, 10, 0)
    } else if (WEBKIT_IS_MEDIA_STREAM_SRC(sourceElement)) {
        auto stream = m_streamPrivate.get();
        ASSERT(stream);
        webkitMediaStreamSrcSetStream(WEBKIT_MEDIA_STREAM_SRC(sourceElement), stream);
#endif
    }
}
1828
1829 bool MediaPlayerPrivateGStreamer::hasSingleSecurityOrigin() const
1830 {
1831     if (!m_source)
1832         return false;
1833
1834     if (!WEBKIT_IS_WEB_SRC(m_source.get()))
1835         return true;
1836
1837     GUniqueOutPtr<char> originalURI, resolvedURI;
1838     g_object_get(m_source.get(), "location", &originalURI.outPtr(), "resolved-location", &resolvedURI.outPtr(), nullptr);
1839     if (!originalURI || !resolvedURI)
1840         return false;
1841     if (!g_strcmp0(originalURI.get(), resolvedURI.get()))
1842         return true;
1843
1844     Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::createFromString(String::fromUTF8(resolvedURI.get())));
1845     Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(String::fromUTF8(originalURI.get())));
1846     return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
1847 }
1848
1849 void MediaPlayerPrivateGStreamer::cancelLoad()
1850 {
1851     if (m_networkState < MediaPlayer::Loading || m_networkState == MediaPlayer::Loaded)
1852         return;
1853
1854     if (m_pipeline)
1855         changePipelineState(GST_STATE_READY);
1856 }
1857
// Called when the pipeline finishes an asynchronous state change. Completes a
// seek that was in flight (chaining into an overlapping seek if one arrived
// meanwhile), otherwise just refreshes the player states.
void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
{
    if (!m_pipeline || m_errorOccured)
        return;

    if (m_seeking) {
        if (m_seekIsPending)
            updateStates();
        else {
            GST_DEBUG_OBJECT(pipeline(), "[Seek] seeked to %s", toString(m_seekTime).utf8().data());
            m_seeking = false;
            m_cachedPosition = MediaTime::invalidTime();
            // If another seek was requested while this one was in progress,
            // start it now.
            if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek.isValid()) {
                seek(m_timeOfOverlappingSeek);
                m_timeOfOverlappingSeek = MediaTime::invalidTime();
                return;
            }
            m_timeOfOverlappingSeek = MediaTime::invalidTime();

            // The pipeline can still have a pending state. In this case a position query will fail.
            // Right now we can use m_seekTime as a fallback.
            m_canFallBackToLastFinishedSeekPosition = true;
            timeChanged();
        }
    } else
        updateStates();
}
1885
// Derives WebCore's network and ready states from the current GStreamer
// pipeline state, pauses/restarts playback around buffering, notifies the
// media player client of transitions, and commits any pending seek once the
// pipeline has prerolled.
void MediaPlayerPrivateGStreamer::updateStates()
{
    if (!m_pipeline)
        return;

    if (m_errorOccured)
        return;

    MediaPlayer::NetworkState oldNetworkState = m_networkState;
    MediaPlayer::ReadyState oldReadyState = m_readyState;
    GstState pending;
    GstState state;
    bool stateReallyChanged = false;

    // Short (250ns) timeout: we only want a snapshot, not to block on an
    // ongoing state change.
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
    if (state != m_currentState) {
        m_oldState = m_currentState;
        m_currentState = state;
        stateReallyChanged = true;
    }

    bool shouldUpdatePlaybackState = false;
    switch (getStateResult) {
    case GST_STATE_CHANGE_SUCCESS: {
        GST_DEBUG_OBJECT(pipeline(), "State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));

        // Do nothing if on EOS and state changed to READY to avoid recreating the player
        // on HTMLMediaElement and properly generate the video 'ended' event.
        if (m_isEndReached && m_currentState == GST_STATE_READY)
            break;

        m_resetPipeline = m_currentState <= GST_STATE_READY;

        bool didBuffering = m_buffering;

        // Update ready and network states.
        switch (m_currentState) {
        case GST_STATE_NULL:
            m_readyState = MediaPlayer::HaveNothing;
            m_networkState = MediaPlayer::Empty;
            break;
        case GST_STATE_READY:
            m_readyState = MediaPlayer::HaveMetadata;
            m_networkState = MediaPlayer::Empty;
            break;
        case GST_STATE_PAUSED:
        case GST_STATE_PLAYING:
            if (m_buffering) {
                if (m_bufferingPercentage == 100) {
                    GST_DEBUG_OBJECT(pipeline(), "[Buffering] Complete.");
                    m_buffering = false;
                    m_readyState = MediaPlayer::HaveEnoughData;
                    m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
                } else {
                    m_readyState = MediaPlayer::HaveCurrentData;
                    m_networkState = MediaPlayer::Loading;
                }
            } else if (m_downloadFinished) {
                m_readyState = MediaPlayer::HaveEnoughData;
                m_networkState = MediaPlayer::Loaded;
            } else {
                m_readyState = MediaPlayer::HaveFutureData;
                m_networkState = MediaPlayer::Loading;
            }

            break;
        default:
            ASSERT_NOT_REACHED();
            break;
        }

        // Sync states where needed.
        if (m_currentState == GST_STATE_PAUSED) {
            if (!m_volumeAndMuteInitialized) {
                notifyPlayerOfVolumeChange();
                notifyPlayerOfMute();
                m_volumeAndMuteInitialized = true;
            }

            // Buffering just finished while playback was supposed to continue:
            // resume.
            if (didBuffering && !m_buffering && !m_paused && m_playbackRate) {
                GST_DEBUG_OBJECT(pipeline(), "[Buffering] Restarting playback.");
                changePipelineState(GST_STATE_PLAYING);
            }
        } else if (m_currentState == GST_STATE_PLAYING) {
            m_paused = false;

            if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
                GST_DEBUG_OBJECT(pipeline(), "[Buffering] Pausing stream for buffering.");
                changePipelineState(GST_STATE_PAUSED);
            }
        } else
            m_paused = true;

        GST_DEBUG_OBJECT(pipeline(), "Old state: %s, new state: %s (requested: %s)", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState), gst_element_state_get_name(m_requestedState));
        if (m_requestedState == GST_STATE_PAUSED && m_currentState == GST_STATE_PAUSED) {
            shouldUpdatePlaybackState = true;
            GST_INFO_OBJECT(pipeline(), "Requested state change to %s was completed", gst_element_state_get_name(m_currentState));
        }

        // Emit play state change notification only when going to PLAYING so that
        // the media element gets a chance to enable its page sleep disabler.
        // Emitting this notification in more cases triggers unwanted code paths
        // and test timeouts.
        if (stateReallyChanged && (m_oldState != m_currentState) && (m_oldState == GST_STATE_PAUSED && m_currentState == GST_STATE_PLAYING)) {
            GST_INFO_OBJECT(pipeline(), "Playback state changed from %s to %s. Notifying the media player client", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState));
            shouldUpdatePlaybackState = true;
        }

        break;
    }
    case GST_STATE_CHANGE_ASYNC:
        GST_DEBUG_OBJECT(pipeline(), "Async: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
        // Change in progress.
        break;
    case GST_STATE_CHANGE_FAILURE:
        GST_DEBUG_OBJECT(pipeline(), "Failure: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
        // Change failed
        return;
    case GST_STATE_CHANGE_NO_PREROLL:
        GST_DEBUG_OBJECT(pipeline(), "No preroll: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));

        // Live pipelines go in PAUSED without prerolling.
        m_isStreaming = true;
        setDownloadBuffering();

        if (m_currentState == GST_STATE_READY)
            m_readyState = MediaPlayer::HaveNothing;
        else if (m_currentState == GST_STATE_PAUSED) {
            m_readyState = MediaPlayer::HaveEnoughData;
            m_paused = true;
        } else if (m_currentState == GST_STATE_PLAYING)
            m_paused = false;

        if (!m_paused && m_playbackRate)
            changePipelineState(GST_STATE_PLAYING);

        m_networkState = MediaPlayer::Loading;
        break;
    default:
        GST_DEBUG_OBJECT(pipeline(), "Else : %d", getStateResult);
        break;
    }

    m_requestedState = GST_STATE_VOID_PENDING;

    if (shouldUpdatePlaybackState)
        m_player->playbackStateChanged();

    // Notify the client only on actual transitions.
    if (m_networkState != oldNetworkState) {
        GST_DEBUG_OBJECT(pipeline(), "Network State Changed from %s to %s", convertEnumerationToString(oldNetworkState).utf8().data(), convertEnumerationToString(m_networkState).utf8().data());
        m_player->networkStateChanged();
    }
    if (m_readyState != oldReadyState) {
        GST_DEBUG_OBJECT(pipeline(), "Ready State Changed from %s to %s", convertEnumerationToString(oldReadyState).utf8().data(), convertEnumerationToString(m_readyState).utf8().data());
        m_player->readyStateChanged();
    }

    // Once prerolled, commit any seek that was queued while the pipeline was
    // not ready for it.
    if (getStateResult == GST_STATE_CHANGE_SUCCESS && m_currentState >= GST_STATE_PAUSED) {
        updatePlaybackRate();
        if (m_seekIsPending) {
            GST_DEBUG_OBJECT(pipeline(), "[Seek] committing pending seek to %s", toString(m_seekTime).utf8().data());
            m_seekIsPending = false;
            m_seeking = doSeek(m_seekTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
            if (!m_seeking) {
                m_cachedPosition = MediaTime::invalidTime();
                GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(m_seekTime).utf8().data());
            }
        }
    }
}
2056
// Handles messages delivered on the pipeline's synchronous bus handler. With
// GStreamer >= 1.10 and the modern playbin3 path, stream-collection messages
// are captured here and track updates are dispatched to the main thread.
bool MediaPlayerPrivateGStreamer::handleSyncMessage(GstMessage* message)
{
#if GST_CHECK_VERSION(1, 10, 0)
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_STREAM_COLLECTION && !m_isLegacyPlaybin) {
        GRefPtr<GstStreamCollection> collection;
        gst_message_parse_stream_collection(message, &collection.outPtr());

        if (collection) {
            m_streamCollection.swap(collection);
            // Track bookkeeping touches main-thread-only objects, so defer.
            m_notifier->notify(MainThreadNotification::StreamCollectionChanged, [this] {
                this->updateTracks();
            });
        }
    }
#endif

    return MediaPlayerPrivateGStreamerBase::handleSyncMessage(message);
}
2075
2076 void MediaPlayerPrivateGStreamer::mediaLocationChanged(GstMessage* message)
2077 {
2078     if (m_mediaLocations)
2079         gst_structure_free(m_mediaLocations);
2080
2081     const GstStructure* structure = gst_message_get_structure(message);
2082     if (structure) {
2083         // This structure can contain:
2084         // - both a new-location string and embedded locations structure
2085         // - or only a new-location string.
2086         m_mediaLocations = gst_structure_copy(structure);
2087         const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2088
2089         if (locations)
2090             m_mediaLocationCurrentIndex = static_cast<int>(gst_value_list_get_size(locations)) -1;
2091
2092         loadNextLocation();
2093     }
2094 }
2095
2096 bool MediaPlayerPrivateGStreamer::loadNextLocation()
2097 {
2098     if (!m_mediaLocations)
2099         return false;
2100
2101     const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2102     const gchar* newLocation = nullptr;
2103
2104     if (!locations) {
2105         // Fallback on new-location string.
2106         newLocation = gst_structure_get_string(m_mediaLocations, "new-location");
2107         if (!newLocation)
2108             return false;
2109     }
2110
2111     if (!newLocation) {
2112         if (m_mediaLocationCurrentIndex < 0) {
2113             m_mediaLocations = nullptr;
2114             return false;
2115         }
2116
2117         const GValue* location = gst_value_list_get_value(locations, m_mediaLocationCurrentIndex);
2118         const GstStructure* structure = gst_value_get_structure(location);
2119
2120         if (!structure) {
2121             m_mediaLocationCurrentIndex--;
2122             return false;
2123         }
2124
2125         newLocation = gst_structure_get_string(structure, "new-location");
2126     }
2127
2128     if (newLocation) {
2129         // Found a candidate. new-location is not always an absolute url
2130         // though. We need to take the base of the current url and
2131         // append the value of new-location to it.
2132         URL baseUrl = gst_uri_is_valid(newLocation) ? URL() : m_url;
2133         URL newUrl = URL(baseUrl, newLocation);
2134         convertToInternalProtocol(newUrl);
2135
2136         RefPtr<SecurityOrigin> securityOrigin = SecurityOrigin::create(m_url);
2137         if (securityOrigin->canRequest(newUrl)) {
2138             GST_INFO_OBJECT(pipeline(), "New media url: %s", newUrl.string().utf8().data());
2139
2140             // Reset player states.
2141             m_networkState = MediaPlayer::Loading;
2142             m_player->networkStateChanged();
2143             m_readyState = MediaPlayer::HaveNothing;
2144             m_player->readyStateChanged();
2145
2146             // Reset pipeline state.
2147             m_resetPipeline = true;
2148             changePipelineState(GST_STATE_READY);
2149
2150             GstState state;
2151             gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
2152             if (state <= GST_STATE_READY) {
2153                 // Set the new uri and start playing.
2154                 setPlaybinURL(newUrl);
2155                 changePipelineState(GST_STATE_PLAYING);
2156                 return true;
2157             }
2158         } else
2159             GST_INFO_OBJECT(pipeline(), "Not allowed to load new media location: %s", newUrl.string().utf8().data());
2160     }
2161     m_mediaLocationCurrentIndex--;
2162     return false;
2163 }
2164
// Load progress notification: just re-derive the player states.
void MediaPlayerPrivateGStreamer::loadStateChanged()
{
    updateStates();
}
2169
// Playback position notification: refresh states, then tell the client.
void MediaPlayerPrivateGStreamer::timeChanged()
{
    updateStates();
    m_player->timeChanged();
}
2175
// Called when playback reaches end-of-stream. Reconciles position/duration
// with the HTMLMediaElement and, unless looping, tears the pipeline back to
// READY.
void MediaPlayerPrivateGStreamer::didEnd()
{
    GST_INFO_OBJECT(pipeline(), "Playback ended");

    // Synchronize position and duration values to not confuse the
    // HTMLMediaElement. In some cases like reverse playback the
    // position is not always reported as 0 for instance.
    m_cachedPosition = MediaTime::invalidTime();
    MediaTime now = currentMediaTime();
    if (now > MediaTime { } && now <= durationMediaTime())
        m_player->durationChanged();

    m_isEndReached = true;
    timeChanged();

    // When not looping, park the pipeline in READY and remember the duration
    // observed at EOS.
    if (!m_player->client().mediaPlayerIsLooping()) {
        m_paused = true;
        m_durationAtEOS = durationMediaTime();
        changePipelineState(GST_STATE_READY);
        m_downloadFinished = false;
    }
}
2198
2199 void MediaPlayerPrivateGStreamer::durationChanged()
2200 {
2201     MediaTime previousDuration = durationMediaTime();
2202
2203     // FIXME: Check if this method is still useful, because it's not doing its work at all
2204     // since bug #159458 removed a cacheDuration() call here.
2205
2206     // Avoid emiting durationchanged in the case where the previous
2207     // duration was 0 because that case is already handled by the
2208     // HTMLMediaElement.
2209     if (previousDuration && durationMediaTime() != previousDuration)
2210         m_player->durationChanged();
2211 }
2212
2213 void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState error)
2214 {
2215     GST_WARNING("Loading failed, error: %d", error);
2216
2217     m_errorOccured = true;
2218     if (m_networkState != error) {
2219         m_networkState = error;
2220         m_player->networkStateChanged();
2221     }
2222     if (m_readyState != MediaPlayer::HaveNothing) {
2223         m_readyState = MediaPlayer::HaveNothing;
2224         m_player->readyStateChanged();
2225     }
2226
2227     // Loading failed, remove ready timer.
2228     m_readyTimerHandler.stop();
2229 }
2230
2231 static HashSet<String, ASCIICaseInsensitiveHash>& mimeTypeSet()
2232 {
2233     static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> mimeTypes = []()
2234     {
2235         initializeGStreamerAndRegisterWebKitElements();
2236         HashSet<String, ASCIICaseInsensitiveHash> set;
2237
2238         GList* audioDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO, GST_RANK_MARGINAL);
2239         GList* videoDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, GST_RANK_MARGINAL);
2240         GList* demuxerFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DEMUXER, GST_RANK_MARGINAL);
2241
2242         enum ElementType {
2243             AudioDecoder = 0,
2244             VideoDecoder,
2245             Demuxer
2246         };
2247         struct GstCapsWebKitMapping {
2248             ElementType elementType;
2249             const char* capsString;
2250             Vector<AtomicString> webkitMimeTypes;
2251         };
2252
2253         Vector<GstCapsWebKitMapping> mapping = {
2254             {AudioDecoder, "audio/midi", {"audio/midi", "audio/riff-midi"}},
2255             {AudioDecoder, "audio/x-sbc", { }},
2256             {AudioDecoder, "audio/x-sid", { }},
2257             {AudioDecoder, "audio/x-flac", {"audio/x-flac", "audio/flac"}},
2258             {AudioDecoder, "audio/x-wav", {"audio/x-wav", "audio/wav", "audio/vnd.wave"}},
2259             {AudioDecoder, "audio/x-wavpack", {"audio/x-wavpack"}},
2260             {AudioDecoder, "audio/x-speex", {"audio/speex", "audio/x-speex"}},
2261             {AudioDecoder, "audio/x-ac3", { }},
2262             {AudioDecoder, "audio/x-eac3", {"audio/x-ac3"}},
2263             {AudioDecoder, "audio/x-dts", { }},
2264             {VideoDecoder, "video/x-h264, profile=(string)high", {"video/mp4", "video/x-m4v"}},
2265             {VideoDecoder, "video/x-msvideocodec", {"video/x-msvideo"}},
2266             {VideoDecoder, "video/x-h263", { }},
2267             {VideoDecoder, "video/mpegts", { }},
2268             {VideoDecoder, "video/mpeg, mpegversion=(int){1,2}, systemstream=(boolean)false", {"video/mpeg"}},
2269             {VideoDecoder, "video/x-dirac", { }},
2270             {VideoDecoder, "video/x-flash-video", {"video/flv", "video/x-flv"}},
2271             {Demuxer, "video/quicktime", { }},
2272             {Demuxer, "video/quicktime, variant=(string)3gpp", {"video/3gpp"}},
2273             {Demuxer, "application/x-3gp", { }},
2274             {Demuxer, "video/x-ms-asf", { }},
2275             {Demuxer, "audio/x-aiff", { }},
2276             {Demuxer, "application/x-pn-realaudio", { }},
2277             {Demuxer, "application/vnd.rn-realmedia", { }},
2278             {Demuxer, "audio/x-wav", {"audio/x-wav", "audio/wav", "audio/vnd.wave"}},
2279             {Demuxer, "application/x-hls", {"application/vnd.apple.mpegurl", "application/x-mpegurl"}}
2280         };
2281
2282         for (auto& current : mapping) {
2283             GList* factories = demuxerFactories;
2284             if (current.elementType == AudioDecoder)
2285                 factories = audioDecoderFactories;
2286             else if (current.elementType == VideoDecoder)
2287                 factories = videoDecoderFactories;
2288
2289             if (gstRegistryHasElementForMediaType(factories, current.capsString)) {
2290                 if (!current.webkitMimeTypes.isEmpty()) {
2291                     for (const auto& mimeType : current.webkitMimeTypes)
2292                         set.add(mimeType);
2293                 } else
2294                     set.add(AtomicString(current.capsString));
2295             }
2296         }
2297
2298         bool opusSupported = false;
2299         if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-opus")) {
2300             opusSupported = true;
2301             set.add(AtomicString("audio/opus"));
2302         }
2303
2304         bool vorbisSupported = false;
2305         if (gstRegistryHasElementForMediaType(demuxerFactories, "application/ogg")) {
2306             set.add(AtomicString("application/ogg"));
2307
2308             vorbisSupported = gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-vorbis");
2309             if (vorbisSupported) {
2310                 set.add(AtomicString("audio/ogg"));
2311                 set.add(AtomicString("audio/x-vorbis+ogg"));
2312             }
2313
2314             if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-theora"))
2315                 set.add(AtomicString("video/ogg"));
2316         }
2317
2318         bool audioMpegSupported = false;
2319         if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int)1, layer=(int)[1, 3]")) {
2320             audioMpegSupported = true;
2321             set.add(AtomicString("audio/mp1"));
2322             set.add(AtomicString("audio/mp3"));
2323             set.add(AtomicString("audio/x-mp3"));
2324         }
2325
2326         if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int){2, 4}")) {
2327             audioMpegSupported = true;
2328             set.add(AtomicString("audio/aac"));
2329             set.add(AtomicString("audio/mp2"));
2330             set.add(AtomicString("audio/mp4"));
2331             set.add(AtomicString("audio/x-m4a"));
2332         }
2333
2334         if (audioMpegSupported) {
2335             set.add(AtomicString("audio/mpeg"));
2336             set.add(AtomicString("audio/x-mpeg"));
2337         }
2338
2339         if (gstRegistryHasElementForMediaType(demuxerFactories, "video/x-matroska")) {
2340             set.add(AtomicString("video/x-matroska"));
2341
2342             if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp8")
2343                 || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp9")
2344                 || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp10"))
2345                 set.add(AtomicString("video/webm"));
2346
2347             if (vorbisSupported || opusSupported)
2348                 set.add(AtomicString("audio/webm"));
2349         }
2350
2351         gst_plugin_feature_list_free(audioDecoderFactories);
2352         gst_plugin_feature_list_free(videoDecoderFactories);
2353         gst_plugin_feature_list_free(demuxerFactories);
2354         return set;
2355     }();
2356     return mimeTypes;
2357 }
2358
// Copies the lazily-built, process-wide set of MIME types supported by the
// installed GStreamer plugins (see mimeTypeSet()) into the caller's set.
void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    types = mimeTypeSet();
}
2363
2364 MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::supportsType(const MediaEngineSupportParameters& parameters)
2365 {
2366     MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
2367 #if ENABLE(MEDIA_SOURCE)
2368     // MediaPlayerPrivateGStreamerMSE is in charge of mediasource playback, not us.
2369     if (parameters.isMediaSource)
2370         return result;
2371 #endif
2372
2373 #if !ENABLE(MEDIA_STREAM) || !GST_CHECK_VERSION(1, 10, 0)
2374     if (parameters.isMediaStream)
2375         return result;
2376 #endif
2377
2378     if (parameters.type.isEmpty())
2379         return result;
2380
2381     // Spec says we should not return "probably" if the codecs string is empty.
2382     if (mimeTypeSet().contains(parameters.type.containerType()))
2383         result = parameters.type.codecs().isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;
2384
2385     auto containerType = parameters.type.containerType();
2386     if (containerType == "video/mp4"_s || containerType == "video/webm"_s) {
2387         if (mimeTypeSet().contains(containerType)) {
2388             GList* videoDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, GST_RANK_MARGINAL);
2389             bool av1DecoderFound = gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-av1"_s);
2390             gst_plugin_feature_list_free(videoDecoderFactories);
2391             for (auto& codec : parameters.type.codecs()) {
2392                 if (codec.startsWith("av01"_s)) {
2393                     result = av1DecoderFound ? MediaPlayer::IsSupported : MediaPlayer::IsNotSupported;
2394                     break;
2395                 }
2396                 if (codec.startsWith("av1"_s)) {
2397                     result = MediaPlayer::IsNotSupported;
2398                     break;
2399                 }
2400             }
2401         }
2402     }
2403
2404     return extendedSupportsType(parameters, result);
2405 }
2406
2407 void MediaPlayerPrivateGStreamer::setDownloadBuffering()
2408 {
2409     if (!m_pipeline)
2410         return;
2411
2412     unsigned flags;
2413     g_object_get(m_pipeline.get(), "flags", &flags, nullptr);
2414
2415     unsigned flagDownload = getGstPlayFlag("download");
2416
2417     // We don't want to stop downloading if we already started it.
2418     if (flags & flagDownload && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline) {
2419         GST_DEBUG_OBJECT(pipeline(), "Download already started, not starting again");
2420         return;
2421     }
2422
2423     bool shouldDownload = !isLiveStream() && m_preload == MediaPlayer::Auto;
2424     if (shouldDownload) {
2425         GST_INFO_OBJECT(pipeline(), "Enabling on-disk buffering");
2426         g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
2427         m_fillTimer.startRepeating(200_ms);
2428     } else {
2429         GST_INFO_OBJECT(pipeline(), "Disabling on-disk buffering");
2430         g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr);
2431         m_fillTimer.stop();
2432     }
2433 }
2434
2435 void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
2436 {
2437     GST_DEBUG_OBJECT(pipeline(), "Setting preload to %s", convertEnumerationToString(preload).utf8().data());
2438     if (preload == MediaPlayer::Auto && isLiveStream())
2439         return;
2440
2441     m_preload = preload;
2442     setDownloadBuffering();
2443
2444     if (m_delayingLoad && m_preload != MediaPlayer::None) {
2445         m_delayingLoad = false;
2446         commitLoad();
2447     }
2448 }
2449
// Builds the audio sink handed to playbin's "audio-sink" property.
// Returns either the bare autoaudiosink, or a bin wrapping it (for Web Audio
// capture and/or pitch-preserving scaletempo on GStreamer < 1.4.2).
// Returned elements carry floating references; playbin adopts them.
GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
{
    m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr);
    if (!m_autoAudioSink) {
        GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation");
        return nullptr;
    }

    // Tune properties of the platform sink the moment autoaudiosink creates it.
    g_signal_connect_swapped(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);

    GstElement* audioSinkBin;

    if (webkitGstCheckVersion(1, 4, 2)) {
#if ENABLE(WEB_AUDIO)
        // Wrap the sink in a bin so the AudioSourceProvider can tee off samples.
        audioSinkBin = gst_bin_new("audio-sink");
        ensureAudioSourceProvider();
        m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
        return audioSinkBin;
#else
        // >= 1.4.2 without Web Audio: pitch preservation is handled via the
        // audio-filter playbin property (see createGSTPlayBin), so the plain
        // sink suffices.
        return m_autoAudioSink.get();
#endif
    }

    // Construct audio sink only if pitch preserving is enabled.
    // If GStreamer 1.4.2 is used the audio-filter playbin property is used instead.
    if (m_preservesPitch) {
        GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
        if (!scale) {
            GST_WARNING("Failed to create scaletempo");
            return m_autoAudioSink.get();
        }

        audioSinkBin = gst_bin_new("audio-sink");
        gst_bin_add(GST_BIN(audioSinkBin), scale);
        // Ghost pad exposes scaletempo's sink pad as the bin's input.
        GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(scale, "sink"));
        gst_element_add_pad(audioSinkBin, gst_ghost_pad_new("sink", pad.get()));

#if ENABLE(WEB_AUDIO)
        ensureAudioSourceProvider();
        m_audioSourceProvider->configureAudioBin(audioSinkBin, scale);
#else
        // scaletempo -> audioconvert -> audioresample -> autoaudiosink.
        GstElement* convert = gst_element_factory_make("audioconvert", nullptr);
        GstElement* resample = gst_element_factory_make("audioresample", nullptr);

        gst_bin_add_many(GST_BIN(audioSinkBin), convert, resample, m_autoAudioSink.get(), nullptr);

        if (!gst_element_link_many(scale, convert, resample, m_autoAudioSink.get(), nullptr)) {
            GST_WARNING("Failed to link audio sink elements");
            // Dropping the bin destroys the elements it adopted; m_autoAudioSink
            // survives because the GRefPtr member still holds its own reference.
            gst_object_unref(audioSinkBin);
            return m_autoAudioSink.get();
        }
#endif
        return audioSinkBin;
    }

#if ENABLE(WEB_AUDIO)
    // Pre-1.4.2 with Web Audio but without pitch preservation.
    audioSinkBin = gst_bin_new("audio-sink");
    ensureAudioSourceProvider();
    m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
    return audioSinkBin;
#endif
    ASSERT_NOT_REACHED();
    return nullptr;
}
2514
2515 GstElement* MediaPlayerPrivateGStreamer::audioSink() const
2516 {
2517     GstElement* sink;
2518     g_object_get(m_pipeline.get(), "audio-sink", &sink, nullptr);
2519     return sink;
2520 }
2521
2522 #if ENABLE(WEB_AUDIO)
2523 void MediaPlayerPrivateGStreamer::ensureAudioSourceProvider()
2524 {
2525     if (!m_audioSourceProvider)
2526         m_audioSourceProvider = std::make_unique<AudioSourceProviderGStreamer>();
2527 }
2528
// Returns the (lazily created) Web Audio source provider. Never nullptr; the
// player keeps ownership of the provider.
AudioSourceProvider* MediaPlayerPrivateGStreamer::audioSourceProvider()
{
    ensureAudioSourceProvider();
    return m_audioSourceProvider.get();
}
2534 #endif
2535
// Creates (or reuses) the playbin pipeline and wires up sinks, bus watch and
// signal handlers. playbinName may be null (keep/choose default) or force a
// specific playbin factory ("playbin"/"playbin3"); pipelineName, if non-empty,
// names the pipeline object for debugging.
void MediaPlayerPrivateGStreamer::createGSTPlayBin(const gchar* playbinName, const String& pipelineName)
{
    if (m_pipeline) {
        // No specific playbin requested: keep the existing pipeline.
        if (!playbinName) {
            GST_INFO_OBJECT(pipeline(), "Keeping same playbin as nothing forced");
            return;
        }

        // Requested factory matches the current one: nothing to do.
        if (!g_strcmp0(GST_OBJECT_NAME(gst_element_get_factory(m_pipeline.get())), playbinName)) {
            GST_INFO_OBJECT(pipeline(), "Already using %s", playbinName);
            return;
        }

        // Different playbin flavor requested: tear the old pipeline down.
        GST_INFO_OBJECT(pipeline(), "Tearing down as we need to use %s now.",
            playbinName);
        changePipelineState(GST_STATE_NULL);
        m_pipeline = nullptr;
    }

    ASSERT(!m_pipeline);

#if GST_CHECK_VERSION(1, 10, 0)
    // Environment override to opt into the playbin3 code path.
    if (g_getenv("USE_PLAYBIN3"))
        playbinName = "playbin3";
#else
    // playbin3 requires GStreamer >= 1.10; force the legacy element.
    playbinName = "playbin";
#endif

    if (!playbinName)
        playbinName = "playbin";

    m_isLegacyPlaybin = !g_strcmp0(playbinName, "playbin");

    // gst_element_factory_make() returns a floating reference so
    // we should not adopt.
    setPipeline(gst_element_factory_make(playbinName,
        pipelineName.isEmpty() ? String::format("play_%p", this).utf8().data() : pipelineName.utf8().data()));
    setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get()));

    GST_INFO_OBJECT(pipeline(), "Using legacy playbin element: %s", boolForPrinting(m_isLegacyPlaybin));

    // Let also other listeners subscribe to (application) messages in this bus.
    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    gst_bus_add_signal_watch_full(bus.get(), RunLoopSourcePriority::RunLoopDispatcher);
    g_signal_connect(bus.get(), "message", G_CALLBACK(busMessageCallback), this);

    // Start out with the mute state the MediaPlayer already has.
    g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);

    g_signal_connect_swapped(m_pipeline.get(), "source-setup", G_CALLBACK(sourceSetupCallback), this);
    // The *-changed signals only exist on legacy playbin; playbin3 uses
    // stream-collection messages instead.
    if (m_isLegacyPlaybin) {
        g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
        g_signal_connect_swapped(m_pipeline.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
    }

#if ENABLE(VIDEO_TRACK)
    if (m_isLegacyPlaybin)
        g_signal_connect_swapped(m_pipeline.get(), "text-changed", G_CALLBACK(textChangedCallback), this);

    // Route subtitle streams through WebKit's combiner and app sink so cues
    // are delivered to the track machinery instead of rendered by GStreamer.
    GstElement* textCombiner = webkitTextCombinerNew();
    ASSERT(textCombiner);
    g_object_set(m_pipeline.get(), "text-stream-combiner", textCombiner, nullptr);

    m_textAppSink = webkitTextSinkNew();
    ASSERT(m_textAppSink);

    m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
    ASSERT(m_textAppSinkPad);

    // NOTE(review): caps name differs across GStreamer versions; 1.13+ uses
    // the application/x-subtitle-vtt media type for WebVTT.
    GRefPtr<GstCaps> textCaps;
    if (webkitGstCheckVersion(1, 13, 0))
        textCaps = adoptGRef(gst_caps_new_empty_simple("application/x-subtitle-vtt"));
    else
        textCaps = adoptGRef(gst_caps_new_empty_simple("text/vtt"));
    g_object_set(m_textAppSink.get(), "emit-signals", TRUE, "enable-last-sample", FALSE, "caps", textCaps.get(), nullptr);
    g_signal_connect_swapped(m_textAppSink.get(), "new-sample", G_CALLBACK(newTextSampleCallback), this);

    g_object_set(m_pipeline.get(), "text-sink", m_textAppSink.get(), nullptr);
#endif

    g_object_set(m_pipeline.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);

    configurePlaySink();

    // On 1.4.2 and newer we use the audio-filter property instead.
    // See https://bugzilla.gnome.org/show_bug.cgi?id=735748 for
    // the reason for using >= 1.4.2 instead of >= 1.4.0.
    if (m_preservesPitch && webkitGstCheckVersion(1, 4, 2)) {
        GstElement* scale = gst_element_factory_make("scaletempo", nullptr);

        if (!scale)
            GST_WARNING("Failed to create scaletempo");
        else
            g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
    }

    if (!m_renderingCanBeAccelerated) {
        // If not using accelerated compositing, let GStreamer handle
        // the image-orientation tag.
        // "method" 8 selects automatic rotation based on the tag.
        GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
        if (videoFlip) {
            g_object_set(videoFlip, "method", 8, nullptr);
            g_object_set(m_pipeline.get(), "video-filter", videoFlip, nullptr);
        } else
            GST_WARNING("The videoflip element is missing, video rotation support is now disabled. Please check your gst-plugins-good installation.");
    }

    // Track caps changes on the video sink pad to detect size changes.
    GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
    if (videoSinkPad)
        g_signal_connect_swapped(videoSinkPad.get(), "notify::caps", G_CALLBACK(videoSinkCapsChangedCallback), this);
}
2646
2647 void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
2648 {
2649     GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_pipeline.get()), GST_STATE_PAUSED);
2650     gst_element_post_message(m_pipeline.get(), message);
2651 }
2652
2653 bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
2654 {
2655     if (WEBKIT_IS_WEB_SRC(m_source.get()))
2656         return webKitSrcPassedCORSAccessCheck(WEBKIT_WEB_SRC(m_source.get()));
2657     return false;
2658 }
2659
2660 bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
2661 {
2662     if (isLiveStream())
2663         return false;
2664
2665     if (m_url.isLocalFile())
2666         return true;
2667
2668     if (m_url.protocolIsInHTTPFamily())
2669         return true;
2670
2671     return false;
2672 }
2673
// Returns whether playback has tainted the given origin, or nullopt if
// unknown. The origin parameter is currently ignored (see below).
std::optional<bool> MediaPlayerPrivateGStreamer::wouldTaintOrigin(const SecurityOrigin&) const
{
    // Ideally the given origin should always be verified with
    // webKitSrcWouldTaintOrigin() instead of only checking it for
    // adaptive-streaming-statistics. We can't do this yet because HLS fragments
    // are currently downloaded independently from WebKit.
    // See also https://bugs.webkit.org/show_bug.cgi?id=189967.
    return m_hasTaintedOrigin;
}
2683
2684
2685 }
2686
2687 #endif // USE(GSTREAMER)