Include audio/vnd.wave as a valid mime-type for wav files.
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamer.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015, 2016 Igalia S.L
7  * Copyright (C) 2014 Cable Television Laboratories, Inc.
8  * Copyright (C) 2015, 2016 Metrological Group B.V.
9  *
10  * This library is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Library General Public
12  * License as published by the Free Software Foundation; either
13  * version 2 of the License, or (at your option) any later version.
14  *
15  * This library is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18  * Library General Public License for more details.
19  *
20  * You should have received a copy of the GNU Library General Public License
21  * along with this library; see the file COPYING.LIB.  If not, write to
22  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
23  * Boston, MA 02110-1301, USA.
24  */
25
26 #include "config.h"
27 #include "MediaPlayerPrivateGStreamer.h"
28
29 #if ENABLE(VIDEO) && USE(GSTREAMER)
30
31 #include "FileSystem.h"
32 #include "GStreamerUtilities.h"
33 #include "URL.h"
34 #include "MIMETypeRegistry.h"
35 #include "MediaPlayer.h"
36 #include "MediaPlayerRequestInstallMissingPluginsCallback.h"
37 #include "NotImplemented.h"
38 #include "SecurityOrigin.h"
39 #include "TimeRanges.h"
40 #include "WebKitWebSourceGStreamer.h"
41 #include <glib.h>
42 #include <gst/gst.h>
43 #include <gst/pbutils/missing-plugins.h>
44 #include <limits>
45 #include <wtf/HexNumber.h>
46 #include <wtf/MediaTime.h>
47 #include <wtf/NeverDestroyed.h>
48 #include <wtf/glib/GUniquePtr.h>
49 #include <wtf/glib/RunLoopSourcePriority.h>
50 #include <wtf/text/CString.h>
51
52 #if ENABLE(VIDEO_TRACK)
53 #include "AudioTrackPrivateGStreamer.h"
54 #include "InbandMetadataTextTrackPrivateGStreamer.h"
55 #include "InbandTextTrackPrivateGStreamer.h"
56 #include "TextCombinerGStreamer.h"
57 #include "TextSinkGStreamer.h"
58 #include "VideoTrackPrivateGStreamer.h"
59 #endif
60
61 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
62 #define GST_USE_UNSTABLE_API
63 #include <gst/mpegts/mpegts.h>
64 #undef GST_USE_UNSTABLE_API
65 #endif
66 #include <gst/audio/streamvolume.h>
67
68 #if ENABLE(MEDIA_SOURCE)
69 #include "MediaSource.h"
70 #include "WebKitMediaSourceGStreamer.h"
71 #endif
72
73 #if ENABLE(WEB_AUDIO)
74 #include "AudioSourceProviderGStreamer.h"
75 #endif
76
77 GST_DEBUG_CATEGORY_EXTERN(webkit_media_player_debug);
78 #define GST_CAT_DEFAULT webkit_media_player_debug
79
80 using namespace std;
81
82 namespace WebCore {
83
// Trampoline for GstBus "message" signal: forwards every bus message to the
// owning player instance. The signature is dictated by the GSignal machinery.
static void busMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
{
    player->handleMessage(message);
}
88
// Trampoline for the audio sink's child-added signal: forwards to the
// instance method that tags the sink with a media role.
void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
{
    player->setAudioStreamProperties(object);
}
93
94 void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
95 {
96     if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
97         return;
98
99     const char* role = m_player->client().mediaPlayerIsVideo() ? "video" : "music";
100     GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr);
101     g_object_set(object, "stream-properties", structure, nullptr);
102     gst_structure_free(structure);
103     GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
104     GST_DEBUG("Set media.role as %s at %s", role, elementName.get());
105 }
106
// Registers this engine with the global MediaPlayer machinery so it can be
// selected as a playback backend — but only when GStreamer initialized and
// a playbin factory is available (see isAvailable()).
void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamer>(player); },
            getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
}
113
114 bool MediaPlayerPrivateGStreamer::isAvailable()
115 {
116     if (!MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements())
117         return false;
118
119     GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
120     return factory;
121 }
122
// Constructs the player in its idle state. No GStreamer pipeline exists yet;
// it is created lazily by load() via createGSTPlayBin(). All flags start at
// their inert defaults.
MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
    : MediaPlayerPrivateGStreamerBase(player)
    , m_buffering(false)
    , m_bufferingPercentage(0)
    , m_canFallBackToLastFinishedSeekPosition(false)
    , m_changingRate(false)
    , m_downloadFinished(false)
    , m_errorOccured(false)
    , m_isEndReached(false)
    , m_isStreaming(false)
    , m_durationAtEOS(0)
    , m_paused(true)
    , m_playbackRate(1)
    , m_requestedState(GST_STATE_VOID_PENDING)
    , m_resetPipeline(false)
    , m_seeking(false)
    , m_seekIsPending(false)
    , m_seekTime(0)
    , m_source(nullptr)
    , m_volumeAndMuteInitialized(false)
    , m_weakPtrFactory(this)
    , m_mediaLocations(nullptr)
    , m_mediaLocationCurrentIndex(0)
    , m_playbackRatePause(false)
    , m_timeOfOverlappingSeek(-1) // -1: no overlapping seek recorded (see seek()).
    , m_lastPlaybackRate(1)
    , m_fillTimer(*this, &MediaPlayerPrivateGStreamer::fillTimerFired)
    , m_maxTimeLoaded(0)
    , m_preload(player->preload())
    , m_delayingLoad(false)
    , m_maxTimeLoadedAtLastDidLoadingProgress(0)
    , m_hasVideo(false)
    , m_hasAudio(false)
    , m_readyTimerHandler(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::readyTimerFired)
    , m_totalBytes(0)
    , m_preservesPitch(false)
{
#if USE(GLIB)
    // Freeing pipeline resources is low priority work; run it at idle.
    m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
#endif
}
164
// Tears the player down: detaches track proxies, disconnects every signal
// handler that captured `this`, and removes the bus watch, so no callback
// can fire into a destroyed object.
MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
{
#if ENABLE(VIDEO_TRACK)
    // Detach all track proxies so they stop referencing this player.
    for (size_t i = 0; i < m_audioTracks.size(); ++i)
        m_audioTracks[i]->disconnect();

    for (size_t i = 0; i < m_textTracks.size(); ++i)
        m_textTracks[i]->disconnect();

    for (size_t i = 0; i < m_videoTracks.size(); ++i)
        m_videoTracks[i]->disconnect();
#endif
    if (m_fillTimer.isActive())
        m_fillTimer.stop();

    if (m_mediaLocations) {
        gst_structure_free(m_mediaLocations);
        m_mediaLocations = nullptr;
    }

    // The element-added handler was installed on the source's parent bin;
    // disconnect it there (only if the source was ever parented).
    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    if (m_autoAudioSink)
        g_signal_handlers_disconnect_by_func(G_OBJECT(m_autoAudioSink.get()),
            reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);

    m_readyTimerHandler.stop();
    if (m_missingPluginsCallback) {
        m_missingPluginsCallback->invalidate();
        m_missingPluginsCallback = nullptr;
    }

    // Drop any handlers connected on the video sink pad with `this` as data.
    if (m_videoSink) {
        GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
        g_signal_handlers_disconnect_matched(videoSinkPad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
    }

    if (m_pipeline) {
        // Remove the bus message watch and sync handler before the pipeline
        // goes away so no message callback can reach this object anymore.
        GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
        ASSERT(bus);
        g_signal_handlers_disconnect_by_func(bus.get(), gpointer(busMessageCallback), this);
        gst_bus_remove_signal_watch(bus.get());
        gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
        g_signal_handlers_disconnect_matched(m_pipeline.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
    }
}
212
// Starts loading the given URL: creates the playbin pipeline on first use,
// points it at the (cleaned) URL and resets network/ready state. When the
// preload hint is None, the actual load is deferred until prepareToPlay().
void MediaPlayerPrivateGStreamer::load(const String& urlString)
{
    if (!MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements())
        return;

    URL url(URL(), urlString);
    if (url.isBlankURL())
        return;

    // Clean out everything after file:// url path.
    String cleanURL(urlString);
    if (url.isLocalFile())
        cleanURL = cleanURL.substring(0, url.pathEnd());

    // The pipeline is created lazily on first load.
    if (!m_pipeline)
        createGSTPlayBin();

    if (m_fillTimer.isActive())
        m_fillTimer.stop();

    ASSERT(m_pipeline);

    m_url = URL(URL(), cleanURL);
    g_object_set(m_pipeline.get(), "uri", cleanURL.utf8().data(), nullptr);

    GST_INFO("Load %s", cleanURL.utf8().data());

    if (m_preload == MediaPlayer::None) {
        GST_DEBUG("Delaying load.");
        m_delayingLoad = true;
    }

    // Reset network and ready states. Those will be set properly once
    // the pipeline pre-rolled.
    m_networkState = MediaPlayer::Loading;
    m_player->networkStateChanged();
    m_readyState = MediaPlayer::HaveNothing;
    m_player->readyStateChanged();
    m_volumeAndMuteInitialized = false;
    m_durationAtEOS = 0;

    if (!m_delayingLoad)
        commitLoad();
}
257
258 #if ENABLE(MEDIA_SOURCE)
// This engine does not implement MSE playback: report a format error so the
// global MediaPlayer falls back to the next MediaPlayerPrivate in line.
void MediaPlayerPrivateGStreamer::load(const String&, MediaSourcePrivateClient*)
{
    // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
}
265 #endif
266
267 #if ENABLE(MEDIA_STREAM)
// MediaStream playback is not supported by this engine.
void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate&)
{
    notImplemented();
}
272 #endif
273
274 void MediaPlayerPrivateGStreamer::commitLoad()
275 {
276     ASSERT(!m_delayingLoad);
277     GST_DEBUG("Committing load.");
278
279     // GStreamer needs to have the pipeline set to a paused state to
280     // start providing anything useful.
281     changePipelineState(GST_STATE_PAUSED);
282
283     setDownloadBuffering();
284     updateStates();
285 }
286
287 double MediaPlayerPrivateGStreamer::playbackPosition() const
288 {
289     if (m_isEndReached) {
290         // Position queries on a null pipeline return 0. If we're at
291         // the end of the stream the pipeline is null but we want to
292         // report either the seek time or the duration because this is
293         // what the Media element spec expects us to do.
294         if (m_seeking)
295             return m_seekTime;
296
297         MediaTime mediaDuration = durationMediaTime();
298         if (mediaDuration)
299             return mediaDuration.toDouble();
300         return 0;
301     }
302
303     // Position is only available if no async state change is going on and the state is either paused or playing.
304     gint64 position = GST_CLOCK_TIME_NONE;
305     GstQuery* query= gst_query_new_position(GST_FORMAT_TIME);
306     if (gst_element_query(m_pipeline.get(), query))
307         gst_query_parse_position(query, 0, &position);
308     gst_query_unref(query);
309
310     GST_DEBUG("Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));
311
312     double result = 0.0f;
313     if (static_cast<GstClockTime>(position) != GST_CLOCK_TIME_NONE) {
314         GTimeVal timeValue;
315         GST_TIME_TO_TIMEVAL(position, timeValue);
316         result = static_cast<double>(timeValue.tv_sec + (timeValue.tv_usec / 1000000.0));
317     } else if (m_canFallBackToLastFinishedSeekPosition)
318         result = m_seekTime;
319
320     return result;
321 }
322
// Fired when the pipeline stayed in READY too long (see changePipelineState()):
// drop to NULL to release pipeline resources.
void MediaPlayerPrivateGStreamer::readyTimerFired()
{
    changePipelineState(GST_STATE_NULL);
}
327
// Requests a pipeline state change, skipping requests that are already the
// current or pending state. Returns false only when the synchronous part of
// the state change failed. Also manages the READY-timeout timer.
bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
{
    ASSERT(m_pipeline);

    GstState currentState;
    GstState pending;

    gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
    // Already there (or on the way there): report success without re-requesting.
    if (currentState == newState || pending == newState) {
        GST_DEBUG("Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
            gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
        return true;
    }

    GST_DEBUG("Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState),
        gst_element_state_get_name(currentState), gst_element_state_get_name(pending));

    GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState);
    GstState pausedOrPlaying = newState == GST_STATE_PLAYING ? GST_STATE_PAUSED : GST_STATE_PLAYING;
    // A failure while already between PAUSED and PLAYING is tolerated here;
    // only other failures are reported to the caller.
    if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE) {
        return false;
    }

    // Create a timer when entering the READY state so that we can free resources
    // if we stay for too long on READY.
    // Also lets remove the timer if we request a state change for any state other than READY.
    // See also https://bugs.webkit.org/show_bug.cgi?id=117354
    if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
        // Max interval in seconds to stay in the READY state on manual
        // state change requests.
        static const Seconds readyStateTimerDelay { 1_min };
        m_readyTimerHandler.startOneShot(readyStateTimerDelay);
    } else if (newState != GST_STATE_READY)
        m_readyTimerHandler.stop();

    return true;
}
365
366 void MediaPlayerPrivateGStreamer::prepareToPlay()
367 {
368     m_preload = MediaPlayer::Auto;
369     if (m_delayingLoad) {
370         m_delayingLoad = false;
371         commitLoad();
372     }
373 }
374
375 void MediaPlayerPrivateGStreamer::play()
376 {
377     if (!m_playbackRate) {
378         m_playbackRatePause = true;
379         return;
380     }
381
382     if (changePipelineState(GST_STATE_PLAYING)) {
383         m_isEndReached = false;
384         m_delayingLoad = false;
385         m_preload = MediaPlayer::Auto;
386         setDownloadBuffering();
387         GST_DEBUG("Play");
388     } else {
389         loadingFailed(MediaPlayer::Empty);
390     }
391 }
392
393 void MediaPlayerPrivateGStreamer::pause()
394 {
395     m_playbackRatePause = false;
396     GstState currentState, pendingState;
397     gst_element_get_state(m_pipeline.get(), &currentState, &pendingState, 0);
398     if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
399         return;
400
401     if (changePipelineState(GST_STATE_PAUSED))
402         GST_INFO("Pause");
403     else
404         loadingFailed(MediaPlayer::Empty);
405 }
406
407 MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
408 {
409     if (!m_pipeline)
410         return { };
411
412     if (m_errorOccured)
413         return { };
414
415     if (m_durationAtEOS)
416         return MediaTime::createWithDouble(m_durationAtEOS);
417
418     // The duration query would fail on a not-prerolled pipeline.
419     if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
420         return { };
421
422     GstFormat timeFormat = GST_FORMAT_TIME;
423     gint64 timeLength = 0;
424
425     bool failure = !gst_element_query_duration(m_pipeline.get(), timeFormat, &timeLength) || static_cast<guint64>(timeLength) == GST_CLOCK_TIME_NONE;
426     if (failure) {
427         GST_DEBUG("Time duration query failed for %s", m_url.string().utf8().data());
428         return MediaTime::positiveInfiniteTime();
429     }
430
431     GST_DEBUG("Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(timeLength));
432
433     return MediaTime::createWithDouble(static_cast<double>(timeLength) / GST_SECOND);
434     // FIXME: handle 3.14.9.5 properly
435 }
436
437 MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
438 {
439     if (!m_pipeline)
440         return { };
441
442     if (m_errorOccured)
443         return { };
444
445     if (m_seeking)
446         return MediaTime::createWithFloat(m_seekTime);
447
448     // Workaround for
449     // https://bugzilla.gnome.org/show_bug.cgi?id=639941 In GStreamer
450     // 0.10.35 basesink reports wrong duration in case of EOS and
451     // negative playback rate. There's no upstream accepted patch for
452     // this bug yet, hence this temporary workaround.
453     if (m_isEndReached && m_playbackRate < 0)
454         return { };
455
456     return MediaTime::createWithDouble(playbackPosition());
457 }
458
// Starts (or queues) a seek to |time| seconds. The flushing seek is only
// performed on a prerolled pipeline; otherwise it is recorded as pending
// and executed later. Overlapping seeks remember the most recent target.
void MediaPlayerPrivateGStreamer::seek(float time)
{
    if (!m_pipeline)
        return;

    if (m_errorOccured)
        return;

    GST_INFO("[Seek] seek attempt to %f secs", time);

    // Avoid useless seeking.
    if (MediaTime::createWithFloat(time) == currentMediaTime())
        return;

    // Live streams cannot be seeked.
    if (isLiveStream())
        return;

    GstClockTime clockTime = toGstClockTime(time);
    GST_INFO("[Seek] seeking to %" GST_TIME_FORMAT " (%f)", GST_TIME_ARGS(clockTime), time);

    if (m_seeking) {
        // A seek is already in flight: remember this newer target.
        m_timeOfOverlappingSeek = time;
        if (m_seekIsPending) {
            m_seekTime = time;
            return;
        }
    }

    GstState state;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
    if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
        GST_DEBUG("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
        return;
    }
    if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
        // Pipeline not prerolled yet (or at EOS): defer the seek until it
        // reaches PAUSED. At EOS the pipeline additionally needs a reset.
        m_seekIsPending = true;
        if (m_isEndReached) {
            GST_DEBUG("[Seek] reset pipeline");
            m_resetPipeline = true;
            if (!changePipelineState(GST_STATE_PAUSED))
                loadingFailed(MediaPlayer::Empty);
        }
    } else {
        // We can seek now.
        if (!doSeek(clockTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
            GST_DEBUG("[Seek] seeking to %f failed", time);
            return;
        }
    }

    m_seeking = true;
    m_seekTime = time;
    m_isEndReached = false;
}
513
514 bool MediaPlayerPrivateGStreamer::doSeek(gint64 position, float rate, GstSeekFlags seekType)
515 {
516     gint64 startTime, endTime;
517
518     // TODO: Should do more than that, need to notify the media source
519     // and probably flush the pipeline at least.
520     if (isMediaSource())
521         return true;
522
523     if (rate > 0) {
524         startTime = position;
525         endTime = GST_CLOCK_TIME_NONE;
526     } else {
527         startTime = 0;
528         // If we are at beginning of media, start from the end to
529         // avoid immediate EOS.
530         if (position < 0)
531             endTime = static_cast<gint64>(durationMediaTime().toDouble() * GST_SECOND);
532         else
533             endTime = position;
534     }
535
536     if (!rate)
537         rate = 1.0;
538
539     return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType,
540         GST_SEEK_TYPE_SET, startTime, GST_SEEK_TYPE_SET, endTime);
541 }
542
// Applies a pending playback-rate change by issuing a flushing seek at the
// current position with the new rate, muting audio for extreme or negative
// rates. On failure the previous rate is restored. Also resumes playback if
// play() was requested while the rate was zero.
void MediaPlayerPrivateGStreamer::updatePlaybackRate()
{
    if (!m_changingRate)
        return;

    float currentPosition = static_cast<float>(playbackPosition() * GST_SECOND);
    bool mute = false;

    GST_INFO("Set Rate to %f", m_playbackRate);

    if (m_playbackRate > 0) {
        // Mute the sound if the playback rate is too extreme and
        // audio pitch is not adjusted.
        mute = (!m_preservesPitch && (m_playbackRate < 0.8 || m_playbackRate > 2));
    } else {
        // Negative rate: seek backwards from the current position; a zero
        // position is replaced by -1 (handled in doSeek()), and audio is
        // always muted.
        if (currentPosition == 0.0f)
            currentPosition = -1.0f;
        mute = true;
    }

    GST_INFO("Need to mute audio?: %d", (int) mute);
    if (doSeek(currentPosition, m_playbackRate, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH))) {
        g_object_set(m_pipeline.get(), "mute", mute, nullptr);
        m_lastPlaybackRate = m_playbackRate;
    } else {
        // Seek failed: roll the rate back to the last one that worked.
        m_playbackRate = m_lastPlaybackRate;
        GST_ERROR("Set rate to %f failed", m_playbackRate);
    }

    if (m_playbackRatePause) {
        GstState state;
        GstState pending;

        // play() was requested while the rate was 0; resume now.
        gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
        if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
            changePipelineState(GST_STATE_PLAYING);
        m_playbackRatePause = false;
    }

    m_changingRate = false;
    m_player->rateChanged();
}
585
586 bool MediaPlayerPrivateGStreamer::paused() const
587 {
588     if (m_isEndReached) {
589         GST_DEBUG("Ignoring pause at EOS");
590         return true;
591     }
592
593     if (m_playbackRatePause)
594         return false;
595
596     GstState state;
597     gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
598     return state <= GST_STATE_PAUSED;
599 }
600
// True while a seek (pending or in flight) has not completed yet.
bool MediaPlayerPrivateGStreamer::seeking() const
{
    return m_seeking;
}
605
// Trampoline for playbin's video-changed signal: bounces the notification
// to the main thread before updating the video track list.
void MediaPlayerPrivateGStreamer::videoChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier->notify(MainThreadNotification::VideoChanged, [player] { player->notifyPlayerOfVideo(); });
}
610
// Synchronizes m_videoTracks with the pipeline's "n-video" count: reuses
// existing entries whose pad is unchanged, creates proxies for new pads and
// drops trailing stale ones. For MSE, tracks are managed by the source.
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-video", &numTracks, nullptr);

    m_hasVideo = numTracks > 0;
    if (m_hasVideo)
        m_player->sizeChanged();

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        m_player->client().mediaPlayerEngineUpdated(m_player);
        return;
    }

#if ENABLE(VIDEO_TRACK)
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        if (i < static_cast<gint>(m_videoTracks.size())) {
            RefPtr<VideoTrackPrivateGStreamer> existingTrack = m_videoTracks[i];
            existingTrack->setIndex(i);
            // Same pad at this index: nothing to update.
            if (existingTrack->pad() == pad)
                continue;
        }

        // NOTE(review): unlike notifyPlayerOfAudio()/notifyPlayerOfText(),
        // which insert the new track at index i, this appends at the end —
        // confirm whether the divergence is intentional.
        RefPtr<VideoTrackPrivateGStreamer> track = VideoTrackPrivateGStreamer::create(m_pipeline, i, pad);
        m_videoTracks.append(track);
        m_player->addVideoTrack(*track);
    }

    // Remove tracks beyond the current count, back to front.
    while (static_cast<gint>(m_videoTracks.size()) > numTracks) {
        RefPtr<VideoTrackPrivateGStreamer> track = m_videoTracks.last();
        track->disconnect();
        m_videoTracks.removeLast();
        m_player->removeVideoTrack(*track);
    }
#endif

    m_player->client().mediaPlayerEngineUpdated(m_player);
}
659
// Trampoline for video sink caps changes: bounces to the main thread.
void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier->notify(MainThreadNotification::VideoCapsChanged, [player] { player->notifyPlayerOfVideoCaps(); });
}
664
// Video caps changed: invalidate the cached natural size so it is
// recomputed on next access, and tell the client the engine updated.
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideoCaps()
{
    m_videoSize = IntSize();
    m_player->client().mediaPlayerEngineUpdated(m_player);
}
670
// Trampoline for playbin's audio-changed signal: bounces the notification
// to the main thread before updating the audio track list.
void MediaPlayerPrivateGStreamer::audioChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier->notify(MainThreadNotification::AudioChanged, [player] { player->notifyPlayerOfAudio(); });
}
675
// Synchronizes m_audioTracks with the pipeline's "n-audio" count: reuses
// existing entries whose pad is unchanged, inserts proxies for new pads and
// drops trailing stale ones. For MSE, tracks are managed by the source.
void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-audio", &numTracks, nullptr);

    m_hasAudio = numTracks > 0;

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        m_player->client().mediaPlayerEngineUpdated(m_player);
        return;
    }

#if ENABLE(VIDEO_TRACK)
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        if (i < static_cast<gint>(m_audioTracks.size())) {
            RefPtr<AudioTrackPrivateGStreamer> existingTrack = m_audioTracks[i];
            existingTrack->setIndex(i);
            // Same pad at this index: nothing to update.
            if (existingTrack->pad() == pad)
                continue;
        }

        RefPtr<AudioTrackPrivateGStreamer> track = AudioTrackPrivateGStreamer::create(m_pipeline, i, pad);
        m_audioTracks.insert(i, track);
        m_player->addAudioTrack(*track);
    }

    // Remove tracks beyond the current count, back to front.
    while (static_cast<gint>(m_audioTracks.size()) > numTracks) {
        RefPtr<AudioTrackPrivateGStreamer> track = m_audioTracks.last();
        track->disconnect();
        m_audioTracks.removeLast();
        m_player->removeAudioTrack(*track);
    }
#endif

    m_player->client().mediaPlayerEngineUpdated(m_player);
}
722
723 #if ENABLE(VIDEO_TRACK)
// Trampoline for playbin's text-changed signal: bounces the notification
// to the main thread before updating the text track list.
void MediaPlayerPrivateGStreamer::textChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier->notify(MainThreadNotification::TextChanged, [player] { player->notifyPlayerOfText(); });
}
728
// Synchronizes m_textTracks with the pipeline's "n-text" count: reuses
// existing entries whose pad is unchanged, inserts proxies for new pads and
// drops trailing stale ones. For MSE, tracks are managed by the source.
void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-text", &numTracks, nullptr);

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        return;
    }

    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        if (i < static_cast<gint>(m_textTracks.size())) {
            RefPtr<InbandTextTrackPrivateGStreamer> existingTrack = m_textTracks[i];
            existingTrack->setIndex(i);
            // Same pad at this index: nothing to update.
            if (existingTrack->pad() == pad)
                continue;
        }

        RefPtr<InbandTextTrackPrivateGStreamer> track = InbandTextTrackPrivateGStreamer::create(i, pad);
        m_textTracks.insert(i, track);
        m_player->addTextTrack(*track);
    }

    // Remove tracks beyond the current count, back to front.
    while (static_cast<gint>(m_textTracks.size()) > numTracks) {
        RefPtr<InbandTextTrackPrivateGStreamer> track = m_textTracks.last();
        track->disconnect();
        m_textTracks.removeLast();
        m_player->removeTextTrack(*track);
    }
}
768
// Trampoline for the text app sink's new-sample signal; always reports
// GST_FLOW_OK so the sink keeps streaming.
GstFlowReturn MediaPlayerPrivateGStreamer::newTextSampleCallback(MediaPlayerPrivateGStreamer* player)
{
    player->newTextSample();
    return GST_FLOW_OK;
}
774
775 void MediaPlayerPrivateGStreamer::newTextSample()
776 {
777     if (!m_textAppSink)
778         return;
779
780     GRefPtr<GstEvent> streamStartEvent = adoptGRef(
781         gst_pad_get_sticky_event(m_textAppSinkPad.get(), GST_EVENT_STREAM_START, 0));
782
783     GRefPtr<GstSample> sample;
784     g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr);
785     ASSERT(sample);
786
787     if (streamStartEvent) {
788         bool found = FALSE;
789         const gchar* id;
790         gst_event_parse_stream_start(streamStartEvent.get(), &id);
791         for (size_t i = 0; i < m_textTracks.size(); ++i) {
792             RefPtr<InbandTextTrackPrivateGStreamer> track = m_textTracks[i];
793             if (track->streamId() == id) {
794                 track->handleSample(sample);
795                 found = true;
796                 break;
797             }
798         }
799         if (!found)
800             GST_WARNING("Got sample with unknown stream ID.");
801     } else
802         GST_WARNING("Unable to handle sample with no stream start event.");
803 }
804 #endif
805
// Requests a new playback rate. The rate is clamped to [-20, 20], duplicate
// requests only re-notify the upper layers, live streams reject rate
// changes, a zero rate pauses the pipeline, and the actual change is
// applied via updatePlaybackRate() once the pipeline state allows it.
void MediaPlayerPrivateGStreamer::setRate(float rate)
{
    // Higher rate causes crash.
    rate = clampTo(rate, -20.0, 20.0);

    // Avoid useless playback rate update.
    if (m_playbackRate == rate) {
        // and make sure that upper layers were notified if rate was set

        if (!m_changingRate && m_player->rate() != m_playbackRate)
            m_player->rateChanged();
        return;
    }

    if (isLiveStream()) {
        // notify upper layers that we cannot handle passed rate.
        m_changingRate = false;
        m_player->rateChanged();
        return;
    }

    GstState state;
    GstState pending;

    m_playbackRate = rate;
    m_changingRate = true;

    gst_element_get_state(m_pipeline.get(), &state, &pending, 0);

    // Rate 0 means pause: remember that playback should resume on the next
    // nonzero rate (see m_playbackRatePause in updatePlaybackRate()).
    if (!rate) {
        m_changingRate = false;
        m_playbackRatePause = true;
        if (state != GST_STATE_PAUSED && pending != GST_STATE_PAUSED)
            changePipelineState(GST_STATE_PAUSED);
        return;
    }

    // Defer the rate change while the pipeline is outside PLAYING/PAUSED or
    // still transitioning to PAUSED; it will be applied later.
    if ((state != GST_STATE_PLAYING && state != GST_STATE_PAUSED)
        || (pending == GST_STATE_PAUSED))
        return;

    updatePlaybackRate();
}
849
// Returns the currently requested playback rate.
double MediaPlayerPrivateGStreamer::rate() const
{
    return m_playbackRate;
}
854
// Records whether audio pitch should be preserved on rate changes; consulted
// by updatePlaybackRate() when deciding whether to mute at extreme rates.
void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
{
    m_preservesPitch = preservesPitch;
}
859
// Builds the buffered time ranges by querying the pipeline for buffering
// information in percent and scaling each range against the media duration.
// Returns an empty range set on error, for live streams, or when the
// duration is zero/unknown.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
{
    auto timeRanges = std::make_unique<PlatformTimeRanges>();
    if (m_errorOccured || isLiveStream())
        return timeRanges;

    // Percent-based ranges are meaningless without a finite, non-zero duration.
    float mediaDuration(durationMediaTime().toDouble());
    if (!mediaDuration || std::isinf(mediaDuration))
        return timeRanges;

    GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);

    if (!gst_element_query(m_pipeline.get(), query)) {
        gst_query_unref(query);
        return timeRanges;
    }

    // Convert each reported range from percent of the stream to seconds.
    guint numBufferingRanges = gst_query_get_n_buffering_ranges(query);
    for (guint index = 0; index < numBufferingRanges; index++) {
        gint64 rangeStart = 0, rangeStop = 0;
        if (gst_query_parse_nth_buffering_range(query, index, &rangeStart, &rangeStop))
            timeRanges->add(MediaTime::createWithDouble((rangeStart * mediaDuration) / GST_FORMAT_PERCENT_MAX),
                MediaTime::createWithDouble((rangeStop * mediaDuration) / GST_FORMAT_PERCENT_MAX));
    }

    // Fallback to the more general maxTimeLoaded() if no range has
    // been found.
    if (!timeRanges->length())
        if (float loaded = maxTimeLoaded())
            timeRanges->add(MediaTime::zeroTime(), MediaTime::createWithDouble(loaded));

    gst_query_unref(query);

    return timeRanges;
}
895
// Central dispatcher for messages posted on the pipeline's bus. Handles
// errors, EOS, state changes, buffering, duration changes, missing-plugin
// installation, MPEG-TS sections, table-of-contents and tag messages, and
// updates the player's network/ready state accordingly.
void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
{
    GUniqueOutPtr<GError> err;
    GUniqueOutPtr<gchar> debug;
    MediaPlayer::NetworkState error;
    bool issueError = true;
    bool attemptNextLocation = false;
    const GstStructure* structure = gst_message_get_structure(message);
    GstState requestedState, currentState;

    // Any message invalidates the last-finished-seek fallback position.
    m_canFallBackToLastFinishedSeekPosition = false;

    if (structure) {
        const gchar* messageTypeName = gst_structure_get_name(structure);

        // Redirect messages are sent from elements, like qtdemux, to
        // notify of the new location(s) of the media.
        if (!g_strcmp0(messageTypeName, "redirect")) {
            mediaLocationChanged(message);
            return;
        }
    }

    // We ignore state changes from internal elements. They are forwarded to playbin2 anyway.
    bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get());

    GST_DEBUG("Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_ERROR:
        // Ignore errors while tearing down, while a plugin install is in
        // flight, or if we already reported one.
        if (m_resetPipeline || m_missingPluginsCallback || m_errorOccured)
            break;
        gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
        GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());

        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");

        // Map the GStreamer error domain/code onto a MediaPlayer network state.
        error = MediaPlayer::Empty;
        if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_CODEC_NOT_FOUND)
            || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE)
            || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED)
            || g_error_matches(err.get(), GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)
            || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
            error = MediaPlayer::FormatError;
        else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
            // Let the mediaPlayerClient handle the stream error, in
            // this case the HTMLMediaElement will emit a stalled
            // event.
            GST_ERROR("Decode error, let the Media element emit a stalled event.");
            break;
        } else if (err->domain == GST_STREAM_ERROR) {
            error = MediaPlayer::DecodeError;
            // Stream errors may be recoverable by trying an alternate location.
            attemptNextLocation = true;
        } else if (err->domain == GST_RESOURCE_ERROR)
            error = MediaPlayer::NetworkError;

        // Only surface the error if no alternate location could be loaded.
        if (attemptNextLocation)
            issueError = !loadNextLocation();
        if (issueError)
            loadingFailed(error);
        break;
    case GST_MESSAGE_EOS:
        didEnd();
        break;
    case GST_MESSAGE_ASYNC_DONE:
        if (!messageSourceIsPlaybin || m_delayingLoad)
            break;
        asyncStateChangeDone();
        break;
    case GST_MESSAGE_STATE_CHANGED: {
        if (!messageSourceIsPlaybin || m_delayingLoad)
            break;
        updateStates();

        // Construct a filename for the graphviz dot file output.
        GstState newState;
        gst_message_parse_state_changed(message, &currentState, &newState, nullptr);
        CString dotFileName = String::format("webkit-video.%s_%s", gst_element_state_get_name(currentState), gst_element_state_get_name(newState)).utf8();
        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());

        break;
    }
    case GST_MESSAGE_BUFFERING:
        processBufferingStats(message);
        break;
    case GST_MESSAGE_DURATION_CHANGED:
        // Duration in MSE is managed by MediaSource, SourceBuffer and AppendPipeline.
        if (messageSourceIsPlaybin && !isMediaSource())
            durationChanged();
        break;
    case GST_MESSAGE_REQUEST_STATE:
        gst_message_parse_request_state(message, &requestedState);
        gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND);
        // Only honor downward state requests (e.g. an element asking to pause).
        if (requestedState < currentState) {
            GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(message)));
            GST_INFO("Element %s requested state change to %s", elementName.get(),
                gst_element_state_get_name(requestedState));
            m_requestedState = requestedState;
            if (!changePipelineState(requestedState))
                loadingFailed(MediaPlayer::Empty);
        }
        break;
    case GST_MESSAGE_CLOCK_LOST:
        // This can only happen in PLAYING state and we should just
        // get a new clock by moving back to PAUSED and then to
        // PLAYING again.
        // This can happen if the stream that ends in a sink that
        // provides the current clock disappears, for example if
        // the audio sink provides the clock and the audio stream
        // is disabled. It also happens relatively often with
        // HTTP adaptive streams when switching between different
        // variants of a stream.
        gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
        gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
        break;
    case GST_MESSAGE_LATENCY:
        // Recalculate the latency, we don't need any special handling
        // here other than the GStreamer default.
        // This can happen if the latency of live elements changes, or
        // for one reason or another a new live element is added or
        // removed from the pipeline.
        gst_bin_recalculate_latency(GST_BIN(m_pipeline.get()));
        break;
    case GST_MESSAGE_ELEMENT:
        if (gst_is_missing_plugin_message(message)) {
            if (gst_install_plugins_supported()) {
                // Ask the embedder to install the missing plugin; on success,
                // bounce the pipeline through READY to retry the load.
                m_missingPluginsCallback = MediaPlayerRequestInstallMissingPluginsCallback::create([this](uint32_t result) {
                    m_missingPluginsCallback = nullptr;
                    if (result != GST_INSTALL_PLUGINS_SUCCESS)
                        return;

                    changePipelineState(GST_STATE_READY);
                    changePipelineState(GST_STATE_PAUSED);
                });
                GUniquePtr<char> detail(gst_missing_plugin_message_get_installer_detail(message));
                GUniquePtr<char> description(gst_missing_plugin_message_get_description(message));
                m_player->client().requestInstallMissingPlugins(String::fromUTF8(detail.get()), String::fromUTF8(description.get()), *m_missingPluginsCallback);
            }
        }
#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
        else {
            GstMpegtsSection* section = gst_message_parse_mpegts_section(message);
            if (section) {
                processMpegTsSection(section);
                gst_mpegts_section_unref(section);
            }
        }
#endif
        break;
#if ENABLE(VIDEO_TRACK)
    case GST_MESSAGE_TOC:
        processTableOfContents(message);
        break;
#endif
    case GST_MESSAGE_TAG: {
        // Apply image-orientation tags (e.g. from rotated phone recordings)
        // to the video source.
        GstTagList* tags = nullptr;
        GUniqueOutPtr<gchar> tag;
        gst_message_parse_tag(message, &tags);
        if (gst_tag_list_get_string(tags, GST_TAG_IMAGE_ORIENTATION, &tag.outPtr())) {
            if (!g_strcmp0(tag.get(), "rotate-90"))
                setVideoSourceOrientation(ImageOrientation(OriginRightTop));
            else if (!g_strcmp0(tag.get(), "rotate-180"))
                setVideoSourceOrientation(ImageOrientation(OriginBottomRight));
            else if (!g_strcmp0(tag.get(), "rotate-270"))
                setVideoSourceOrientation(ImageOrientation(OriginLeftBottom));
        }
        gst_tag_list_unref(tags);
        break;
    }
    default:
        GST_DEBUG("Unhandled GStreamer message type: %s",
                    GST_MESSAGE_TYPE_NAME(message));
        break;
    }
    return;
}
1071
// Handles GST_MESSAGE_BUFFERING: records the reported buffering percentage
// and re-runs the state machine, which may pause or resume the pipeline
// depending on buffering progress (see updateStates()).
void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message)
{
    m_buffering = true;
    gst_message_parse_buffering(message, &m_bufferingPercentage);

    GST_DEBUG("[Buffering] Buffering: %d%%.", m_bufferingPercentage);

    updateStates();
}
1081
1082 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
// Handles an MPEG-TS section parsed from a GST_MESSAGE_ELEMENT message.
// A PMT section rebuilds the set of in-band metadata text tracks (keyed by
// PID); any other section is delivered as a data cue to the matching track,
// if one exists.
void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section)
{
    ASSERT(section);

    if (section->section_type == GST_MPEGTS_SECTION_PMT) {
        const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
        // A new program map replaces all previously exposed metadata tracks.
        m_metadataTracks.clear();
        for (guint i = 0; i < pmt->streams->len; ++i) {
            const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
            // Only stream_type 0x05 or the user-private range (>= 0x80) are
            // treated as metadata candidates.
            if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
                AtomicString pid = String::number(stream->pid);
                RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = InbandMetadataTextTrackPrivateGStreamer::create(
                    InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data, pid);

                // 4.7.10.12.2 Sourcing in-band text tracks
                // If the new text track's kind is metadata, then set the text track in-band metadata track dispatch
                // type as follows, based on the type of the media resource:
                // Let stream type be the value of the "stream_type" field describing the text track's type in the
                // file's program map section, interpreted as an 8-bit unsigned integer. Let length be the value of
                // the "ES_info_length" field for the track in the same part of the program map section, interpreted
                // as an integer as defined by the MPEG-2 specification. Let descriptor bytes be the length bytes
                // following the "ES_info_length" field. The text track in-band metadata track dispatch type must be
                // set to the concatenation of the stream type byte and the zero or more descriptor bytes bytes,
                // expressed in hexadecimal using uppercase ASCII hex digits.
                String inbandMetadataTrackDispatchType;
                appendUnsignedAsHexFixedSize(stream->stream_type, inbandMetadataTrackDispatchType, 2);
                for (guint j = 0; j < stream->descriptors->len; ++j) {
                    const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
                    for (guint k = 0; k < descriptor->length; ++k)
                        appendByteAsHex(descriptor->data[k], inbandMetadataTrackDispatchType);
                }
                track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType);

                m_metadataTracks.add(pid, track);
                m_player->addTextTrack(*track);
            }
        }
    } else {
        // Non-PMT section: forward the raw section bytes to the track that
        // was registered for this PID, if any.
        AtomicString pid = String::number(section->pid);
        RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = m_metadataTracks.get(pid);
        if (!track)
            return;

        GRefPtr<GBytes> data = gst_mpegts_section_get_data(section);
        gsize size;
        const void* bytes = g_bytes_get_data(data.get(), &size);

        // Zero-length cue anchored at the current playback position.
        track->addDataCue(currentMediaTime(), currentMediaTime(), bytes, size);
    }
}
1133 #endif
1134
1135 #if ENABLE(VIDEO_TRACK)
// Handles GST_MESSAGE_TOC: replaces any previous chapters track with a fresh
// one and populates it by walking the table-of-contents entries recursively.
void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
{
    // Drop the old chapters track; a TOC message describes the full table.
    if (m_chaptersTrack)
        m_player->removeTextTrack(*m_chaptersTrack);

    m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Chapters, InbandTextTrackPrivate::Generic);
    m_player->addTextTrack(*m_chaptersTrack);

    GRefPtr<GstToc> toc;
    gboolean updated;
    gst_message_parse_toc(message, &toc.outPtr(), &updated);
    ASSERT(toc);

    for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next)
        processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
}
1152
// Converts one TOC entry into a generic chapter cue (start/end time and
// title, when available) on m_chaptersTrack, then recurses into the entry's
// sub-entries.
void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry)
{
    ASSERT(entry);

    auto cue = GenericCueData::create();

    // -1 means the entry carries no start/stop time; leave the cue's
    // corresponding field untouched in that case.
    gint64 start = -1, stop = -1;
    gst_toc_entry_get_start_stop_times(entry, &start, &stop);
    if (start != -1)
        cue->setStartTime(MediaTime(start, GST_SECOND));
    if (stop != -1)
        cue->setEndTime(MediaTime(stop, GST_SECOND));

    // Use the entry's title tag, if present, as the cue content.
    GstTagList* tags = gst_toc_entry_get_tags(entry);
    if (tags) {
        gchar* title =  nullptr;
        gst_tag_list_get_string(tags, GST_TAG_TITLE, &title);
        if (title) {
            cue->setContent(title);
            g_free(title);
        }
    }

    m_chaptersTrack->addGenericCue(cue);

    for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next)
        processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
}
1181 #endif
1182
// Periodic timer callback that polls the pipeline's download-buffer fill
// level, updates m_maxTimeLoaded accordingly, and stops itself once the
// whole media has been downloaded.
void MediaPlayerPrivateGStreamer::fillTimerFired()
{
    GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);

    if (!gst_element_query(m_pipeline.get(), query)) {
        gst_query_unref(query);
        return;
    }

    gint64 start, stop;
    gdouble fillStatus = 100.0;

    gst_query_parse_buffering_range(query, nullptr, &start, &stop, nullptr);
    gst_query_unref(query);

    // stop == -1 means the buffered end is unknown; keep the 100% default.
    if (stop != -1)
        fillStatus = 100.0 * stop / GST_FORMAT_PERCENT_MAX;

    GST_DEBUG("[Buffering] Download buffer filled up to %f%%", fillStatus);

    float mediaDuration = durationMediaTime().toDouble();

    // Update maxTimeLoaded only if the media duration is
    // available. Otherwise we can't compute it.
    if (mediaDuration) {
        if (fillStatus == 100.0)
            m_maxTimeLoaded = mediaDuration;
        else
            m_maxTimeLoaded = static_cast<float>((fillStatus * mediaDuration) / 100.0);
        GST_DEBUG("[Buffering] Updated maxTimeLoaded: %f", m_maxTimeLoaded);
    }

    m_downloadFinished = fillStatus == 100.0;
    if (!m_downloadFinished) {
        updateStates();
        return;
    }

    // Media is now fully loaded. It will play even if network
    // connection is cut. Buffering is done, remove the fill source
    // from the main loop.
    m_fillTimer.stop();
    updateStates();
}
1227
1228 float MediaPlayerPrivateGStreamer::maxTimeSeekable() const
1229 {
1230     if (m_errorOccured)
1231         return 0.0f;
1232
1233     float mediaDuration = durationMediaTime().toDouble();
1234     GST_DEBUG("maxTimeSeekable, duration: %f", mediaDuration);
1235     // infinite duration means live stream
1236     if (std::isinf(mediaDuration))
1237         return 0.0f;
1238
1239     return mediaDuration;
1240 }
1241
1242 float MediaPlayerPrivateGStreamer::maxTimeLoaded() const
1243 {
1244     if (m_errorOccured)
1245         return 0.0f;
1246
1247     float loaded = m_maxTimeLoaded;
1248     if (m_isEndReached)
1249         loaded = durationMediaTime().toDouble();
1250     GST_DEBUG("maxTimeLoaded: %f", loaded);
1251     return loaded;
1252 }
1253
1254 bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
1255 {
1256     if (UNLIKELY(!m_pipeline || !durationMediaTime() || (!isMediaSource() && !totalBytes())))
1257         return false;
1258     float currentMaxTimeLoaded = maxTimeLoaded();
1259     bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
1260     m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
1261     GST_DEBUG("didLoadingProgress: %d", didLoadingProgress);
1262     return didLoadingProgress;
1263 }
1264
1265 unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
1266 {
1267     if (m_errorOccured)
1268         return 0;
1269
1270     if (m_totalBytes)
1271         return m_totalBytes;
1272
1273     if (!m_source)
1274         return 0;
1275
1276     GstFormat fmt = GST_FORMAT_BYTES;
1277     gint64 length = 0;
1278     if (gst_element_query_duration(m_source.get(), fmt, &length)) {
1279         GST_INFO("totalBytes %" G_GINT64_FORMAT, length);
1280         m_totalBytes = static_cast<unsigned long long>(length);
1281         m_isStreaming = !length;
1282         return m_totalBytes;
1283     }
1284
1285     // Fall back to querying the source pads manually.
1286     // See also https://bugzilla.gnome.org/show_bug.cgi?id=638749
1287     GstIterator* iter = gst_element_iterate_src_pads(m_source.get());
1288     bool done = false;
1289     while (!done) {
1290         GValue item = G_VALUE_INIT;
1291         switch (gst_iterator_next(iter, &item)) {
1292         case GST_ITERATOR_OK: {
1293             GstPad* pad = static_cast<GstPad*>(g_value_get_object(&item));
1294             gint64 padLength = 0;
1295             if (gst_pad_query_duration(pad, fmt, &padLength) && padLength > length)
1296                 length = padLength;
1297             break;
1298         }
1299         case GST_ITERATOR_RESYNC:
1300             gst_iterator_resync(iter);
1301             break;
1302         case GST_ITERATOR_ERROR:
1303             FALLTHROUGH;
1304         case GST_ITERATOR_DONE:
1305             done = true;
1306             break;
1307         }
1308
1309         g_value_unset(&item);
1310     }
1311
1312     gst_iterator_free(iter);
1313
1314     GST_INFO("totalBytes %" G_GINT64_FORMAT, length);
1315     m_totalBytes = static_cast<unsigned long long>(length);
1316     m_isStreaming = !length;
1317     return m_totalBytes;
1318 }
1319
// Static trampoline used as a GObject signal callback; forwards to the
// member function sourceChanged() on the given player instance.
void MediaPlayerPrivateGStreamer::sourceChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->sourceChanged();
}
1324
// "element-added" callback on the uridecodebin: watches for the
// GstDownloadBuffer element, redirects its on-disk buffer into
// /var/tmp/WebKit-Media-XXXXXX, and cleans up files left behind by the
// previous template.
void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GstElement* element, MediaPlayerPrivateGStreamer* player)
{
    // Only the download buffer element is of interest here.
    if (g_strcmp0(G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(G_OBJECT(element))), "GstDownloadBuffer"))
        return;

    player->m_downloadBuffer = element;
    // The element was found; no need to keep listening for additions.
    g_signal_handlers_disconnect_by_func(bin, reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), player);
    // Get notified once the temporary download file has been created.
    g_signal_connect_swapped(element, "notify::temp-location", G_CALLBACK(downloadBufferFileCreatedCallback), player);

    GUniqueOutPtr<char> oldDownloadTemplate;
    g_object_get(element, "temp-template", &oldDownloadTemplate.outPtr(), nullptr);

    GUniquePtr<char> newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, "var", "tmp", "WebKit-Media-XXXXXX", nullptr));
    g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr);
    GST_TRACE("Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get());

    player->purgeOldDownloadFiles(oldDownloadTemplate.get());
}
1343
// Fired once the download buffer has created its temporary file. The file is
// unlinked immediately while the element keeps its descriptor open, so the
// data stays accessible but disappears from the filesystem when released.
void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer* player)
{
    ASSERT(player->m_downloadBuffer);

    // One-shot notification: stop listening for further temp-location changes.
    g_signal_handlers_disconnect_by_func(player->m_downloadBuffer.get(), reinterpret_cast<gpointer>(downloadBufferFileCreatedCallback), player);

    GUniqueOutPtr<char> downloadFile;
    g_object_get(player->m_downloadBuffer.get(), "temp-location", &downloadFile.outPtr(), nullptr);
    player->m_downloadBuffer = nullptr;

    if (UNLIKELY(!deleteFile(downloadFile.get()))) {
        GST_WARNING("Couldn't unlink media temporary file %s after creation", downloadFile.get());
        return;
    }

    GST_TRACE("Unlinked media temporary file %s after creation", downloadFile.get());
}
1361
// Deletes stale media temporary files matching the given mkstemp-style
// template (e.g. left behind by a crashed process using the previous
// download location).
void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate)
{
    if (!downloadFileTemplate)
        return;

    GUniquePtr<char> templatePath(g_path_get_dirname(downloadFileTemplate));
    GUniquePtr<char> templateFile(g_path_get_basename(downloadFileTemplate));
    // Turn the template's X placeholders into single-character wildcards,
    // e.g. "WebKit-Media-XXXXXX" -> "WebKit-Media-??????".
    String templatePattern = String(templateFile.get()).replace("X", "?");

    for (auto& filePath : listDirectory(templatePath.get(), templatePattern)) {
        if (UNLIKELY(!deleteFile(filePath))) {
            GST_WARNING("Couldn't unlink legacy media temporary file: %s", filePath.utf8().data());
            continue;
        }

        GST_TRACE("Unlinked legacy media temporary file: %s", filePath.utf8().data());
    }
}
1380
// Refreshes m_source from the pipeline's "source" property. When the source
// is our custom WebKitWebSrc, wires it up to the MediaPlayer and starts
// watching its parent bin for the download buffer element.
void MediaPlayerPrivateGStreamer::sourceChanged()
{
    // Detach the element-added handler from the previous source's parent, if any.
    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    m_source.clear();
    g_object_get(m_pipeline.get(), "source", &m_source.outPtr(), nullptr);

    if (WEBKIT_IS_WEB_SRC(m_source.get())) {
        webKitWebSrcSetMediaPlayer(WEBKIT_WEB_SRC(m_source.get()), m_player);
        g_signal_connect(GST_ELEMENT_PARENT(m_source.get()), "element-added", G_CALLBACK(uriDecodeBinElementAddedCallback), this);
    }
}
1394
// Returns true when the media was served from a single security origin,
// i.e. any redirects stayed on the same scheme/host/port as the original
// request. Non-WebKitWebSrc sources are assumed not to have redirected.
bool MediaPlayerPrivateGStreamer::hasSingleSecurityOrigin() const
{
    if (!m_source)
        return false;

    if (!WEBKIT_IS_WEB_SRC(m_source.get()))
        return true;

    GUniqueOutPtr<char> originalURI, resolvedURI;
    g_object_get(m_source.get(), "location", &originalURI.outPtr(), "resolved-location", &resolvedURI.outPtr(), nullptr);
    if (!originalURI || !resolvedURI)
        return false;
    // Identical URIs mean no redirect happened at all.
    if (!g_strcmp0(originalURI.get(), resolvedURI.get()))
        return true;

    // A redirect occurred: compare the origins of the requested and final URLs.
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::createFromString(String::fromUTF8(resolvedURI.get())));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(String::fromUTF8(originalURI.get())));
    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
1414
1415 void MediaPlayerPrivateGStreamer::cancelLoad()
1416 {
1417     if (m_networkState < MediaPlayer::Loading || m_networkState == MediaPlayer::Loaded)
1418         return;
1419
1420     if (m_pipeline)
1421         changePipelineState(GST_STATE_READY);
1422 }
1423
// Called when the pipeline finishes an asynchronous state change. If a seek
// just completed, either replays a seek that was requested while the first
// one was in flight, or notifies the player that the time changed;
// otherwise just refreshes the player state.
void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
{
    if (!m_pipeline || m_errorOccured)
        return;

    if (m_seeking) {
        if (m_seekIsPending)
            updateStates();
        else {
            GST_DEBUG("[Seek] seeked to %f", m_seekTime);
            m_seeking = false;
            // A second seek arrived while this one was in progress (-1 means
            // none): perform it now instead of reporting completion.
            if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek != -1) {
                seek(m_timeOfOverlappingSeek);
                m_timeOfOverlappingSeek = -1;
                return;
            }
            m_timeOfOverlappingSeek = -1;

            // The pipeline can still have a pending state. In this case a position query will fail.
            // Right now we can use m_seekTime as a fallback.
            m_canFallBackToLastFinishedSeekPosition = true;
            timeChanged();
        }
    } else
        updateStates();
}
1450
// Core state machine: derives the MediaPlayer network and ready states from
// the pipeline's current GStreamer state, pauses/resumes the pipeline around
// buffering, notifies the player of any state transitions, and commits a
// pending seek once the pipeline has settled in PAUSED or beyond.
void MediaPlayerPrivateGStreamer::updateStates()
{
    if (!m_pipeline)
        return;

    if (m_errorOccured)
        return;

    MediaPlayer::NetworkState oldNetworkState = m_networkState;
    MediaPlayer::ReadyState oldReadyState = m_readyState;
    GstState state;
    GstState pending;

    // Short (250ns) timeout: we only want a snapshot, not to block on an
    // in-progress state change.
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);

    bool shouldUpdatePlaybackState = false;
    switch (getStateResult) {
    case GST_STATE_CHANGE_SUCCESS: {
        GST_DEBUG("State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));

        // Do nothing if on EOS and state changed to READY to avoid recreating the player
        // on HTMLMediaElement and properly generate the video 'ended' event.
        if (m_isEndReached && state == GST_STATE_READY)
            break;

        m_resetPipeline = state <= GST_STATE_READY;

        // Remember whether we were buffering before this update; used below
        // to decide if playback should be restarted.
        bool didBuffering = m_buffering;

        // Update ready and network states.
        switch (state) {
        case GST_STATE_NULL:
            m_readyState = MediaPlayer::HaveNothing;
            m_networkState = MediaPlayer::Empty;
            break;
        case GST_STATE_READY:
            m_readyState = MediaPlayer::HaveMetadata;
            m_networkState = MediaPlayer::Empty;
            break;
        case GST_STATE_PAUSED:
        case GST_STATE_PLAYING:
            if (m_buffering) {
                if (m_bufferingPercentage == 100) {
                    GST_DEBUG("[Buffering] Complete.");
                    m_buffering = false;
                    m_readyState = MediaPlayer::HaveEnoughData;
                    m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
                } else {
                    m_readyState = MediaPlayer::HaveCurrentData;
                    m_networkState = MediaPlayer::Loading;
                }
            } else if (m_downloadFinished) {
                m_readyState = MediaPlayer::HaveEnoughData;
                m_networkState = MediaPlayer::Loaded;
            } else {
                m_readyState = MediaPlayer::HaveFutureData;
                m_networkState = MediaPlayer::Loading;
            }

            break;
        default:
            ASSERT_NOT_REACHED();
            break;
        }

        // Sync states where needed.
        if (state == GST_STATE_PAUSED) {
            // First time we reach PAUSED: push the initial volume/mute values.
            if (!m_volumeAndMuteInitialized) {
                notifyPlayerOfVolumeChange();
                notifyPlayerOfMute();
                m_volumeAndMuteInitialized = true;
            }

            // Buffering just finished and the client wants playback: resume.
            if (didBuffering && !m_buffering && !m_paused && m_playbackRate) {
                GST_DEBUG("[Buffering] Restarting playback.");
                changePipelineState(GST_STATE_PLAYING);
            }
        } else if (state == GST_STATE_PLAYING) {
            m_paused = false;

            // Pause while (re)buffering a non-live stream, or when the rate is 0.
            if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
                GST_DEBUG("[Buffering] Pausing stream for buffering.");
                changePipelineState(GST_STATE_PAUSED);
            }
        } else
            m_paused = true;

        // An element-requested PAUSED state (GST_MESSAGE_REQUEST_STATE) has
        // now been reached; report it to the player below.
        if (m_requestedState == GST_STATE_PAUSED && state == GST_STATE_PAUSED) {
            shouldUpdatePlaybackState = true;
            GST_DEBUG("Requested state change to %s was completed", gst_element_state_get_name(state));
        }

        break;
    }
    case GST_STATE_CHANGE_ASYNC:
        GST_DEBUG("Async: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
        // Change in progress.
        break;
    case GST_STATE_CHANGE_FAILURE:
        GST_DEBUG("Failure: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
        // Change failed
        return;
    case GST_STATE_CHANGE_NO_PREROLL:
        GST_DEBUG("No preroll: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));

        // Live pipelines go in PAUSED without prerolling.
        m_isStreaming = true;
        setDownloadBuffering();

        if (state == GST_STATE_READY)
            m_readyState = MediaPlayer::HaveNothing;
        else if (state == GST_STATE_PAUSED) {
            m_readyState = MediaPlayer::HaveEnoughData;
            m_paused = true;
        } else if (state == GST_STATE_PLAYING)
            m_paused = false;

        if (!m_paused && m_playbackRate)
            changePipelineState(GST_STATE_PLAYING);

        m_networkState = MediaPlayer::Loading;
        break;
    default:
        GST_DEBUG("Else : %d", getStateResult);
        break;
    }

    // The requested state has either been honored above or is now stale.
    m_requestedState = GST_STATE_VOID_PENDING;

    if (shouldUpdatePlaybackState)
        m_player->playbackStateChanged();

    if (m_networkState != oldNetworkState) {
        GST_DEBUG("Network State Changed from %u to %u", oldNetworkState, m_networkState);
        m_player->networkStateChanged();
    }
    if (m_readyState != oldReadyState) {
        GST_DEBUG("Ready State Changed from %u to %u", oldReadyState, m_readyState);
        m_player->readyStateChanged();
    }

    // Once the pipeline is stable in PAUSED or PLAYING, (re)apply the
    // playback rate and commit any seek that was deferred until now.
    if (getStateResult == GST_STATE_CHANGE_SUCCESS && state >= GST_STATE_PAUSED) {
        updatePlaybackRate();
        if (m_seekIsPending) {
            GST_DEBUG("[Seek] committing pending seek to %f", m_seekTime);
            m_seekIsPending = false;
            m_seeking = doSeek(toGstClockTime(m_seekTime), m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
            if (!m_seeking)
                GST_DEBUG("[Seek] seeking to %f failed", m_seekTime);
        }
    }
}
1603
1604 void MediaPlayerPrivateGStreamer::mediaLocationChanged(GstMessage* message)
1605 {
1606     if (m_mediaLocations)
1607         gst_structure_free(m_mediaLocations);
1608
1609     const GstStructure* structure = gst_message_get_structure(message);
1610     if (structure) {
1611         // This structure can contain:
1612         // - both a new-location string and embedded locations structure
1613         // - or only a new-location string.
1614         m_mediaLocations = gst_structure_copy(structure);
1615         const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
1616
1617         if (locations)
1618             m_mediaLocationCurrentIndex = static_cast<int>(gst_value_list_get_size(locations)) -1;
1619
1620         loadNextLocation();
1621     }
1622 }
1623
1624 bool MediaPlayerPrivateGStreamer::loadNextLocation()
1625 {
1626     if (!m_mediaLocations)
1627         return false;
1628
1629     const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
1630     const gchar* newLocation = nullptr;
1631
1632     if (!locations) {
1633         // Fallback on new-location string.
1634         newLocation = gst_structure_get_string(m_mediaLocations, "new-location");
1635         if (!newLocation)
1636             return false;
1637     }
1638
1639     if (!newLocation) {
1640         if (m_mediaLocationCurrentIndex < 0) {
1641             m_mediaLocations = nullptr;
1642             return false;
1643         }
1644
1645         const GValue* location = gst_value_list_get_value(locations,
1646                                                           m_mediaLocationCurrentIndex);
1647         const GstStructure* structure = gst_value_get_structure(location);
1648
1649         if (!structure) {
1650             m_mediaLocationCurrentIndex--;
1651             return false;
1652         }
1653
1654         newLocation = gst_structure_get_string(structure, "new-location");
1655     }
1656
1657     if (newLocation) {
1658         // Found a candidate. new-location is not always an absolute url
1659         // though. We need to take the base of the current url and
1660         // append the value of new-location to it.
1661         URL baseUrl = gst_uri_is_valid(newLocation) ? URL() : m_url;
1662         URL newUrl = URL(baseUrl, newLocation);
1663
1664         RefPtr<SecurityOrigin> securityOrigin = SecurityOrigin::create(m_url);
1665         if (securityOrigin->canRequest(newUrl)) {
1666             GST_INFO("New media url: %s", newUrl.string().utf8().data());
1667
1668             // Reset player states.
1669             m_networkState = MediaPlayer::Loading;
1670             m_player->networkStateChanged();
1671             m_readyState = MediaPlayer::HaveNothing;
1672             m_player->readyStateChanged();
1673
1674             // Reset pipeline state.
1675             m_resetPipeline = true;
1676             changePipelineState(GST_STATE_READY);
1677
1678             GstState state;
1679             gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
1680             if (state <= GST_STATE_READY) {
1681                 // Set the new uri and start playing.
1682                 g_object_set(m_pipeline.get(), "uri", newUrl.string().utf8().data(), nullptr);
1683                 m_url = newUrl;
1684                 changePipelineState(GST_STATE_PLAYING);
1685                 return true;
1686             }
1687         } else
1688             GST_INFO("Not allowed to load new media location: %s", newUrl.string().utf8().data());
1689     }
1690     m_mediaLocationCurrentIndex--;
1691     return false;
1692 }
1693
// A loading-related pipeline notification arrived; recompute network and
// ready states from the current pipeline state.
void MediaPlayerPrivateGStreamer::loadStateChanged()
{
    updateStates();
}
1698
// Playback position changed (e.g. after a seek completed): refresh the
// derived states, then notify the MediaPlayer client.
void MediaPlayerPrivateGStreamer::timeChanged()
{
    updateStates();
    m_player->timeChanged();
}
1704
1705 void MediaPlayerPrivateGStreamer::didEnd()
1706 {
1707     // Synchronize position and duration values to not confuse the
1708     // HTMLMediaElement. In some cases like reverse playback the
1709     // position is not always reported as 0 for instance.
1710     MediaTime now = currentMediaTime();
1711     if (now > MediaTime { } && now <= durationMediaTime())
1712         m_player->durationChanged();
1713
1714     m_isEndReached = true;
1715     timeChanged();
1716
1717     if (!m_player->client().mediaPlayerIsLooping()) {
1718         m_paused = true;
1719         m_durationAtEOS = durationMediaTime().toDouble();
1720         changePipelineState(GST_STATE_READY);
1721         m_downloadFinished = false;
1722     }
1723 }
1724
void MediaPlayerPrivateGStreamer::durationChanged()
{
    // NOTE(review): previousDuration is captured from the same query as
    // the comparison below, with no intervening update, so the two values
    // can only differ through the double -> float truncation here. Confirm
    // whether a member caching the duration from the previous notification
    // was intended instead.
    float previousDuration = durationMediaTime().toDouble();

    // Avoid emitting durationchanged in the case where the previous
    // duration was 0 because that case is already handled by the
    // HTMLMediaElement.
    if (previousDuration && durationMediaTime().toDouble() != previousDuration)
        m_player->durationChanged();
}
1735
1736 void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState error)
1737 {
1738     m_errorOccured = true;
1739     if (m_networkState != error) {
1740         m_networkState = error;
1741         m_player->networkStateChanged();
1742     }
1743     if (m_readyState != MediaPlayer::HaveNothing) {
1744         m_readyState = MediaPlayer::HaveNothing;
1745         m_player->readyStateChanged();
1746     }
1747
1748     // Loading failed, remove ready timer.
1749     m_readyTimerHandler.stop();
1750 }
1751
// Lazily builds (once) the set of container MIME types this engine can
// play, by probing the GStreamer registry for the audio/video decoders and
// demuxers actually installed with rank MARGINAL or better.
static HashSet<String, ASCIICaseInsensitiveHash>& mimeTypeSet()
{
    static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> mimeTypes = []()
    {
        MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements();
        HashSet<String, ASCIICaseInsensitiveHash> set;

        GList* audioDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO, GST_RANK_MARGINAL);
        GList* videoDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, GST_RANK_MARGINAL);
        GList* demuxerFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DEMUXER, GST_RANK_MARGINAL);

        // Selects which factory list a caps string is matched against.
        enum ElementType {
            AudioDecoder = 0,
            VideoDecoder,
            Demuxer
        };
        // Maps a GStreamer caps string onto the web-facing MIME types it
        // enables. An empty list means the caps string itself doubles as
        // the MIME type.
        struct GstCapsWebKitMapping {
            ElementType elementType;
            const char* capsString;
            Vector<AtomicString> webkitMimeTypes;
        };

        Vector<GstCapsWebKitMapping> mapping = {
            {AudioDecoder, "audio/midi", {"audio/midi", "audio/riff-midi"}},
            {AudioDecoder, "audio/x-sbc", { }},
            {AudioDecoder, "audio/x-sid", { }},
            {AudioDecoder, "audio/x-flac", {"audio/x-flac", "audio/flac"}},
            {AudioDecoder, "audio/x-wav", {"audio/x-wav", "audio/wav", "audio/vnd.wave"}},
            {AudioDecoder, "audio/x-wavpack", {"audio/x-wavpack"}},
            {AudioDecoder, "audio/x-speex", {"audio/speex", "audio/x-speex"}},
            {AudioDecoder, "audio/x-ac3", { }},
            // NOTE(review): x-eac3 caps advertising the "audio/x-ac3" MIME
            // type looks asymmetric — confirm this mapping is intentional.
            {AudioDecoder, "audio/x-eac3", {"audio/x-ac3"}},
            {AudioDecoder, "audio/x-dts", { }},
            {VideoDecoder, "video/x-h264, profile=(string)high", {"video/mp4", "video/x-m4v"}},
            {VideoDecoder, "video/x-msvideocodec", {"video/x-msvideo"}},
            {VideoDecoder, "video/x-h263", { }},
            {VideoDecoder, "video/mpegts", { }},
            {VideoDecoder, "video/mpeg, mpegversion=(int){1,2}, systemstream=(boolean)false", {"video/mpeg"}},
            {VideoDecoder, "video/x-dirac", { }},
            {VideoDecoder, "video/x-flash-video", {"video/flv", "video/x-flv"}},
            {Demuxer, "video/quicktime", { }},
            {Demuxer, "video/quicktime, variant=(string)3gpp", {"video/3gpp"}},
            {Demuxer, "application/x-3gp", { }},
            {Demuxer, "video/x-ms-asf", { }},
            {Demuxer, "audio/x-aiff", { }},
            {Demuxer, "application/x-pn-realaudio", { }},
            {Demuxer, "application/vnd.rn-realmedia", { }},
            {Demuxer, "audio/x-wav", {"audio/x-wav", "audio/wav", "audio/vnd.wave"}},
            {Demuxer, "application/x-hls", {"application/vnd.apple.mpegurl", "application/x-mpegurl"}}
        };

        // Add each mapping's MIME types when the registry has an element
        // handling its caps string.
        for (auto& current : mapping) {
            GList* factories = demuxerFactories;
            if (current.elementType == AudioDecoder)
                factories = audioDecoderFactories;
            else if (current.elementType == VideoDecoder)
                factories = videoDecoderFactories;

            if (gstRegistryHasElementForMediaType(factories, current.capsString)) {
                if (!current.webkitMimeTypes.isEmpty()) {
                    for (const auto& mimeType : current.webkitMimeTypes)
                        set.add(mimeType);
                } else
                    set.add(AtomicString(current.capsString));
            }
        }

        // Types below need finer-grained, codec-dependent checks than the
        // table above provides.
        bool opusSupported = false;
        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-opus")) {
            opusSupported = true;
            set.add(AtomicString("audio/opus"));
        }

        bool vorbisSupported = false;
        if (gstRegistryHasElementForMediaType(demuxerFactories, "application/ogg")) {
            set.add(AtomicString("application/ogg"));

            vorbisSupported = gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-vorbis");
            if (vorbisSupported) {
                set.add(AtomicString("audio/ogg"));
                set.add(AtomicString("audio/x-vorbis+ogg"));
            }

            if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-theora"))
                set.add(AtomicString("video/ogg"));
        }

        bool audioMpegSupported = false;
        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int)1, layer=(int)[1, 3]")) {
            audioMpegSupported = true;
            set.add(AtomicString("audio/mp1"));
            set.add(AtomicString("audio/mp3"));
            set.add(AtomicString("audio/x-mp3"));
        }

        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int){2, 4}")) {
            audioMpegSupported = true;
            set.add(AtomicString("audio/aac"));
            set.add(AtomicString("audio/mp2"));
            set.add(AtomicString("audio/mp4"));
            set.add(AtomicString("audio/x-m4a"));
        }

        if (audioMpegSupported) {
            set.add(AtomicString("audio/mpeg"));
            set.add(AtomicString("audio/x-mpeg"));
        }

        // WebM requires matroska demuxing plus a VP8/9/10 (video) or
        // vorbis/opus (audio) decoder.
        if (gstRegistryHasElementForMediaType(demuxerFactories, "video/x-matroska")) {
            set.add(AtomicString("video/x-matroska"));

            if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp8")
                || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp9")
                || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp10"))
                set.add(AtomicString("video/webm"));

            if (vorbisSupported || opusSupported)
                set.add(AtomicString("audio/webm"));
        }

        gst_plugin_feature_list_free(audioDecoderFactories);
        gst_plugin_feature_list_free(videoDecoderFactories);
        gst_plugin_feature_list_free(demuxerFactories);
        return set;
    }();
    return mimeTypes;
}
1879
// Copies the lazily-built supported MIME type set into the caller's set.
void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    types = mimeTypeSet();
}
1884
1885 MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::supportsType(const MediaEngineSupportParameters& parameters)
1886 {
1887     MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
1888 #if ENABLE(MEDIA_SOURCE)
1889     // MediaPlayerPrivateGStreamerMSE is in charge of mediasource playback, not us.
1890     if (parameters.isMediaSource)
1891         return result;
1892 #endif
1893
1894     // MediaStream playback is handled by the OpenWebRTC player.
1895     if (parameters.isMediaStream)
1896         return result;
1897
1898     if (parameters.type.isEmpty())
1899         return result;
1900
1901     // spec says we should not return "probably" if the codecs string is empty
1902     if (mimeTypeSet().contains(parameters.type.containerType()))
1903         result = parameters.type.codecs().isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;
1904
1905     return extendedSupportsType(parameters, result);
1906 }
1907
1908 void MediaPlayerPrivateGStreamer::setDownloadBuffering()
1909 {
1910     if (!m_pipeline)
1911         return;
1912
1913     unsigned flags;
1914     g_object_get(m_pipeline.get(), "flags", &flags, nullptr);
1915
1916     unsigned flagDownload = getGstPlayFlag("download");
1917
1918     // We don't want to stop downloading if we already started it.
1919     if (flags & flagDownload && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline)
1920         return;
1921
1922     bool shouldDownload = !isLiveStream() && m_preload == MediaPlayer::Auto;
1923     if (shouldDownload) {
1924         GST_DEBUG("Enabling on-disk buffering");
1925         g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
1926         m_fillTimer.startRepeating(200_ms);
1927     } else {
1928         GST_DEBUG("Disabling on-disk buffering");
1929         g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr);
1930         m_fillTimer.stop();
1931     }
1932 }
1933
1934 void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
1935 {
1936     if (preload == MediaPlayer::Auto && isLiveStream())
1937         return;
1938
1939     m_preload = preload;
1940     setDownloadBuffering();
1941
1942     if (m_delayingLoad && m_preload != MediaPlayer::None) {
1943         m_delayingLoad = false;
1944         commitLoad();
1945     }
1946 }
1947
// Builds the audio sink handed to playbin. Depending on the GStreamer
// version and build options this is the bare autoaudiosink, a Web Audio
// provider bin, or a bin with a scaletempo filter for pitch-preserving
// rate changes. Returns nullptr only if autoaudiosink is missing.
GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
{
    m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr);
    if (!m_autoAudioSink) {
        GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation");
        return nullptr;
    }

    // Adjust stream properties on the concrete sink once autoaudiosink
    // instantiates it.
    g_signal_connect_swapped(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);

    GstElement* audioSinkBin;

    if (webkitGstCheckVersion(1, 4, 2)) {
#if ENABLE(WEB_AUDIO)
        // >= 1.4.2: pitch preservation is configured via playbin's
        // audio-filter property instead (see createGSTPlayBin()), so only
        // the Web Audio bin is needed here.
        audioSinkBin = gst_bin_new("audio-sink");
        ensureAudioSourceProvider();
        m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
        return audioSinkBin;
#else
        return m_autoAudioSink.get();
#endif
    }

    // Construct audio sink only if pitch preserving is enabled.
    // If GStreamer 1.4.2 is used the audio-filter playbin property is used instead.
    if (m_preservesPitch) {
        GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
        if (!scale) {
            GST_WARNING("Failed to create scaletempo");
            return m_autoAudioSink.get();
        }

        // Wrap scaletempo in a bin and expose its sink pad as the bin's
        // ghost pad so playbin can link to it.
        audioSinkBin = gst_bin_new("audio-sink");
        gst_bin_add(GST_BIN(audioSinkBin), scale);
        GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(scale, "sink"));
        gst_element_add_pad(audioSinkBin, gst_ghost_pad_new("sink", pad.get()));

#if ENABLE(WEB_AUDIO)
        ensureAudioSourceProvider();
        m_audioSourceProvider->configureAudioBin(audioSinkBin, scale);
#else
        // Without Web Audio: scale -> convert -> resample -> autoaudiosink.
        GstElement* convert = gst_element_factory_make("audioconvert", nullptr);
        GstElement* resample = gst_element_factory_make("audioresample", nullptr);

        gst_bin_add_many(GST_BIN(audioSinkBin), convert, resample, m_autoAudioSink.get(), nullptr);

        if (!gst_element_link_many(scale, convert, resample, m_autoAudioSink.get(), nullptr)) {
            GST_WARNING("Failed to link audio sink elements");
            gst_object_unref(audioSinkBin);
            return m_autoAudioSink.get();
        }
#endif
        return audioSinkBin;
    }

#if ENABLE(WEB_AUDIO)
    // Old GStreamer, no pitch preservation: still need the Web Audio bin.
    audioSinkBin = gst_bin_new("audio-sink");
    ensureAudioSourceProvider();
    m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
    return audioSinkBin;
#endif
    ASSERT_NOT_REACHED();
    return nullptr;
}
2012
// Returns the element currently set as playbin's "audio-sink" property.
// g_object_get() on an object property returns a new reference, so the
// caller owns the returned element.
GstElement* MediaPlayerPrivateGStreamer::audioSink() const
{
    GstElement* sink;
    g_object_get(m_pipeline.get(), "audio-sink", &sink, nullptr);
    return sink;
}
2019
2020 #if ENABLE(WEB_AUDIO)
// Lazily creates the provider that exposes this pipeline's audio to the
// Web Audio API.
void MediaPlayerPrivateGStreamer::ensureAudioSourceProvider()
{
    if (!m_audioSourceProvider)
        m_audioSourceProvider = std::make_unique<AudioSourceProviderGStreamer>();
}
2026
// Accessor for the Web Audio source provider; creates it on first use.
// The returned pointer is owned by this object.
AudioSourceProvider* MediaPlayerPrivateGStreamer::audioSourceProvider()
{
    ensureAudioSourceProvider();
    return m_audioSourceProvider.get();
}
2032 #endif
2033
// One-time construction of the playbin-based pipeline: bus handlers,
// track-change signals, text/audio/video sinks and optional filters.
void MediaPlayerPrivateGStreamer::createGSTPlayBin()
{
    ASSERT(!m_pipeline);

    // gst_element_factory_make() returns a floating reference so
    // we should not adopt.
    setPipeline(gst_element_factory_make("playbin", "play"));
    setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get()));

    // Synchronous handler: invoked on the posting (streaming) thread;
    // messages it consumes are dropped before reaching the async watch.
    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
        auto& player = *static_cast<MediaPlayerPrivateGStreamer*>(userData);

        if (player.handleSyncMessage(message)) {
            gst_message_unref(message);
            return GST_BUS_DROP;
        }

        return GST_BUS_PASS;
    }, this, nullptr);

    // Let also other listeners subscribe to (application) messages in this bus.
    gst_bus_add_signal_watch_full(bus.get(), RunLoopSourcePriority::RunLoopDispatcher);
    g_signal_connect(bus.get(), "message", G_CALLBACK(busMessageCallback), this);

    g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);

    // Track source replacement and stream-count changes.
    g_signal_connect_swapped(m_pipeline.get(), "notify::source", G_CALLBACK(sourceChangedCallback), this);
    g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
    g_signal_connect_swapped(m_pipeline.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
#if ENABLE(VIDEO_TRACK)
    g_signal_connect_swapped(m_pipeline.get(), "text-changed", G_CALLBACK(textChangedCallback), this);

    // Route subtitle streams through WebKit's combiner and an app sink so
    // cues (as text/vtt samples) can be surfaced to the track machinery.
    GstElement* textCombiner = webkitTextCombinerNew();
    ASSERT(textCombiner);
    g_object_set(m_pipeline.get(), "text-stream-combiner", textCombiner, nullptr);

    m_textAppSink = webkitTextSinkNew();
    ASSERT(m_textAppSink);

    m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
    ASSERT(m_textAppSinkPad);

    g_object_set(m_textAppSink.get(), "emit-signals", true, "enable-last-sample", false, "caps", gst_caps_new_empty_simple("text/vtt"), nullptr);
    g_signal_connect_swapped(m_textAppSink.get(), "new-sample", G_CALLBACK(newTextSampleCallback), this);

    g_object_set(m_pipeline.get(), "text-sink", m_textAppSink.get(), nullptr);
#endif

    g_object_set(m_pipeline.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);

    configurePlaySink();

    // On 1.4.2 and newer we use the audio-filter property instead.
    // See https://bugzilla.gnome.org/show_bug.cgi?id=735748 for
    // the reason for using >= 1.4.2 instead of >= 1.4.0.
    if (m_preservesPitch && webkitGstCheckVersion(1, 4, 2)) {
        GstElement* scale = gst_element_factory_make("scaletempo", nullptr);

        if (!scale)
            GST_WARNING("Failed to create scaletempo");
        else
            g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
    }

    if (!m_renderingCanBeAccelerated) {
        // If not using accelerated compositing, let GStreamer handle
        // the image-orientation tag.
        // NOTE(review): method=8 presumably selects videoflip's automatic
        // (tag-driven) mode — confirm against the videoflip enum.
        GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
        g_object_set(videoFlip, "method", 8, nullptr);
        g_object_set(m_pipeline.get(), "video-filter", videoFlip, nullptr);
    }

    // Observe caps changes on the video sink's sink pad.
    GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
    if (videoSinkPad)
        g_signal_connect_swapped(videoSinkPad.get(), "notify::caps", G_CALLBACK(videoSinkCapsChangedCallback), this);
}
2111
// Test hook: posts a request-state message asking the pipeline to go to
// PAUSED, mimicking what a sink posts during an audio interruption.
void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
{
    GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_pipeline.get()), GST_STATE_PAUSED);
    gst_element_post_message(m_pipeline.get(), message);
}
2117
2118 bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
2119 {
2120     if (WEBKIT_IS_WEB_SRC(m_source.get()))
2121         return webKitSrcPassedCORSAccessCheck(WEBKIT_WEB_SRC(m_source.get()));
2122     return false;
2123 }
2124
2125 bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
2126 {
2127     if (isLiveStream())
2128         return false;
2129
2130     if (m_url.isLocalFile())
2131         return true;
2132
2133     if (m_url.protocolIsInHTTPFamily())
2134         return true;
2135
2136     return false;
2137 }
2138
// Called from the bus sync handler (streaming thread). Delegates to the
// base class; returning true makes the caller drop the message.
bool MediaPlayerPrivateGStreamer::handleSyncMessage(GstMessage* message)
{
    return MediaPlayerPrivateGStreamerBase::handleSyncMessage(message);
}
2143
2144 }
2145
#endif // ENABLE(VIDEO) && USE(GSTREAMER)