[GStreamer] Use of playbin3 when USE_PLAYBIN3 environment variable is set
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamer.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015, 2016 Igalia S.L
7  * Copyright (C) 2014 Cable Television Laboratories, Inc.
8  * Copyright (C) 2015, 2016 Metrological Group B.V.
9  *
10  * This library is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Library General Public
12  * License as published by the Free Software Foundation; either
13  * version 2 of the License, or (at your option) any later version.
14  *
15  * This library is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18  * Library General Public License for more details.
19  *
20  * You should have received a copy of the GNU Library General Public License
21  * along with this library; see the file COPYING.LIB.  If not, write to
22  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
23  * Boston, MA 02110-1301, USA.
24  */
25
26 #include "config.h"
27 #include "MediaPlayerPrivateGStreamer.h"
28
29 #if ENABLE(VIDEO) && USE(GSTREAMER)
30
31 #include "FileSystem.h"
32 #include "GStreamerUtilities.h"
33 #include "HTTPHeaderNames.h"
34 #include "MIMETypeRegistry.h"
35 #include "MediaPlayer.h"
36 #include "MediaPlayerRequestInstallMissingPluginsCallback.h"
37 #include "NotImplemented.h"
38 #include "SecurityOrigin.h"
39 #include "TimeRanges.h"
40 #include "URL.h"
41 #include "WebKitWebSourceGStreamer.h"
42 #include <glib.h>
43 #include <gst/gst.h>
44 #include <gst/pbutils/missing-plugins.h>
45 #include <limits>
46 #include <wtf/HexNumber.h>
47 #include <wtf/MediaTime.h>
48 #include <wtf/NeverDestroyed.h>
49 #include <wtf/StringPrintStream.h>
50 #include <wtf/glib/GUniquePtr.h>
51 #include <wtf/glib/RunLoopSourcePriority.h>
52 #include <wtf/text/CString.h>
53
54 #if ENABLE(VIDEO_TRACK)
55 #include "AudioTrackPrivateGStreamer.h"
56 #include "InbandMetadataTextTrackPrivateGStreamer.h"
57 #include "InbandTextTrackPrivateGStreamer.h"
58 #include "TextCombinerGStreamer.h"
59 #include "TextSinkGStreamer.h"
60 #include "VideoTrackPrivateGStreamer.h"
61 #endif
62
63 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
64 #define GST_USE_UNSTABLE_API
65 #include <gst/mpegts/mpegts.h>
66 #undef GST_USE_UNSTABLE_API
67 #endif
68 #include <gst/audio/streamvolume.h>
69
70 #if ENABLE(MEDIA_SOURCE)
71 #include "MediaSource.h"
72 #include "WebKitMediaSourceGStreamer.h"
73 #endif
74
75 #if ENABLE(WEB_AUDIO)
76 #include "AudioSourceProviderGStreamer.h"
77 #endif
78
79 GST_DEBUG_CATEGORY_EXTERN(webkit_media_player_debug);
80 #define GST_CAT_DEFAULT webkit_media_player_debug
81
82
83 namespace WebCore {
84 using namespace std;
85
// GstBus "message" signal trampoline: forwards every bus message to the
// player instance, which handles it in handleMessage().
static void busMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
{
    player->handleMessage(message);
}
90
// GObject signal trampoline: forwards a newly-added audio sink child to the
// member function that configures its stream properties.
void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
{
    player->setAudioStreamProperties(object);
}
95
96 void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
97 {
98     if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
99         return;
100
101     const char* role = m_player->client().mediaPlayerIsVideo() ? "video" : "music";
102     GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr);
103     g_object_set(object, "stream-properties", structure, nullptr);
104     gst_structure_free(structure);
105     GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
106     GST_DEBUG("Set media.role as %s at %s", role, elementName.get());
107 }
108
109 void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar)
110 {
111     if (isAvailable())
112         registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamer>(player); },
113             getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
114 }
115
116 bool MediaPlayerPrivateGStreamer::isAvailable()
117 {
118     if (!MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements())
119         return false;
120
121     GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
122     return factory;
123 }
124
// Initializes the player with inert defaults. The GStreamer pipeline itself
// is created lazily on the first load() call (via createGSTPlayBin()).
MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
    : MediaPlayerPrivateGStreamerBase(player)
    , m_buffering(false)
    , m_bufferingPercentage(0)
    , m_canFallBackToLastFinishedSeekPosition(false)
    , m_changingRate(false)
    , m_downloadFinished(false)
    , m_errorOccured(false)
    , m_isEndReached(false)
    , m_isStreaming(false)
    , m_durationAtEOS(MediaTime::invalidTime())
    , m_paused(true)
    , m_playbackRate(1)
    , m_requestedState(GST_STATE_VOID_PENDING)
    , m_resetPipeline(false)
    , m_seeking(false)
    , m_seekIsPending(false)
    , m_seekTime(MediaTime::invalidTime())
    , m_source(nullptr)
    , m_volumeAndMuteInitialized(false)
    , m_mediaLocations(nullptr)
    , m_mediaLocationCurrentIndex(0)
    , m_playbackRatePause(false)
    , m_timeOfOverlappingSeek(MediaTime::invalidTime())
    , m_lastPlaybackRate(1)
    , m_fillTimer(*this, &MediaPlayerPrivateGStreamer::fillTimerFired)
    , m_maxTimeLoaded(MediaTime::zeroTime())
    , m_preload(player->preload())
    , m_delayingLoad(false)
    , m_maxTimeLoadedAtLastDidLoadingProgress(MediaTime::zeroTime())
    , m_hasVideo(false)
    , m_hasAudio(false)
    , m_readyTimerHandler(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::readyTimerFired)
    , m_totalBytes(0)
    , m_preservesPitch(false)
{
#if USE(GLIB)
    // Freeing pipeline resources after lingering in READY is not urgent, so
    // run the watchdog at idle priority.
    m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
#endif
}
165
// Tears the player down, disconnecting every signal handler that was
// registered on GStreamer objects so no callback can fire into this object
// after destruction.
MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
{
    GST_DEBUG("Disposing player");

#if ENABLE(VIDEO_TRACK)
    // Detach track wrappers so they stop referencing this player.
    for (auto& track : m_audioTracks.values())
        track->disconnect();

    for (auto& track : m_textTracks.values())
        track->disconnect();

    for (auto& track : m_videoTracks.values())
        track->disconnect();
#endif
    if (m_fillTimer.isActive())
        m_fillTimer.stop();

    if (m_mediaLocations) {
        gst_structure_free(m_mediaLocations);
        m_mediaLocations = nullptr;
    }

    // The element-added handler lives on the source's parent element;
    // disconnect it only if the source was actually parented.
    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    if (m_autoAudioSink)
        g_signal_handlers_disconnect_by_func(G_OBJECT(m_autoAudioSink.get()),
            reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);

    m_readyTimerHandler.stop();
    // Invalidate pending missing-plugin-installer callbacks so their
    // completion handlers do not run against a destroyed player.
    for (auto& missingPluginCallback : m_missingPluginCallbacks) {
        if (missingPluginCallback)
            missingPluginCallback->invalidate();
    }
    m_missingPluginCallbacks.clear();

    if (m_videoSink) {
        GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
        g_signal_handlers_disconnect_matched(videoSinkPad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
    }

    if (m_pipeline) {
        // Remove the bus watch and sync handler before the pipeline goes away.
        GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
        ASSERT(bus);
        g_signal_handlers_disconnect_by_func(bus.get(), gpointer(busMessageCallback), this);
        gst_bus_remove_signal_watch(bus.get());
        gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
        g_signal_handlers_disconnect_matched(m_pipeline.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
    }
}
216
217 static void convertToInternalProtocol(URL& url)
218 {
219     if (url.protocolIsInHTTPFamily() || url.protocolIsBlob())
220         url.setProtocol("webkit+" + url.protocol());
221 }
222
223 void MediaPlayerPrivateGStreamer::setPlaybinURL(const URL& url)
224 {
225     // Clean out everything after file:// url path.
226     String cleanURLString(url.string());
227     if (url.isLocalFile())
228         cleanURLString = cleanURLString.substring(0, url.pathEnd());
229
230     m_url = URL(URL(), cleanURLString);
231     convertToInternalProtocol(m_url);
232
233     GST_INFO("Load %s", m_url.string().utf8().data());
234     g_object_set(m_pipeline.get(), "uri", m_url.string().utf8().data(), nullptr);
235 }
236
// Starts loading a media URL, creating the pipeline on first use. When
// preload is None the heavy lifting is deferred until prepareToPlay()/play().
void MediaPlayerPrivateGStreamer::load(const String& urlString)
{
    // FIXME: This method is still called even if supportsType() returned
    // IsNotSupported. This would deserve more investigation but meanwhile make
    // sure we don't ever try to play animated gif assets.
    if (m_player->contentMIMEType() == "image/gif") {
        loadingFailed(MediaPlayer::FormatError);
        return;
    }

    if (!MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements())
        return;

    URL url(URL(), urlString);
    if (url.isBlankURL())
        return;

    // The pipeline is created once and reused across loads. For MSE a legacy
    // "playbin" is requested explicitly; otherwise createGSTPlayBin decides.
    if (!m_pipeline)
        createGSTPlayBin(isMediaSource() ? "playbin" : nullptr);

    // A previous load may have left the buffering-progress poll timer running.
    if (m_fillTimer.isActive())
        m_fillTimer.stop();

    ASSERT(m_pipeline);

    setPlaybinURL(url);

    GST_DEBUG("preload: %s", convertEnumerationToString(m_preload).utf8().data());
    if (m_preload == MediaPlayer::None) {
        GST_INFO("Delaying load.");
        m_delayingLoad = true;
    }

    // Reset network and ready states. Those will be set properly once
    // the pipeline pre-rolled.
    m_networkState = MediaPlayer::Loading;
    m_player->networkStateChanged();
    m_readyState = MediaPlayer::HaveNothing;
    m_player->readyStateChanged();
    m_volumeAndMuteInitialized = false;
    m_durationAtEOS = MediaTime::invalidTime();

    if (!m_delayingLoad)
        commitLoad();
}
282
#if ENABLE(MEDIA_SOURCE)
// MSE loads are not handled by this class; report a format error so the
// engine registry falls back to another MediaPlayerPrivate implementation.
void MediaPlayerPrivateGStreamer::load(const String&, MediaSourcePrivateClient*)
{
    // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
}
#endif
291
#if ENABLE(MEDIA_STREAM)
// MediaStream playback is not implemented here; report a format error so the
// engine registry falls back to another MediaPlayerPrivate implementation.
void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate&)
{
    // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
    notImplemented();
}
#endif
301
// Actually kicks off loading: moves the pipeline to PAUSED so it prerolls,
// then refreshes download-buffering mode and the network/ready states.
void MediaPlayerPrivateGStreamer::commitLoad()
{
    ASSERT(!m_delayingLoad);
    GST_DEBUG("Committing load.");

    // GStreamer needs to have the pipeline set to a paused state to
    // start providing anything useful.
    changePipelineState(GST_STATE_PAUSED);

    setDownloadBuffering();
    updateStates();
}
314
// Queries the pipeline for the current position, with fallbacks for states
// in which the query cannot give a meaningful answer (EOS, just-finished
// seeks).
MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
{
    if (m_isEndReached) {
        // Position queries on a null pipeline return 0. If we're at
        // the end of the stream the pipeline is null but we want to
        // report either the seek time or the duration because this is
        // what the Media element spec expects us to do.
        if (m_seeking)
            return m_seekTime;

        MediaTime duration = durationMediaTime();
        return duration.isInvalid() ? MediaTime::zeroTime() : duration;
    }

    // Position is only available if no async state change is going on and the state is either paused or playing.
    gint64 position = GST_CLOCK_TIME_NONE;
    GstQuery* query= gst_query_new_position(GST_FORMAT_TIME);
    if (gst_element_query(m_pipeline.get(), query))
        gst_query_parse_position(query, 0, &position);
    gst_query_unref(query);

    GST_LOG("Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));

    MediaTime playbackPosition = MediaTime::zeroTime();
    GstClockTime gstreamerPosition = static_cast<GstClockTime>(position);
    if (GST_CLOCK_TIME_IS_VALID(gstreamerPosition))
        playbackPosition = MediaTime(gstreamerPosition, GST_SECOND);
    else if (m_canFallBackToLastFinishedSeekPosition)
        // The query can fail right after a seek; reuse the last seek target.
        playbackPosition = m_seekTime;

    return playbackPosition;
}
347
// Watchdog armed by changePipelineState(): after lingering in READY for too
// long, drop to NULL to release the pipeline's resources.
void MediaPlayerPrivateGStreamer::readyTimerFired()
{
    GST_DEBUG("In READY for too long. Releasing pipeline resources.");
    changePipelineState(GST_STATE_NULL);
}
353
// Requests a pipeline state change, skipping requests that are already
// satisfied or in flight, and (un)arming the READY-state watchdog timer.
// Returns false only when the state change failed outright.
bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
{
    ASSERT(m_pipeline);

    GstState currentState;
    GstState pending;

    gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
    // Already in (or already transitioning to) the requested state: no-op.
    if (currentState == newState || pending == newState) {
        GST_DEBUG("Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
            gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
        return true;
    }

    GST_DEBUG("Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState),
        gst_element_state_get_name(currentState), gst_element_state_get_name(pending));

#if USE(GSTREAMER_GL)
    // The GL video sink needs its GL context set up before prerolling.
    if (currentState == GST_STATE_READY && newState == GST_STATE_PAUSED)
        ensureGLVideoSinkContext();
#endif

    GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState);
    // A failure while toggling between PAUSED and PLAYING is tolerated;
    // failures from any other starting state are reported to the caller.
    GstState pausedOrPlaying = newState == GST_STATE_PLAYING ? GST_STATE_PAUSED : GST_STATE_PLAYING;
    if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE) {
        return false;
    }

    // Create a timer when entering the READY state so that we can free resources
    // if we stay for too long on READY.
    // Also lets remove the timer if we request a state change for any state other than READY.
    // See also https://bugs.webkit.org/show_bug.cgi?id=117354
    if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
        // Max interval in seconds to stay in the READY state on manual
        // state change requests.
        static const Seconds readyStateTimerDelay { 1_min };
        m_readyTimerHandler.startOneShot(readyStateTimerDelay);
    } else if (newState != GST_STATE_READY)
        m_readyTimerHandler.stop();

    return true;
}
396
397 void MediaPlayerPrivateGStreamer::prepareToPlay()
398 {
399     GST_DEBUG("Prepare to play");
400     m_preload = MediaPlayer::Auto;
401     if (m_delayingLoad) {
402         m_delayingLoad = false;
403         commitLoad();
404     }
405 }
406
407 void MediaPlayerPrivateGStreamer::play()
408 {
409     if (!m_playbackRate) {
410         m_playbackRatePause = true;
411         return;
412     }
413
414     if (changePipelineState(GST_STATE_PLAYING)) {
415         m_isEndReached = false;
416         m_delayingLoad = false;
417         m_preload = MediaPlayer::Auto;
418         setDownloadBuffering();
419         GST_INFO("Play");
420     } else {
421         loadingFailed(MediaPlayer::Empty);
422     }
423 }
424
425 void MediaPlayerPrivateGStreamer::pause()
426 {
427     m_playbackRatePause = false;
428     GstState currentState, pendingState;
429     gst_element_get_state(m_pipeline.get(), &currentState, &pendingState, 0);
430     if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
431         return;
432
433     if (changePipelineState(GST_STATE_PAUSED))
434         GST_INFO("Pause");
435     else
436         loadingFailed(MediaPlayer::Empty);
437 }
438
// Returns the media duration: the value cached at EOS when available,
// invalid before preroll or on error, and positive infinity when the
// duration query fails (e.g. for streams with unknown length).
MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
{
    if (!m_pipeline || m_errorOccured)
        return MediaTime::invalidTime();

    // A duration cached when EOS was reached takes precedence over a query.
    if (m_durationAtEOS.isValid())
        return m_durationAtEOS;

    // The duration query would fail on a not-prerolled pipeline.
    if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
        return MediaTime::invalidTime();

    gint64 timeLength = 0;

    if (!gst_element_query_duration(m_pipeline.get(), GST_FORMAT_TIME, &timeLength) || !GST_CLOCK_TIME_IS_VALID(timeLength)) {
        GST_DEBUG("Time duration query failed for %s", m_url.string().utf8().data());
        return MediaTime::positiveInfiniteTime();
    }

    GST_LOG("Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(timeLength));

    return MediaTime(timeLength, GST_SECOND);
    // FIXME: handle 3.14.9.5 properly
}
463
// Returns the playback position exposed to the MediaPlayer: the pending seek
// target while seeking, otherwise the queried pipeline position.
MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
{
    if (!m_pipeline || m_errorOccured)
        return MediaTime::invalidTime();

    if (m_seeking)
        return m_seekTime;

    // Workaround for
    // https://bugzilla.gnome.org/show_bug.cgi?id=639941 In GStreamer
    // 0.10.35 basesink reports wrong duration in case of EOS and
    // negative playback rate. There's no upstream accepted patch for
    // this bug yet, hence this temporary workaround.
    if (m_isEndReached && m_playbackRate < 0)
        return MediaTime::invalidTime();

    return playbackPosition();
}
482
// Initiates a seek to mediaTime. Seeks issued before the pipeline prerolled
// (or after EOS) are deferred via m_seekIsPending; seeks overlapping an
// in-flight one only record the new target.
void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime)
{
    if (!m_pipeline)
        return;

    if (m_errorOccured)
        return;

    GST_INFO("[Seek] seek attempt to %s", toString(mediaTime).utf8().data());

    // Avoid useless seeking.
    if (mediaTime == currentMediaTime())
        return;

    // Clamp the target to the media duration.
    MediaTime time = std::min(mediaTime, durationMediaTime());

    if (isLiveStream())
        return;

    GST_INFO("[Seek] seeking to %s", toString(time).utf8().data());

    if (m_seeking) {
        // Record the newest target; presumably replayed when the in-flight
        // seek completes (handled elsewhere). If a seek is merely queued,
        // just retarget it.
        m_timeOfOverlappingSeek = time;
        if (m_seekIsPending) {
            m_seekTime = time;
            return;
        }
    }

    GstState state;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
    if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
        GST_DEBUG("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
        return;
    }
    // Not prerolled yet (async state change, below PAUSED) or past EOS:
    // defer the seek until the pipeline reaches a seekable state.
    if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
        m_seekIsPending = true;
        if (m_isEndReached) {
            GST_DEBUG("[Seek] reset pipeline");
            m_resetPipeline = true;
            if (!changePipelineState(GST_STATE_PAUSED))
                loadingFailed(MediaPlayer::Empty);
        }
    } else {
        // We can seek now.
        if (!doSeek(time, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
            GST_DEBUG("[Seek] seeking to %s failed", toString(time).utf8().data());
            return;
        }
    }

    m_seeking = true;
    m_seekTime = time;
    m_isEndReached = false;
}
538
// Issues the actual gst_element_seek(). For negative rates the segment
// [0, position] is played in reverse. Returns the seek call's result.
bool MediaPlayerPrivateGStreamer::doSeek(const MediaTime& position, float rate, GstSeekFlags seekType)
{
    // Default values for rate >= 0.
    MediaTime startTime = position, endTime = MediaTime::invalidTime();

    // TODO: Should do more than that, need to notify the media source
    // and probably flush the pipeline at least.
    if (isMediaSource())
        return true;

    if (rate < 0) {
        startTime = MediaTime::zeroTime();
        // If we are at beginning of media, start from the end to
        // avoid immediate EOS.
        if (position < MediaTime::zeroTime())
            endTime = durationMediaTime();
        else
            endTime = position;
    }

    // A rate of 0 is replaced by 1.0 for the seek itself; rate-0 "playback"
    // is modeled by m_playbackRatePause elsewhere.
    if (!rate)
        rate = 1.0;

    return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType,
        GST_SEEK_TYPE_SET, toGstClockTime(startTime), GST_SEEK_TYPE_SET, toGstClockTime(endTime));
}
565
// Applies a pending playback rate change (flagged by m_changingRate) via a
// flushing seek at the current position, muting audio when the rate would
// make it unpleasant, and resuming playback held back by a zero rate.
void MediaPlayerPrivateGStreamer::updatePlaybackRate()
{
    if (!m_changingRate)
        return;

    GST_INFO("Set Rate to %f", m_playbackRate);

    // Mute the sound if the playback rate is negative or too extreme and audio pitch is not adjusted.
    bool mute = m_playbackRate <= 0 || (!m_preservesPitch && (m_playbackRate < 0.8 || m_playbackRate > 2));

    GST_INFO(mute ? "Need to mute audio" : "Do not need to mute audio");

    if (doSeek(playbackPosition(), m_playbackRate, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH))) {
        g_object_set(m_pipeline.get(), "mute", mute, nullptr);
        m_lastPlaybackRate = m_playbackRate;
    } else {
        // The rate-changing seek failed: roll back to the last rate that worked.
        m_playbackRate = m_lastPlaybackRate;
        GST_ERROR("Set rate to %f failed", m_playbackRate);
    }

    if (m_playbackRatePause) {
        GstState state;
        GstState pending;

        // Playback was previously held because the rate was 0; resume it now.
        gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
        if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
            changePipelineState(GST_STATE_PLAYING);
        m_playbackRatePause = false;
    }

    m_changingRate = false;
    m_player->rateChanged();
}
599
// Reports whether the element should be considered paused by the MediaPlayer.
bool MediaPlayerPrivateGStreamer::paused() const
{
    if (m_isEndReached) {
        // After EOS, report paused regardless of the pipeline state.
        GST_DEBUG("Ignoring pause at EOS");
        return true;
    }

    if (m_playbackRatePause) {
        // NOTE(review): with playback rate 0 the player reports "not paused"
        // (return false) even though the log text says PAUSED is being
        // simulated — confirm this contract is intended.
        GST_DEBUG("Playback rate is 0, simulating PAUSED state");
        return false;
    }

    // Otherwise derive the answer from the actual pipeline state.
    GstState state;
    gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
    bool paused = state <= GST_STATE_PAUSED;
    GST_DEBUG("Paused: %s", toString(paused).utf8().data());
    return paused;
}
618
// Whether a seek initiated by seek() is still outstanding.
bool MediaPlayerPrivateGStreamer::seeking() const
{
    return m_seeking;
}
623
624 #if GST_CHECK_VERSION(1, 10, 0)
625 void MediaPlayerPrivateGStreamer::updateTracks()
626 {
627     ASSERT(!m_isLegacyPlaybin);
628
629     bool useMediaSource = isMediaSource();
630     unsigned length = gst_stream_collection_get_size(m_streamCollection.get());
631     Vector<String> validAudioStreams;
632     Vector<String> validVideoStreams;
633     Vector<String> validTextStreams;
634     for (unsigned i = 0; i < length; i++) {
635         GRefPtr<GstStream> stream = gst_stream_collection_get_stream(m_streamCollection.get(), i);
636         String streamId(gst_stream_get_stream_id(stream.get()));
637         GstStreamType type = gst_stream_get_stream_type(stream.get());
638         GST_DEBUG("Inspecting %s track with ID %s", gst_stream_type_get_name(type), streamId.utf8().data());
639         if (type & GST_STREAM_TYPE_AUDIO) {
640             validAudioStreams.append(streamId);
641 #if ENABLE(VIDEO_TRACK)
642             if (!useMediaSource) {
643                 unsigned localIndex = i - validVideoStreams.size() - validTextStreams.size();
644                 if (localIndex < m_audioTracks.size()) {
645                     if (m_audioTracks.contains(streamId))
646                         continue;
647                 }
648
649                 RefPtr<AudioTrackPrivateGStreamer> track = AudioTrackPrivateGStreamer::create(createWeakPtr(), i, stream);
650                 m_audioTracks.add(track->id(), track);
651                 m_player->addAudioTrack(*track);
652             }
653 #endif
654         } else if (type & GST_STREAM_TYPE_VIDEO) {
655             validVideoStreams.append(streamId);
656 #if ENABLE(VIDEO_TRACK)
657             if (!useMediaSource) {
658                 unsigned localIndex = i - validAudioStreams.size() - validTextStreams.size();
659                 if (localIndex < m_videoTracks.size()) {
660                     if (m_videoTracks.contains(streamId))
661                         continue;
662                 }
663
664                 RefPtr<VideoTrackPrivateGStreamer> track = VideoTrackPrivateGStreamer::create(createWeakPtr(), i, stream);
665                 m_videoTracks.add(track->id(), track);
666                 m_player->addVideoTrack(*track);
667             }
668 #endif
669         } else if (type & GST_STREAM_TYPE_TEXT) {
670             validTextStreams.append(streamId);
671 #if ENABLE(VIDEO_TRACK)
672             if (!useMediaSource) {
673                 unsigned localIndex = i - validVideoStreams.size() - validAudioStreams.size();
674                 if (localIndex < m_textTracks.size()) {
675                     if (m_textTracks.contains(streamId))
676                         continue;
677                 }
678
679                 RefPtr<InbandTextTrackPrivateGStreamer> track = InbandTextTrackPrivateGStreamer::create(localIndex, stream);
680                 m_textTracks.add(streamId, track);
681                 m_player->addTextTrack(*track);
682             }
683 #endif
684         } else
685             GST_WARNING("Unknown track type found for stream %s", streamId.utf8().data());
686     }
687
688     GST_INFO("Media has %u video tracks, %u audio tracks and %u text tracks", validVideoStreams.size(), validAudioStreams.size(), validTextStreams.size());
689
690     bool oldHasAudio = m_hasAudio;
691     bool oldHasVideo = m_hasVideo;
692     m_hasAudio = !validAudioStreams.isEmpty();
693     m_hasVideo = !validVideoStreams.isEmpty();
694     if ((oldHasVideo != m_hasVideo) || (oldHasAudio != m_hasAudio))
695         m_player->characteristicChanged();
696
697     if (m_hasVideo)
698         m_player->sizeChanged();
699
700     if (useMediaSource) {
701         GST_DEBUG("Tracks managed by source element. Bailing out now.");
702         m_player->client().mediaPlayerEngineUpdated(m_player);
703         return;
704     }
705
706 #if ENABLE(VIDEO_TRACK)
707     purgeInvalidAudioTracks(validAudioStreams);
708     purgeInvalidVideoTracks(validVideoStreams);
709     purgeInvalidTextTracks(validTextStreams);
710 #endif
711
712     m_player->client().mediaPlayerEngineUpdated(m_player);
713 }
714 #endif
715
716 void MediaPlayerPrivateGStreamer::enableTrack(TrackPrivateBaseGStreamer::TrackType trackType, unsigned index)
717 {
718     const char* propertyName;
719     const char* trackTypeAsString;
720     GList* selectedStreams = nullptr;
721
722     switch (trackType) {
723     case TrackPrivateBaseGStreamer::TrackType::Audio:
724         propertyName = "current-audio";
725         trackTypeAsString = "audio";
726         if (!m_currentTextStreamId.isEmpty())
727             selectedStreams = g_list_append(selectedStreams, g_strdup(m_currentTextStreamId.utf8().data()));
728         if (!m_currentVideoStreamId.isEmpty())
729             selectedStreams = g_list_append(selectedStreams, g_strdup(m_currentVideoStreamId.utf8().data()));
730         break;
731     case TrackPrivateBaseGStreamer::TrackType::Video:
732         propertyName = "current-video";
733         trackTypeAsString = "video";
734         if (!m_currentAudioStreamId.isEmpty())
735             selectedStreams = g_list_append(selectedStreams, g_strdup(m_currentAudioStreamId.utf8().data()));
736         if (!m_currentTextStreamId.isEmpty())
737             selectedStreams = g_list_append(selectedStreams, g_strdup(m_currentTextStreamId.utf8().data()));
738         break;
739     case TrackPrivateBaseGStreamer::TrackType::Text:
740         propertyName = "current-text";
741         trackTypeAsString = "text";
742         if (!m_currentAudioStreamId.isEmpty())
743             selectedStreams = g_list_append(selectedStreams, g_strdup(m_currentAudioStreamId.utf8().data()));
744         if (!m_currentVideoStreamId.isEmpty())
745             selectedStreams = g_list_append(selectedStreams, g_strdup(m_currentVideoStreamId.utf8().data()));
746         break;
747     case TrackPrivateBaseGStreamer::TrackType::Unknown:
748     default:
749         ASSERT_NOT_REACHED();
750     }
751
752     GST_INFO("Enabling %s track with index: %lu", trackTypeAsString, index);
753     // FIXME: Remove isMediaSource() test below when fixing https://bugs.webkit.org/show_bug.cgi?id=182531
754     if (m_isLegacyPlaybin || isMediaSource()) {
755         GstElement* element = isMediaSource() ? m_source.get() : m_pipeline.get();
756         g_object_set(element, propertyName, index, nullptr);
757     }
758 #if GST_CHECK_VERSION(1, 10, 0)
759     else {
760         GstStream* stream = gst_stream_collection_get_stream(m_streamCollection.get(), index);
761         if (stream) {
762             String streamId = gst_stream_get_stream_id(stream);
763             selectedStreams = g_list_append(selectedStreams, g_strdup(streamId.utf8().data()));
764         } else
765             GST_WARNING("%s stream %lu not found", trackTypeAsString, index);
766
767         // TODO: MSE GstStream API support: https://bugs.webkit.org/show_bug.cgi?id=182531
768         gst_element_send_event(m_pipeline.get(), gst_event_new_select_streams(selectedStreams));
769     }
770 #endif
771
772     if (selectedStreams)
773         g_list_free_full(selectedStreams, reinterpret_cast<GDestroyNotify>(g_free));
774 }
775
// Signal trampoline for video track changes; bounces the notification to the
// main thread via the notifier.
void MediaPlayerPrivateGStreamer::videoChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier->notify(MainThreadNotification::VideoChanged, [player] { player->notifyPlayerOfVideo(); });
}
780
// Main-thread handler for video track changes on the legacy playbin / MSE
// path: refreshes m_videoTracks from playbin's "n-video"/"get-video-pad" API.
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    // playbin3 track management goes through updateTracks() instead.
    ASSERT(m_isLegacyPlaybin || isMediaSource());

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-video", &numTracks, nullptr);

    GST_INFO("Media has %d video tracks", numTracks);

    bool oldHasVideo = m_hasVideo;
    m_hasVideo = numTracks > 0;
    if (oldHasVideo != m_hasVideo)
        m_player->characteristicChanged();

    if (m_hasVideo)
        m_player->sizeChanged();

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        m_player->client().mediaPlayerEngineUpdated(m_player);
        return;
    }

#if ENABLE(VIDEO_TRACK)
    Vector<String> validVideoStreams;
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        // Legacy playbin exposes no stream IDs; synthesize stable ones
        // ("V0", "V1", ...) from the track index.
        String streamId = "V" + String::number(i);
        validVideoStreams.append(streamId);
        if (i < static_cast<gint>(m_videoTracks.size())) {
            RefPtr<VideoTrackPrivateGStreamer> existingTrack = m_videoTracks.get(streamId);
            if (existingTrack) {
                existingTrack->setIndex(i);
                // Same pad as before: nothing to update for this track.
                if (existingTrack->pad() == pad)
                    continue;
            }
        }

        RefPtr<VideoTrackPrivateGStreamer> track = VideoTrackPrivateGStreamer::create(createWeakPtr(), i, pad);
        ASSERT(streamId == track->id());
        m_videoTracks.add(streamId, track);
        m_player->addVideoTrack(*track);
    }

    // Drop wrappers for tracks that no longer exist.
    purgeInvalidVideoTracks(validVideoStreams);
#endif

    m_player->client().mediaPlayerEngineUpdated(m_player);
}
838
839 void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
840 {
841     player->m_notifier->notify(MainThreadNotification::VideoCapsChanged, [player] { player->notifyPlayerOfVideoCaps(); });
842 }
843
844 void MediaPlayerPrivateGStreamer::notifyPlayerOfVideoCaps()
845 {
846     m_videoSize = IntSize();
847     m_player->client().mediaPlayerEngineUpdated(m_player);
848 }
849
850 void MediaPlayerPrivateGStreamer::audioChangedCallback(MediaPlayerPrivateGStreamer* player)
851 {
852     player->m_notifier->notify(MainThreadNotification::AudioChanged, [player] { player->notifyPlayerOfAudio(); });
853 }
854
// Main-thread handler for playbin's "audio-changed" signal: refreshes
// m_hasAudio, reconciles the cached AudioTrackPrivateGStreamer objects with
// the pads playbin currently exposes, and notifies the MediaPlayer client.
void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
{
    // The pipeline or source may already be gone if the player was torn down
    // while this notification was pending on the main thread.
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    // With playbin3 tracks are managed through GstStreamCollection messages,
    // so this legacy path should only run for playbin2 or MSE.
    ASSERT(m_isLegacyPlaybin || isMediaSource());

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    // For MSE the source element owns the track count; otherwise ask playbin.
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-audio", &numTracks, nullptr);

    GST_INFO("Media has %d audio tracks", numTracks);
    bool oldHasAudio = m_hasAudio;
    m_hasAudio = numTracks > 0;
    if (oldHasAudio != m_hasAudio)
        m_player->characteristicChanged();

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        m_player->client().mediaPlayerEngineUpdated(m_player);
        return;
    }

#if ENABLE(VIDEO_TRACK)
    // Synthesize stable stream IDs ("A0", "A1", ...) and keep existing track
    // objects alive when their pad has not changed.
    Vector<String> validAudioStreams;
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        String streamId = "A" + String::number(i);
        validAudioStreams.append(streamId);
        if (i < static_cast<gint>(m_audioTracks.size())) {
            RefPtr<AudioTrackPrivateGStreamer> existingTrack = m_audioTracks.get(streamId);
            if (existingTrack) {
                existingTrack->setIndex(i);
                // Same pad as before: the cached track is still valid.
                if (existingTrack->pad() == pad)
                    continue;
            }
        }

        RefPtr<AudioTrackPrivateGStreamer> track = AudioTrackPrivateGStreamer::create(createWeakPtr(), i, pad);
        ASSERT(streamId == track->id());
        m_audioTracks.add(streamId, track);
        m_player->addAudioTrack(*track);
    }

    // Drop cached tracks whose stream ID is no longer reported.
    purgeInvalidAudioTracks(validAudioStreams);
#endif

    m_player->client().mediaPlayerEngineUpdated(m_player);
}
908
909 #if ENABLE(VIDEO_TRACK)
910 void MediaPlayerPrivateGStreamer::textChangedCallback(MediaPlayerPrivateGStreamer* player)
911 {
912     player->m_notifier->notify(MainThreadNotification::TextChanged, [player] { player->notifyPlayerOfText(); });
913 }
914
// Main-thread handler for playbin's "text-changed" signal: reconciles the
// cached InbandTextTrackPrivateGStreamer objects with the text pads playbin
// currently exposes.
void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
{
    // The pipeline or source may already be gone if the player was torn down
    // while this notification was pending on the main thread.
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    // With playbin3 tracks are managed through GstStreamCollection messages,
    // so this legacy path should only run for playbin2 or MSE.
    ASSERT(m_isLegacyPlaybin || isMediaSource());

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    // For MSE the source element owns the track count; otherwise ask playbin.
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-text", &numTracks, nullptr);

    GST_INFO("Media has %d text tracks", numTracks);

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        return;
    }

    Vector<String> validTextStreams;
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        // We can't assume the pad has a sticky event here like implemented in
        // InbandTextTrackPrivateGStreamer because it might be emitted after the
        // track was created. So fallback to a dummy stream ID like in the Audio
        // and Video tracks.
        String streamId = "T" + String::number(i);

        validTextStreams.append(streamId);
        if (i < static_cast<gint>(m_textTracks.size())) {
            RefPtr<InbandTextTrackPrivateGStreamer> existingTrack = m_textTracks.get(streamId);
            if (existingTrack) {
                existingTrack->setIndex(i);
                // Same pad as before: the cached track is still valid.
                if (existingTrack->pad() == pad)
                    continue;
            }
        }

        RefPtr<InbandTextTrackPrivateGStreamer> track = InbandTextTrackPrivateGStreamer::create(i, pad);
        m_textTracks.add(streamId, track);
        m_player->addTextTrack(*track);
    }

    // Drop cached tracks whose stream ID is no longer reported.
    purgeInvalidTextTracks(validTextStreams);
}
963
// Trampoline for the text appsink's "new-sample" signal. Always returns
// GST_FLOW_OK so the sink keeps delivering samples.
GstFlowReturn MediaPlayerPrivateGStreamer::newTextSampleCallback(MediaPlayerPrivateGStreamer* player)
{
    player->newTextSample();
    return GST_FLOW_OK;
}
969
970 void MediaPlayerPrivateGStreamer::newTextSample()
971 {
972     if (!m_textAppSink)
973         return;
974
975     GRefPtr<GstEvent> streamStartEvent = adoptGRef(
976         gst_pad_get_sticky_event(m_textAppSinkPad.get(), GST_EVENT_STREAM_START, 0));
977
978     GRefPtr<GstSample> sample;
979     g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr);
980     ASSERT(sample);
981
982     if (streamStartEvent) {
983         bool found = FALSE;
984         const gchar* id;
985         gst_event_parse_stream_start(streamStartEvent.get(), &id);
986         for (auto& track : m_textTracks.values()) {
987             if (!strcmp(track->streamId().utf8().data(), id)) {
988                 track->handleSample(sample);
989                 found = true;
990                 break;
991             }
992         }
993         if (!found)
994             GST_WARNING("Got sample with unknown stream ID %s.", id);
995     } else
996         GST_WARNING("Unable to handle sample with no stream start event.");
997 }
998 #endif
999
// Requests a new playback rate. Rates are clamped to [-20, 20]; a rate of 0
// is implemented by pausing the pipeline (remembered in m_playbackRatePause),
// and the actual rate-carrying seek is issued by updatePlaybackRate() once
// the pipeline is in a stable PLAYING/PAUSED state.
void MediaPlayerPrivateGStreamer::setRate(float rate)
{
    // Higher rate causes crash.
    rate = clampTo(rate, -20.0, 20.0);

    // Avoid useless playback rate update.
    if (m_playbackRate == rate) {
        // and make sure that upper layers were notified if rate was set

        if (!m_changingRate && m_player->rate() != m_playbackRate)
            m_player->rateChanged();
        return;
    }

    if (isLiveStream()) {
        // notify upper layers that we cannot handle passed rate.
        m_changingRate = false;
        m_player->rateChanged();
        return;
    }

    GstState state;
    GstState pending;

    m_playbackRate = rate;
    m_changingRate = true;

    gst_element_get_state(m_pipeline.get(), &state, &pending, 0);

    // A zero rate means "stop playing": pause the pipeline instead of seeking.
    if (!rate) {
        m_changingRate = false;
        m_playbackRatePause = true;
        if (state != GST_STATE_PAUSED && pending != GST_STATE_PAUSED)
            changePipelineState(GST_STATE_PAUSED);
        return;
    }

    // Only issue the rate change from a stable state; m_changingRate stays set
    // so the change can be applied later.
    if ((state != GST_STATE_PLAYING && state != GST_STATE_PAUSED)
        || (pending == GST_STATE_PAUSED))
        return;

    updatePlaybackRate();
}
1043
// Returns the playback rate last requested through setRate(); this is the
// cached value, not a live query against the pipeline.
double MediaPlayerPrivateGStreamer::rate() const
{
    return m_playbackRate;
}
1048
// Caches whether pitch should be preserved across rate changes. The setter
// has no immediate pipeline effect; presumably the flag is consumed when the
// playback rate is applied — confirm against updatePlaybackRate().
void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
{
    m_preservesPitch = preservesPitch;
}
1053
// Returns the buffered time ranges by issuing a percent-format buffering
// query on the pipeline; falls back to [0, maxTimeLoaded()] when the query
// reports no ranges. Errors and live streams yield an empty range set.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
{
    auto timeRanges = std::make_unique<PlatformTimeRanges>();
    if (m_errorOccured || isLiveStream())
        return timeRanges;

    // Percent-based ranges can only be mapped to media time with a known,
    // finite duration.
    MediaTime mediaDuration = durationMediaTime();
    if (!mediaDuration || mediaDuration.isPositiveInfinite())
        return timeRanges;

    GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);

    if (!gst_element_query(m_pipeline.get(), query)) {
        gst_query_unref(query);
        return timeRanges;
    }

    // Scale each [start%, stop%] range into media time.
    guint numBufferingRanges = gst_query_get_n_buffering_ranges(query);
    for (guint index = 0; index < numBufferingRanges; index++) {
        gint64 rangeStart = 0, rangeStop = 0;
        if (gst_query_parse_nth_buffering_range(query, index, &rangeStart, &rangeStop))
            timeRanges->add(MediaTime(rangeStart * toGstUnsigned64Time(mediaDuration) / GST_FORMAT_PERCENT_MAX, GST_SECOND),
                MediaTime(rangeStop * toGstUnsigned64Time(mediaDuration) / GST_FORMAT_PERCENT_MAX, GST_SECOND));
    }

    // Fallback to the more general maxTimeLoaded() if no range has
    // been found.
    if (!timeRanges->length())
        if (MediaTime loaded = maxTimeLoaded())
            timeRanges->add(MediaTime::zeroTime(), loaded);

    gst_query_unref(query);

    return timeRanges;
}
1089
1090 void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
1091 {
1092     GUniqueOutPtr<GError> err;
1093     GUniqueOutPtr<gchar> debug;
1094     MediaPlayer::NetworkState error;
1095     bool issueError = true;
1096     bool attemptNextLocation = false;
1097     const GstStructure* structure = gst_message_get_structure(message);
1098     GstState requestedState, currentState;
1099
1100     m_canFallBackToLastFinishedSeekPosition = false;
1101
1102     if (structure) {
1103         const gchar* messageTypeName = gst_structure_get_name(structure);
1104
1105         // Redirect messages are sent from elements, like qtdemux, to
1106         // notify of the new location(s) of the media.
1107         if (!g_strcmp0(messageTypeName, "redirect")) {
1108             mediaLocationChanged(message);
1109             return;
1110         }
1111     }
1112
1113     // We ignore state changes from internal elements. They are forwarded to playbin2 anyway.
1114     bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get());
1115
1116     GST_LOG("Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
1117     switch (GST_MESSAGE_TYPE(message)) {
1118     case GST_MESSAGE_ERROR:
1119         if (m_resetPipeline || !m_missingPluginCallbacks.isEmpty() || m_errorOccured)
1120             break;
1121         gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
1122         GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());
1123
1124         GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");
1125
1126         error = MediaPlayer::Empty;
1127         if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_CODEC_NOT_FOUND)
1128             || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE)
1129             || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED)
1130             || g_error_matches(err.get(), GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)
1131             || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
1132             error = MediaPlayer::FormatError;
1133         else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
1134             // Let the mediaPlayerClient handle the stream error, in
1135             // this case the HTMLMediaElement will emit a stalled
1136             // event.
1137             GST_ERROR("Decode error, let the Media element emit a stalled event.");
1138             m_loadingStalled = true;
1139             break;
1140         } else if (err->domain == GST_STREAM_ERROR) {
1141             error = MediaPlayer::DecodeError;
1142             attemptNextLocation = true;
1143         } else if (err->domain == GST_RESOURCE_ERROR)
1144             error = MediaPlayer::NetworkError;
1145
1146         if (attemptNextLocation)
1147             issueError = !loadNextLocation();
1148         if (issueError)
1149             loadingFailed(error);
1150         break;
1151     case GST_MESSAGE_EOS:
1152         didEnd();
1153         break;
1154     case GST_MESSAGE_ASYNC_DONE:
1155         if (!messageSourceIsPlaybin || m_delayingLoad)
1156             break;
1157         asyncStateChangeDone();
1158         break;
1159     case GST_MESSAGE_STATE_CHANGED: {
1160         if (!messageSourceIsPlaybin || m_delayingLoad)
1161             break;
1162         updateStates();
1163
1164         // Construct a filename for the graphviz dot file output.
1165         GstState newState;
1166         gst_message_parse_state_changed(message, &currentState, &newState, nullptr);
1167         CString dotFileName = String::format("webkit-video.%s_%s", gst_element_state_get_name(currentState), gst_element_state_get_name(newState)).utf8();
1168         GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());
1169
1170         break;
1171     }
1172     case GST_MESSAGE_BUFFERING:
1173         processBufferingStats(message);
1174         break;
1175     case GST_MESSAGE_DURATION_CHANGED:
1176         // Duration in MSE is managed by MediaSource, SourceBuffer and AppendPipeline.
1177         if (messageSourceIsPlaybin && !isMediaSource())
1178             durationChanged();
1179         break;
1180     case GST_MESSAGE_REQUEST_STATE:
1181         gst_message_parse_request_state(message, &requestedState);
1182         gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND);
1183         if (requestedState < currentState) {
1184             GST_INFO("Element %s requested state change to %s", GST_MESSAGE_SRC_NAME(message),
1185                 gst_element_state_get_name(requestedState));
1186             m_requestedState = requestedState;
1187             if (!changePipelineState(requestedState))
1188                 loadingFailed(MediaPlayer::Empty);
1189         }
1190         break;
1191     case GST_MESSAGE_CLOCK_LOST:
1192         // This can only happen in PLAYING state and we should just
1193         // get a new clock by moving back to PAUSED and then to
1194         // PLAYING again.
1195         // This can happen if the stream that ends in a sink that
1196         // provides the current clock disappears, for example if
1197         // the audio sink provides the clock and the audio stream
1198         // is disabled. It also happens relatively often with
1199         // HTTP adaptive streams when switching between different
1200         // variants of a stream.
1201         gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
1202         gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
1203         break;
1204     case GST_MESSAGE_LATENCY:
1205         // Recalculate the latency, we don't need any special handling
1206         // here other than the GStreamer default.
1207         // This can happen if the latency of live elements changes, or
1208         // for one reason or another a new live element is added or
1209         // removed from the pipeline.
1210         gst_bin_recalculate_latency(GST_BIN(m_pipeline.get()));
1211         break;
1212     case GST_MESSAGE_ELEMENT:
1213         if (gst_is_missing_plugin_message(message)) {
1214             if (gst_install_plugins_supported()) {
1215                 RefPtr<MediaPlayerRequestInstallMissingPluginsCallback> missingPluginCallback = MediaPlayerRequestInstallMissingPluginsCallback::create([weakThis = createWeakPtr()](uint32_t result, MediaPlayerRequestInstallMissingPluginsCallback& missingPluginCallback) {
1216                     if (!weakThis) {
1217                         GST_INFO("got missing pluging installation callback in destroyed player with result %u", result);
1218                         return;
1219                     }
1220
1221                     GST_DEBUG("got missing plugin installation callback with result %u", result);
1222                     RefPtr<MediaPlayerRequestInstallMissingPluginsCallback> protectedMissingPluginCallback = &missingPluginCallback;
1223                     weakThis->m_missingPluginCallbacks.removeFirst(protectedMissingPluginCallback);
1224                     if (result != GST_INSTALL_PLUGINS_SUCCESS)
1225                         return;
1226
1227                     weakThis->changePipelineState(GST_STATE_READY);
1228                     weakThis->changePipelineState(GST_STATE_PAUSED);
1229                 });
1230                 m_missingPluginCallbacks.append(missingPluginCallback);
1231                 GUniquePtr<char> detail(gst_missing_plugin_message_get_installer_detail(message));
1232                 GUniquePtr<char> description(gst_missing_plugin_message_get_description(message));
1233                 m_player->client().requestInstallMissingPlugins(String::fromUTF8(detail.get()), String::fromUTF8(description.get()), *missingPluginCallback);
1234             }
1235         }
1236 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
1237         else if (GstMpegtsSection* section = gst_message_parse_mpegts_section(message)) {
1238             processMpegTsSection(section);
1239             gst_mpegts_section_unref(section);
1240         }
1241 #endif
1242 #if ENABLE(ENCRYPTED_MEDIA)
1243         else if (gst_structure_has_name(structure, "drm-key-needed")) {
1244             GST_DEBUG("drm-key-needed message from %s", GST_MESSAGE_SRC_NAME(message));
1245             GRefPtr<GstEvent> event;
1246             gst_structure_get(structure, "event", GST_TYPE_EVENT, &event.outPtr(), nullptr);
1247             handleProtectionEvent(event.get());
1248         }
1249 #endif
1250         else if (gst_structure_has_name(structure, "http-headers")) {
1251             GstStructure* responseHeaders;
1252             if (gst_structure_get(structure, "response-headers", GST_TYPE_STRUCTURE, &responseHeaders, nullptr)) {
1253                 if (!gst_structure_has_field(responseHeaders, httpHeaderNameString(HTTPHeaderName::ContentLength).utf8().data())) {
1254                     GST_INFO("Live stream detected. Disabling on-disk buffering");
1255                     m_isStreaming = true;
1256                     setDownloadBuffering();
1257                 }
1258                 gst_structure_free(responseHeaders);
1259             }
1260         } else
1261             GST_DEBUG("Unhandled element message: %" GST_PTR_FORMAT, structure);
1262         break;
1263 #if ENABLE(VIDEO_TRACK)
1264     case GST_MESSAGE_TOC:
1265         processTableOfContents(message);
1266         break;
1267 #endif
1268     case GST_MESSAGE_TAG: {
1269         GstTagList* tags = nullptr;
1270         GUniqueOutPtr<gchar> tag;
1271         gst_message_parse_tag(message, &tags);
1272         if (gst_tag_list_get_string(tags, GST_TAG_IMAGE_ORIENTATION, &tag.outPtr())) {
1273             if (!g_strcmp0(tag.get(), "rotate-90"))
1274                 setVideoSourceOrientation(ImageOrientation(OriginRightTop));
1275             else if (!g_strcmp0(tag.get(), "rotate-180"))
1276                 setVideoSourceOrientation(ImageOrientation(OriginBottomRight));
1277             else if (!g_strcmp0(tag.get(), "rotate-270"))
1278                 setVideoSourceOrientation(ImageOrientation(OriginLeftBottom));
1279         }
1280         gst_tag_list_unref(tags);
1281         break;
1282     }
1283 #if GST_CHECK_VERSION(1, 10, 0)
1284     case GST_MESSAGE_STREAM_COLLECTION: {
1285         GRefPtr<GstStreamCollection> collection;
1286         gst_message_parse_stream_collection(message, &collection.outPtr());
1287
1288         if (collection) {
1289             m_streamCollection.swap(collection);
1290             m_notifier->notify(MainThreadNotification::StreamCollectionChanged, [this] {
1291                 this->updateTracks();
1292             });
1293         }
1294         break;
1295     }
1296     case GST_MESSAGE_STREAMS_SELECTED: {
1297         GRefPtr<GstStreamCollection> collection;
1298         gst_message_parse_streams_selected(message, &collection.outPtr());
1299
1300         if (!collection)
1301             break;
1302
1303         m_streamCollection.swap(collection);
1304         m_currentAudioStreamId = "";
1305         m_currentVideoStreamId = "";
1306         m_currentTextStreamId = "";
1307
1308         unsigned length = gst_message_streams_selected_get_size(message);
1309         for (unsigned i = 0; i < length; i++) {
1310             GRefPtr<GstStream> stream = adoptGRef(gst_message_streams_selected_get_stream(message, i));
1311             if (!stream)
1312                 continue;
1313             GstStreamType type = gst_stream_get_stream_type(stream.get());
1314             String streamId(gst_stream_get_stream_id(stream.get()));
1315
1316             GST_DEBUG("Selecting %s track with ID: %s", gst_stream_type_get_name(type), streamId.utf8().data());
1317             // Playbin3 can send more than one selected stream of the same type
1318             // but there's no priority or ordering system in place, so we assume
1319             // the selected stream is the last one as reported by playbin3.
1320             if (type & GST_STREAM_TYPE_AUDIO) {
1321                 m_currentAudioStreamId = streamId;
1322                 auto track = m_audioTracks.get(m_currentAudioStreamId);
1323                 ASSERT(track);
1324                 track->markAsActive();
1325             } else if (type & GST_STREAM_TYPE_VIDEO) {
1326                 m_currentVideoStreamId = streamId;
1327                 auto track = m_videoTracks.get(m_currentVideoStreamId);
1328                 ASSERT(track);
1329                 track->markAsActive();
1330             } else if (type & GST_STREAM_TYPE_TEXT)
1331                 m_currentTextStreamId = streamId;
1332             else
1333                 GST_WARNING("Unknown stream type with stream-id %s", streamId);
1334         }
1335         break;
1336     }
1337 #endif
1338     default:
1339         GST_DEBUG("Unhandled GStreamer message type: %s", GST_MESSAGE_TYPE_NAME(message));
1340         break;
1341     }
1342 }
1343
1344 void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message)
1345 {
1346     m_buffering = true;
1347     gst_message_parse_buffering(message, &m_bufferingPercentage);
1348
1349     GST_DEBUG("[Buffering] Buffering: %d%%.", m_bufferingPercentage);
1350
1351     if (m_bufferingPercentage == 100)
1352         updateStates();
1353 }
1354
1355 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
// Handles an MPEG-TS section coming from the demuxer. A PMT section rebuilds
// the set of in-band metadata text tracks (one per private/user stream);
// any other section is delivered as a data cue on the track matching its PID.
void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section)
{
    ASSERT(section);

    if (section->section_type == GST_MPEGTS_SECTION_PMT) {
        const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
        // A new PMT invalidates the previously exposed metadata tracks.
        m_metadataTracks.clear();
        for (guint i = 0; i < pmt->streams->len; ++i) {
            const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
            // 0x05 (private sections) and >= 0x80 (user private) carry metadata.
            if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
                AtomicString pid = String::number(stream->pid);
                RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = InbandMetadataTextTrackPrivateGStreamer::create(
                    InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data, pid);

                // 4.7.10.12.2 Sourcing in-band text tracks
                // If the new text track's kind is metadata, then set the text track in-band metadata track dispatch
                // type as follows, based on the type of the media resource:
                // Let stream type be the value of the "stream_type" field describing the text track's type in the
                // file's program map section, interpreted as an 8-bit unsigned integer. Let length be the value of
                // the "ES_info_length" field for the track in the same part of the program map section, interpreted
                // as an integer as defined by the MPEG-2 specification. Let descriptor bytes be the length bytes
                // following the "ES_info_length" field. The text track in-band metadata track dispatch type must be
                // set to the concatenation of the stream type byte and the zero or more descriptor bytes bytes,
                // expressed in hexadecimal using uppercase ASCII hex digits.
                String inbandMetadataTrackDispatchType;
                appendUnsignedAsHexFixedSize(stream->stream_type, inbandMetadataTrackDispatchType, 2);
                for (guint j = 0; j < stream->descriptors->len; ++j) {
                    const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
                    for (guint k = 0; k < descriptor->length; ++k)
                        appendByteAsHex(descriptor->data[k], inbandMetadataTrackDispatchType);
                }
                track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType);

                m_metadataTracks.add(pid, track);
                m_player->addTextTrack(*track);
            }
        }
    } else {
        // Non-PMT section: route its payload to the track for this PID, if any.
        AtomicString pid = String::number(section->pid);
        RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = m_metadataTracks.get(pid);
        if (!track)
            return;

        GRefPtr<GBytes> data = gst_mpegts_section_get_data(section);
        gsize size;
        const void* bytes = g_bytes_get_data(data.get(), &size);

        // Zero-duration cue anchored at the current playback position.
        track->addDataCue(currentMediaTime(), currentMediaTime(), bytes, size);
    }
}
1406 #endif
1407
1408 #if ENABLE(VIDEO_TRACK)
1409 void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
1410 {
1411     if (m_chaptersTrack)
1412         m_player->removeTextTrack(*m_chaptersTrack);
1413
1414     m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Chapters, InbandTextTrackPrivate::Generic);
1415     m_player->addTextTrack(*m_chaptersTrack);
1416
1417     GRefPtr<GstToc> toc;
1418     gboolean updated;
1419     gst_message_parse_toc(message, &toc.outPtr(), &updated);
1420     ASSERT(toc);
1421
1422     for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next)
1423         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1424 }
1425
1426 void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry)
1427 {
1428     ASSERT(entry);
1429
1430     auto cue = GenericCueData::create();
1431
1432     gint64 start = -1, stop = -1;
1433     gst_toc_entry_get_start_stop_times(entry, &start, &stop);
1434     if (start != -1)
1435         cue->setStartTime(MediaTime(start, GST_SECOND));
1436     if (stop != -1)
1437         cue->setEndTime(MediaTime(stop, GST_SECOND));
1438
1439     GstTagList* tags = gst_toc_entry_get_tags(entry);
1440     if (tags) {
1441         gchar* title =  nullptr;
1442         gst_tag_list_get_string(tags, GST_TAG_TITLE, &title);
1443         if (title) {
1444             cue->setContent(title);
1445             g_free(title);
1446         }
1447     }
1448
1449     m_chaptersTrack->addGenericCue(cue);
1450
1451     for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next)
1452         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1453 }
1454
1455 void MediaPlayerPrivateGStreamer::purgeInvalidAudioTracks(Vector<String> validTrackIds)
1456 {
1457     m_audioTracks.removeIf([validTrackIds](auto& keyAndValue) {
1458         return !validTrackIds.contains(keyAndValue.key);
1459     });
1460 }
1461
1462 void MediaPlayerPrivateGStreamer::purgeInvalidVideoTracks(Vector<String> validTrackIds)
1463 {
1464     m_videoTracks.removeIf([validTrackIds](auto& keyAndValue) {
1465         return !validTrackIds.contains(keyAndValue.key);
1466     });
1467 }
1468
1469 void MediaPlayerPrivateGStreamer::purgeInvalidTextTracks(Vector<String> validTrackIds)
1470 {
1471     m_textTracks.removeIf([validTrackIds](auto& keyAndValue) {
1472         return !validTrackIds.contains(keyAndValue.key);
1473     });
1474 }
1475 #endif
1476
1477 static int findHLSQueue(const GValue* item)
1478 {
1479     GstElement* element = GST_ELEMENT(g_value_get_object(item));
1480     if (g_str_has_prefix(GST_ELEMENT_NAME(element), "queue")) {
1481         GstElement* parent = GST_ELEMENT(GST_ELEMENT_PARENT(element));
1482         if (!GST_IS_OBJECT(parent))
1483             return 1;
1484
1485         if (g_str_has_prefix(GST_ELEMENT_NAME(GST_ELEMENT_PARENT(parent)), "hlsdemux"))
1486             return 0;
1487     }
1488
1489     return 1;
1490 }
1491
// Runs `query` against the queue element inside the HLS demuxer, if one
// exists in the playbin hierarchy. Used as a proxy progress measure for live
// HLS pipelines where a buffering query on the pipeline itself fails.
static bool isHLSProgressing(GstElement* playbin, GstQuery* query)
{
    // Zero-initialized GValue, filled in by gst_iterator_find_custom on match.
    GValue item = { };
    GstIterator* binIterator = gst_bin_iterate_recurse(GST_BIN(playbin));
    bool foundHLSQueue = gst_iterator_find_custom(binIterator, reinterpret_cast<GCompareFunc>(findHLSQueue), &item, nullptr);
    gst_iterator_free(binIterator);

    if (!foundHLSQueue)
        return false;

    GstElement* queueElement = GST_ELEMENT(g_value_get_object(&item));
    bool queryResult = gst_element_query(queueElement, query);
    // Release the reference the iterator stored in `item`.
    g_value_unset(&item);

    return queryResult;
}
1508
// Periodic timer callback that polls the download-buffer fill level and
// updates m_maxTimeLoaded accordingly. The timer stops itself once the
// download completes.
void MediaPlayerPrivateGStreamer::fillTimerFired()
{
    GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);

    if (G_UNLIKELY(!gst_element_query(m_pipeline.get(), query))) {
        // This query always fails for live pipelines. In the case of HLS, try and find
        // the queue inside the HLS element to get a proxy measure of progress. Note
        // that the percentage value is rather meaningless as used below.
        // This is a hack, see https://bugs.webkit.org/show_bug.cgi?id=141469.
        if (!isHLSProgressing(m_pipeline.get(), query)) {
            gst_query_unref(query);
            return;
        }
    }

    gint64 start, stop;
    gdouble fillStatus = 100.0;

    // `stop` is in GST_FORMAT_PERCENT units; -1 means unknown.
    gst_query_parse_buffering_range(query, nullptr, &start, &stop, nullptr);
    gst_query_unref(query);

    if (stop != -1)
        fillStatus = 100.0 * stop / GST_FORMAT_PERCENT_MAX;

    GST_DEBUG("[Buffering] Download buffer filled up to %f%%", fillStatus);

    MediaTime mediaDuration = durationMediaTime();

    // Update maxTimeLoaded only if the media duration is
    // available. Otherwise we can't compute it.
    if (mediaDuration) {
        if (fillStatus == 100.0)
            m_maxTimeLoaded = mediaDuration;
        else
            m_maxTimeLoaded = MediaTime(fillStatus * static_cast<double>(toGstUnsigned64Time(mediaDuration)) / 100, GST_SECOND);
        GST_DEBUG("[Buffering] Updated maxTimeLoaded: %s", toString(m_maxTimeLoaded).utf8().data());
    }

    m_downloadFinished = fillStatus == 100.0;
    if (!m_downloadFinished) {
        updateStates();
        return;
    }

    // Media is now fully loaded. It will play even if network
    // connection is cut. Buffering is done, remove the fill source
    // from the main loop.
    m_fillTimer.stop();
    updateStates();
}
1559
1560 MediaTime MediaPlayerPrivateGStreamer::maxMediaTimeSeekable() const
1561 {
1562     if (m_errorOccured)
1563         return MediaTime::zeroTime();
1564
1565     MediaTime duration = durationMediaTime();
1566     GST_DEBUG("maxMediaTimeSeekable, duration: %s", toString(duration).utf8().data());
1567     // infinite duration means live stream
1568     if (duration.isPositiveInfinite())
1569         return MediaTime::zeroTime();
1570
1571     return duration;
1572 }
1573
1574 MediaTime MediaPlayerPrivateGStreamer::maxTimeLoaded() const
1575 {
1576     if (m_errorOccured)
1577         return MediaTime::zeroTime();
1578
1579     MediaTime loaded = m_maxTimeLoaded;
1580     if (m_isEndReached)
1581         loaded = durationMediaTime();
1582     GST_LOG("maxTimeLoaded: %s", toString(loaded).utf8().data());
1583     return loaded;
1584 }
1585
1586 bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
1587 {
1588     if (m_errorOccured || m_loadingStalled)
1589         return false;
1590
1591     if (isLiveStream())
1592         return true;
1593
1594     if (UNLIKELY(!m_pipeline || !durationMediaTime() || (!isMediaSource() && !totalBytes())))
1595         return false;
1596
1597     MediaTime currentMaxTimeLoaded = maxTimeLoaded();
1598     bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
1599     m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
1600     GST_LOG("didLoadingProgress: %s", toString(didLoadingProgress).utf8().data());
1601     return didLoadingProgress;
1602 }
1603
// Returns the total size of the media in bytes, caching the result in the
// mutable m_totalBytes. Returns 0 after an error, before a source exists, or
// for live streams. Also updates m_isStreaming as a side effect (a zero
// length is treated as streaming).
unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
{
    if (m_errorOccured)
        return 0;

    // Serve the cached value if a previous call already computed it.
    if (m_totalBytes)
        return m_totalBytes;

    if (!m_source)
        return 0;

    if (isLiveStream())
        return 0;

    GstFormat fmt = GST_FORMAT_BYTES;
    gint64 length = 0;
    // First try a duration query on the source element itself.
    if (gst_element_query_duration(m_source.get(), fmt, &length)) {
        GST_INFO("totalBytes %" G_GINT64_FORMAT, length);
        m_totalBytes = static_cast<unsigned long long>(length);
        m_isStreaming = !length;
        return m_totalBytes;
    }

    // Fall back to querying the source pads manually.
    // See also https://bugzilla.gnome.org/show_bug.cgi?id=638749
    GstIterator* iter = gst_element_iterate_src_pads(m_source.get());
    bool done = false;
    while (!done) {
        GValue item = G_VALUE_INIT;
        switch (gst_iterator_next(iter, &item)) {
        case GST_ITERATOR_OK: {
            GstPad* pad = static_cast<GstPad*>(g_value_get_object(&item));
            gint64 padLength = 0;
            // Keep the largest duration reported by any source pad.
            if (gst_pad_query_duration(pad, fmt, &padLength) && padLength > length)
                length = padLength;
            break;
        }
        case GST_ITERATOR_RESYNC:
            // The pad list changed under us; restart the iteration.
            gst_iterator_resync(iter);
            break;
        case GST_ITERATOR_ERROR:
            FALLTHROUGH;
        case GST_ITERATOR_DONE:
            done = true;
            break;
        }

        g_value_unset(&item);
    }

    gst_iterator_free(iter);

    GST_INFO("totalBytes %" G_GINT64_FORMAT, length);
    m_totalBytes = static_cast<unsigned long long>(length);
    m_isStreaming = !length;
    return m_totalBytes;
}
1661
// "source-setup" signal trampoline: forwards the newly created source
// element to the player instance.
void MediaPlayerPrivateGStreamer::sourceSetupCallback(MediaPlayerPrivateGStreamer* player, GstElement* sourceElement)
{
    player->sourceSetup(sourceElement);
}
1666
// "element-added" handler on uridecodebin: waits for the GstDownloadBuffer
// element to appear, then redirects its temporary-file template to /var/tmp
// and purges files left behind by previous sessions.
void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GstElement* element, MediaPlayerPrivateGStreamer* player)
{
    // Only the download buffer element is of interest here.
    if (g_strcmp0(G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(G_OBJECT(element))), "GstDownloadBuffer"))
        return;

    player->m_downloadBuffer = element;
    // One-shot: stop listening for further element additions, and watch for
    // the element creating its temporary file instead.
    g_signal_handlers_disconnect_by_func(bin, reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), player);
    g_signal_connect_swapped(element, "notify::temp-location", G_CALLBACK(downloadBufferFileCreatedCallback), player);

    GUniqueOutPtr<char> oldDownloadTemplate;
    g_object_get(element, "temp-template", &oldDownloadTemplate.outPtr(), nullptr);

    GUniquePtr<char> newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, "var", "tmp", "WebKit-Media-XXXXXX", nullptr));
    g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr);
    GST_DEBUG("Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get());

    player->purgeOldDownloadFiles(oldDownloadTemplate.get());
}
1685
// "notify::temp-location" handler: as soon as the download buffer has created
// its temporary file, unlink it so it does not linger on disk.
// NOTE(review): presumably the element keeps the file descriptor open so the
// data remains usable after the unlink — confirm against GstDownloadBuffer.
void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer* player)
{
    ASSERT(player->m_downloadBuffer);

    // One-shot notification; we no longer need the element reference either.
    g_signal_handlers_disconnect_by_func(player->m_downloadBuffer.get(), reinterpret_cast<gpointer>(downloadBufferFileCreatedCallback), player);

    GUniqueOutPtr<char> downloadFile;
    g_object_get(player->m_downloadBuffer.get(), "temp-location", &downloadFile.outPtr(), nullptr);
    player->m_downloadBuffer = nullptr;

    if (UNLIKELY(!FileSystem::deleteFile(downloadFile.get()))) {
        GST_WARNING("Couldn't unlink media temporary file %s after creation", downloadFile.get());
        return;
    }

    GST_DEBUG("Unlinked media temporary file %s after creation", downloadFile.get());
}
1703
1704 void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate)
1705 {
1706     if (!downloadFileTemplate)
1707         return;
1708
1709     GUniquePtr<char> templatePath(g_path_get_dirname(downloadFileTemplate));
1710     GUniquePtr<char> templateFile(g_path_get_basename(downloadFileTemplate));
1711     String templatePattern = String(templateFile.get()).replace("X", "?");
1712
1713     for (auto& filePath : FileSystem::listDirectory(templatePath.get(), templatePattern)) {
1714         if (UNLIKELY(!FileSystem::deleteFile(filePath))) {
1715             GST_WARNING("Couldn't unlink legacy media temporary file: %s", filePath.utf8().data());
1716             continue;
1717         }
1718
1719         GST_TRACE("Unlinked legacy media temporary file: %s", filePath.utf8().data());
1720     }
1721 }
1722
// Called when playbin has created (or replaced) its source element. Keeps
// m_source current and, for the WebKit source element, attaches the media
// player and watches the parent bin for the download buffer element.
void MediaPlayerPrivateGStreamer::sourceSetup(GstElement* sourceElement)
{
    GST_DEBUG("Source element set-up for %s", GST_ELEMENT_NAME(sourceElement));

    // Drop the "element-added" handler installed for the previous source, if any.
    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    m_source = sourceElement;

    if (WEBKIT_IS_WEB_SRC(m_source.get())) {
        webKitWebSrcSetMediaPlayer(WEBKIT_WEB_SRC(m_source.get()), m_player);
        g_signal_connect(GST_ELEMENT_PARENT(m_source.get()), "element-added", G_CALLBACK(uriDecodeBinElementAddedCallback), this);
    }
}
1737
1738 bool MediaPlayerPrivateGStreamer::hasSingleSecurityOrigin() const
1739 {
1740     if (!m_source)
1741         return false;
1742
1743     if (!WEBKIT_IS_WEB_SRC(m_source.get()))
1744         return true;
1745
1746     GUniqueOutPtr<char> originalURI, resolvedURI;
1747     g_object_get(m_source.get(), "location", &originalURI.outPtr(), "resolved-location", &resolvedURI.outPtr(), nullptr);
1748     if (!originalURI || !resolvedURI)
1749         return false;
1750     if (!g_strcmp0(originalURI.get(), resolvedURI.get()))
1751         return true;
1752
1753     Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::createFromString(String::fromUTF8(resolvedURI.get())));
1754     Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(String::fromUTF8(originalURI.get())));
1755     return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
1756 }
1757
1758 void MediaPlayerPrivateGStreamer::cancelLoad()
1759 {
1760     if (m_networkState < MediaPlayer::Loading || m_networkState == MediaPlayer::Loaded)
1761         return;
1762
1763     if (m_pipeline)
1764         changePipelineState(GST_STATE_READY);
1765 }
1766
// Called when an asynchronous pipeline state change completes. Finalizes an
// in-flight seek, replaying an overlapping seek request if one arrived while
// the previous one was still being serviced; otherwise just refreshes the
// aggregate states.
void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
{
    if (!m_pipeline || m_errorOccured)
        return;

    if (m_seeking) {
        if (m_seekIsPending)
            updateStates();
        else {
            GST_DEBUG("[Seek] seeked to %s", toString(m_seekTime).utf8().data());
            m_seeking = false;
            // Service a seek that was requested while this one was in flight.
            if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek.isValid()) {
                seek(m_timeOfOverlappingSeek);
                m_timeOfOverlappingSeek = MediaTime::invalidTime();
                return;
            }
            m_timeOfOverlappingSeek = MediaTime::invalidTime();

            // The pipeline can still have a pending state. In this case a position query will fail.
            // Right now we can use m_seekTime as a fallback.
            m_canFallBackToLastFinishedSeekPosition = true;
            timeChanged();
        }
    } else
        updateStates();
}
1793
// Central state machine: maps the GStreamer pipeline state (and the result of
// querying it) onto MediaPlayer network/ready states, handles buffering
// pause/resume, and notifies the client of any state transitions. Also
// commits a pending seek once the pipeline has settled in PAUSED or above.
void MediaPlayerPrivateGStreamer::updateStates()
{
    if (!m_pipeline)
        return;

    if (m_errorOccured)
        return;

    MediaPlayer::NetworkState oldNetworkState = m_networkState;
    MediaPlayer::ReadyState oldReadyState = m_readyState;
    GstState pending;
    GstState state;
    bool stateReallyChanged = false;

    // Short (250ns) timeout: we only want a snapshot, not to block on an
    // ongoing async state change.
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
    if (state != m_currentState) {
        m_oldState = m_currentState;
        m_currentState = state;
        stateReallyChanged = true;
    }

    bool shouldUpdatePlaybackState = false;
    switch (getStateResult) {
    case GST_STATE_CHANGE_SUCCESS: {
        GST_DEBUG("State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));

        // Do nothing if on EOS and state changed to READY to avoid recreating the player
        // on HTMLMediaElement and properly generate the video 'ended' event.
        if (m_isEndReached && m_currentState == GST_STATE_READY)
            break;

        m_resetPipeline = m_currentState <= GST_STATE_READY;

        bool didBuffering = m_buffering;

        // Update ready and network states.
        switch (m_currentState) {
        case GST_STATE_NULL:
            m_readyState = MediaPlayer::HaveNothing;
            m_networkState = MediaPlayer::Empty;
            break;
        case GST_STATE_READY:
            m_readyState = MediaPlayer::HaveMetadata;
            m_networkState = MediaPlayer::Empty;
            break;
        case GST_STATE_PAUSED:
        case GST_STATE_PLAYING:
            if (m_buffering) {
                if (m_bufferingPercentage == 100) {
                    GST_DEBUG("[Buffering] Complete.");
                    m_buffering = false;
                    m_readyState = MediaPlayer::HaveEnoughData;
                    m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
                } else {
                    m_readyState = MediaPlayer::HaveCurrentData;
                    m_networkState = MediaPlayer::Loading;
                }
            } else if (m_downloadFinished) {
                m_readyState = MediaPlayer::HaveEnoughData;
                m_networkState = MediaPlayer::Loaded;
            } else {
                m_readyState = MediaPlayer::HaveFutureData;
                m_networkState = MediaPlayer::Loading;
            }

            break;
        default:
            ASSERT_NOT_REACHED();
            break;
        }

        // Sync states where needed.
        if (m_currentState == GST_STATE_PAUSED) {
            if (!m_volumeAndMuteInitialized) {
                notifyPlayerOfVolumeChange();
                notifyPlayerOfMute();
                m_volumeAndMuteInitialized = true;
            }

            // Buffering just finished and the client expects playback: resume.
            if (didBuffering && !m_buffering && !m_paused && m_playbackRate) {
                GST_DEBUG("[Buffering] Restarting playback.");
                changePipelineState(GST_STATE_PLAYING);
            }
        } else if (m_currentState == GST_STATE_PLAYING) {
            m_paused = false;

            // Pause a non-live stream while buffering (or at zero rate).
            if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
                GST_DEBUG("[Buffering] Pausing stream for buffering.");
                changePipelineState(GST_STATE_PAUSED);
            }
        } else
            m_paused = true;

        GST_DEBUG("Old state: %s, new state: %s (requested: %s)", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState), gst_element_state_get_name(m_requestedState));
        if (m_requestedState == GST_STATE_PAUSED && m_currentState == GST_STATE_PAUSED) {
            shouldUpdatePlaybackState = true;
            GST_INFO("Requested state change to %s was completed", gst_element_state_get_name(m_currentState));
        }

        // Emit play state change notification only when going to PLAYING so that
        // the media element gets a chance to enable its page sleep disabler.
        // Emitting this notification in more cases triggers unwanted code paths
        // and test timeouts.
        if (stateReallyChanged && (m_oldState != m_currentState) && (m_oldState == GST_STATE_PAUSED && m_currentState == GST_STATE_PLAYING)) {
            GST_INFO("Playback state changed from %s to %s. Notifying the media player client", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState));
            shouldUpdatePlaybackState = true;
        }

        break;
    }
    case GST_STATE_CHANGE_ASYNC:
        GST_DEBUG("Async: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
        // Change in progress.
        break;
    case GST_STATE_CHANGE_FAILURE:
        GST_DEBUG("Failure: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
        // Change failed
        return;
    case GST_STATE_CHANGE_NO_PREROLL:
        GST_DEBUG("No preroll: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));

        // Live pipelines go in PAUSED without prerolling.
        m_isStreaming = true;
        setDownloadBuffering();

        if (m_currentState == GST_STATE_READY)
            m_readyState = MediaPlayer::HaveNothing;
        else if (m_currentState == GST_STATE_PAUSED) {
            m_readyState = MediaPlayer::HaveEnoughData;
            m_paused = true;
        } else if (m_currentState == GST_STATE_PLAYING)
            m_paused = false;

        if (!m_paused && m_playbackRate)
            changePipelineState(GST_STATE_PLAYING);

        m_networkState = MediaPlayer::Loading;
        break;
    default:
        GST_DEBUG("Else : %d", getStateResult);
        break;
    }

    m_requestedState = GST_STATE_VOID_PENDING;

    if (shouldUpdatePlaybackState)
        m_player->playbackStateChanged();

    if (m_networkState != oldNetworkState) {
        GST_DEBUG("Network State Changed from %s to %s", convertEnumerationToString(oldNetworkState).utf8().data(), convertEnumerationToString(m_networkState).utf8().data());
        m_player->networkStateChanged();
    }
    if (m_readyState != oldReadyState) {
        GST_DEBUG("Ready State Changed from %s to %s", convertEnumerationToString(oldReadyState).utf8().data(), convertEnumerationToString(m_readyState).utf8().data());
        m_player->readyStateChanged();
    }

    // With the pipeline settled at PAUSED or above, a deferred seek can now
    // be committed.
    if (getStateResult == GST_STATE_CHANGE_SUCCESS && m_currentState >= GST_STATE_PAUSED) {
        updatePlaybackRate();
        if (m_seekIsPending) {
            GST_DEBUG("[Seek] committing pending seek to %s", toString(m_seekTime).utf8().data());
            m_seekIsPending = false;
            m_seeking = doSeek(m_seekTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
            if (!m_seeking)
                GST_DEBUG("[Seek] seeking to %s failed", toString(m_seekTime).utf8().data());
        }
    }
}
1962
1963 void MediaPlayerPrivateGStreamer::mediaLocationChanged(GstMessage* message)
1964 {
1965     if (m_mediaLocations)
1966         gst_structure_free(m_mediaLocations);
1967
1968     const GstStructure* structure = gst_message_get_structure(message);
1969     if (structure) {
1970         // This structure can contain:
1971         // - both a new-location string and embedded locations structure
1972         // - or only a new-location string.
1973         m_mediaLocations = gst_structure_copy(structure);
1974         const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
1975
1976         if (locations)
1977             m_mediaLocationCurrentIndex = static_cast<int>(gst_value_list_get_size(locations)) -1;
1978
1979         loadNextLocation();
1980     }
1981 }
1982
1983 bool MediaPlayerPrivateGStreamer::loadNextLocation()
1984 {
1985     if (!m_mediaLocations)
1986         return false;
1987
1988     const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
1989     const gchar* newLocation = nullptr;
1990
1991     if (!locations) {
1992         // Fallback on new-location string.
1993         newLocation = gst_structure_get_string(m_mediaLocations, "new-location");
1994         if (!newLocation)
1995             return false;
1996     }
1997
1998     if (!newLocation) {
1999         if (m_mediaLocationCurrentIndex < 0) {
2000             m_mediaLocations = nullptr;
2001             return false;
2002         }
2003
2004         const GValue* location = gst_value_list_get_value(locations,
2005                                                           m_mediaLocationCurrentIndex);
2006         const GstStructure* structure = gst_value_get_structure(location);
2007
2008         if (!structure) {
2009             m_mediaLocationCurrentIndex--;
2010             return false;
2011         }
2012
2013         newLocation = gst_structure_get_string(structure, "new-location");
2014     }
2015
2016     if (newLocation) {
2017         // Found a candidate. new-location is not always an absolute url
2018         // though. We need to take the base of the current url and
2019         // append the value of new-location to it.
2020         URL baseUrl = gst_uri_is_valid(newLocation) ? URL() : m_url;
2021         URL newUrl = URL(baseUrl, newLocation);
2022         convertToInternalProtocol(newUrl);
2023
2024         RefPtr<SecurityOrigin> securityOrigin = SecurityOrigin::create(m_url);
2025         if (securityOrigin->canRequest(newUrl)) {
2026             GST_INFO("New media url: %s", newUrl.string().utf8().data());
2027
2028             // Reset player states.
2029             m_networkState = MediaPlayer::Loading;
2030             m_player->networkStateChanged();
2031             m_readyState = MediaPlayer::HaveNothing;
2032             m_player->readyStateChanged();
2033
2034             // Reset pipeline state.
2035             m_resetPipeline = true;
2036             changePipelineState(GST_STATE_READY);
2037
2038             GstState state;
2039             gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
2040             if (state <= GST_STATE_READY) {
2041                 // Set the new uri and start playing.
2042                 setPlaybinURL(newUrl);
2043                 changePipelineState(GST_STATE_PLAYING);
2044                 return true;
2045             }
2046         } else
2047             GST_INFO("Not allowed to load new media location: %s", newUrl.string().utf8().data());
2048     }
2049     m_mediaLocationCurrentIndex--;
2050     return false;
2051 }
2052
// Load-state notification hook: simply re-evaluates the aggregate states.
void MediaPlayerPrivateGStreamer::loadStateChanged()
{
    updateStates();
}
2057
// Playback position milestone (seek completion, EOS, ...): refresh the
// aggregate states and notify the media player client.
void MediaPlayerPrivateGStreamer::timeChanged()
{
    updateStates();
    m_player->timeChanged();
}
2063
// End-of-stream handler: synchronizes duration with the observed position,
// flags EOS, and — unless the element is looping — pauses and drops the
// pipeline back to READY.
void MediaPlayerPrivateGStreamer::didEnd()
{
    GST_INFO("Playback ended");

    // Synchronize position and duration values to not confuse the
    // HTMLMediaElement. In some cases like reverse playback the
    // position is not always reported as 0 for instance.
    MediaTime now = currentMediaTime();
    if (now > MediaTime { } && now <= durationMediaTime())
        m_player->durationChanged();

    m_isEndReached = true;
    timeChanged();

    if (!m_player->client().mediaPlayerIsLooping()) {
        m_paused = true;
        // Cache the duration seen at EOS before tearing the pipeline down to
        // READY, where duration queries may no longer succeed.
        m_durationAtEOS = durationMediaTime();
        changePipelineState(GST_STATE_READY);
        m_downloadFinished = false;
    }
}
2085
2086 void MediaPlayerPrivateGStreamer::durationChanged()
2087 {
2088     MediaTime previousDuration = durationMediaTime();
2089
2090     // FIXME: Check if this method is still useful, because it's not doing its work at all
2091     // since bug #159458 removed a cacheDuration() call here.
2092
2093     // Avoid emiting durationchanged in the case where the previous
2094     // duration was 0 because that case is already handled by the
2095     // HTMLMediaElement.
2096     if (previousDuration && durationMediaTime() != previousDuration)
2097         m_player->durationChanged();
2098 }
2099
2100 void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState error)
2101 {
2102     GST_WARNING("Loading failed, error: %d", error);
2103
2104     m_errorOccured = true;
2105     if (m_networkState != error) {
2106         m_networkState = error;
2107         m_player->networkStateChanged();
2108     }
2109     if (m_readyState != MediaPlayer::HaveNothing) {
2110         m_readyState = MediaPlayer::HaveNothing;
2111         m_player->readyStateChanged();
2112     }
2113
2114     // Loading failed, remove ready timer.
2115     m_readyTimerHandler.stop();
2116 }
2117
// Builds (once, lazily) the set of MIME types this engine can play, by
// probing the GStreamer registry for decoder/demuxer element factories and
// mapping the caps they advertise onto the corresponding web MIME types.
static HashSet<String, ASCIICaseInsensitiveHash>& mimeTypeSet()
{
    static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> mimeTypes = []()
    {
        MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements();
        HashSet<String, ASCIICaseInsensitiveHash> set;

        GList* audioDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO, GST_RANK_MARGINAL);
        GList* videoDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, GST_RANK_MARGINAL);
        GList* demuxerFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DEMUXER, GST_RANK_MARGINAL);

        enum ElementType {
            AudioDecoder = 0,
            VideoDecoder,
            Demuxer
        };
        // Maps a GStreamer caps string (checked against the factory lists) to
        // the web MIME types it implies. An empty MIME list means the caps
        // string itself doubles as the MIME type.
        struct GstCapsWebKitMapping {
            ElementType elementType;
            const char* capsString;
            Vector<AtomicString> webkitMimeTypes;
        };

        Vector<GstCapsWebKitMapping> mapping = {
            {AudioDecoder, "audio/midi", {"audio/midi", "audio/riff-midi"}},
            {AudioDecoder, "audio/x-sbc", { }},
            {AudioDecoder, "audio/x-sid", { }},
            {AudioDecoder, "audio/x-flac", {"audio/x-flac", "audio/flac"}},
            {AudioDecoder, "audio/x-wav", {"audio/x-wav", "audio/wav", "audio/vnd.wave"}},
            {AudioDecoder, "audio/x-wavpack", {"audio/x-wavpack"}},
            {AudioDecoder, "audio/x-speex", {"audio/speex", "audio/x-speex"}},
            {AudioDecoder, "audio/x-ac3", { }},
            {AudioDecoder, "audio/x-eac3", {"audio/x-ac3"}},
            {AudioDecoder, "audio/x-dts", { }},
            {VideoDecoder, "video/x-h264, profile=(string)high", {"video/mp4", "video/x-m4v"}},
            {VideoDecoder, "video/x-msvideocodec", {"video/x-msvideo"}},
            {VideoDecoder, "video/x-h263", { }},
            {VideoDecoder, "video/mpegts", { }},
            {VideoDecoder, "video/mpeg, mpegversion=(int){1,2}, systemstream=(boolean)false", {"video/mpeg"}},
            {VideoDecoder, "video/x-dirac", { }},
            {VideoDecoder, "video/x-flash-video", {"video/flv", "video/x-flv"}},
            {Demuxer, "video/quicktime", { }},
            {Demuxer, "video/quicktime, variant=(string)3gpp", {"video/3gpp"}},
            {Demuxer, "application/x-3gp", { }},
            {Demuxer, "video/x-ms-asf", { }},
            {Demuxer, "audio/x-aiff", { }},
            {Demuxer, "application/x-pn-realaudio", { }},
            {Demuxer, "application/vnd.rn-realmedia", { }},
            {Demuxer, "audio/x-wav", {"audio/x-wav", "audio/wav", "audio/vnd.wave"}},
            {Demuxer, "application/x-hls", {"application/vnd.apple.mpegurl", "application/x-mpegurl"}}
        };

        for (auto& current : mapping) {
            GList* factories = demuxerFactories;
            if (current.elementType == AudioDecoder)
                factories = audioDecoderFactories;
            else if (current.elementType == VideoDecoder)
                factories = videoDecoderFactories;

            if (gstRegistryHasElementForMediaType(factories, current.capsString)) {
                if (!current.webkitMimeTypes.isEmpty()) {
                    for (const auto& mimeType : current.webkitMimeTypes)
                        set.add(mimeType);
                } else
                    set.add(AtomicString(current.capsString));
            }
        }

        // Codec/container combinations below need conditional logic and are
        // handled outside the table.
        bool opusSupported = false;
        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-opus")) {
            opusSupported = true;
            set.add(AtomicString("audio/opus"));
        }

        bool vorbisSupported = false;
        if (gstRegistryHasElementForMediaType(demuxerFactories, "application/ogg")) {
            set.add(AtomicString("application/ogg"));

            vorbisSupported = gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-vorbis");
            if (vorbisSupported) {
                set.add(AtomicString("audio/ogg"));
                set.add(AtomicString("audio/x-vorbis+ogg"));
            }

            if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-theora"))
                set.add(AtomicString("video/ogg"));
        }

        bool audioMpegSupported = false;
        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int)1, layer=(int)[1, 3]")) {
            audioMpegSupported = true;
            set.add(AtomicString("audio/mp1"));
            set.add(AtomicString("audio/mp3"));
            set.add(AtomicString("audio/x-mp3"));
        }

        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int){2, 4}")) {
            audioMpegSupported = true;
            set.add(AtomicString("audio/aac"));
            set.add(AtomicString("audio/mp2"));
            set.add(AtomicString("audio/mp4"));
            set.add(AtomicString("audio/x-m4a"));
        }

        if (audioMpegSupported) {
            set.add(AtomicString("audio/mpeg"));
            set.add(AtomicString("audio/x-mpeg"));
        }

        // WebM is Matroska-based: require the demuxer plus at least one of
        // the VP8/VP9/VP10 video or Vorbis/Opus audio decoders.
        if (gstRegistryHasElementForMediaType(demuxerFactories, "video/x-matroska")) {
            set.add(AtomicString("video/x-matroska"));

            if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp8")
                || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp9")
                || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp10"))
                set.add(AtomicString("video/webm"));

            if (vorbisSupported || opusSupported)
                set.add(AtomicString("audio/webm"));
        }

        gst_plugin_feature_list_free(audioDecoderFactories);
        gst_plugin_feature_list_free(videoDecoderFactories);
        gst_plugin_feature_list_free(demuxerFactories);
        return set;
    }();
    return mimeTypes;
}
2245
// MediaPlayer engine entry point: reports every MIME type the installed
// GStreamer plugins can handle (see mimeTypeSet()).
void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    types = mimeTypeSet();
}
2250
2251 MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::supportsType(const MediaEngineSupportParameters& parameters)
2252 {
2253     MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
2254 #if ENABLE(MEDIA_SOURCE)
2255     // MediaPlayerPrivateGStreamerMSE is in charge of mediasource playback, not us.
2256     if (parameters.isMediaSource)
2257         return result;
2258 #endif
2259
2260     if (parameters.isMediaStream)
2261         return result;
2262
2263     if (parameters.type.isEmpty())
2264         return result;
2265
2266     // spec says we should not return "probably" if the codecs string is empty
2267     if (mimeTypeSet().contains(parameters.type.containerType()))
2268         result = parameters.type.codecs().isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;
2269
2270     return extendedSupportsType(parameters, result);
2271 }
2272
2273 void MediaPlayerPrivateGStreamer::setDownloadBuffering()
2274 {
2275     if (!m_pipeline)
2276         return;
2277
2278     unsigned flags;
2279     g_object_get(m_pipeline.get(), "flags", &flags, nullptr);
2280
2281     unsigned flagDownload = getGstPlayFlag("download");
2282
2283     // We don't want to stop downloading if we already started it.
2284     if (flags & flagDownload && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline) {
2285         GST_DEBUG("Download already started, not starting again");
2286         return;
2287     }
2288
2289     bool shouldDownload = !isLiveStream() && m_preload == MediaPlayer::Auto;
2290     if (shouldDownload) {
2291         GST_INFO("Enabling on-disk buffering");
2292         g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
2293         m_fillTimer.startRepeating(200_ms);
2294     } else {
2295         GST_INFO("Disabling on-disk buffering");
2296         g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr);
2297         m_fillTimer.stop();
2298     }
2299 }
2300
2301 void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
2302 {
2303     GST_DEBUG("Setting preload to %s", convertEnumerationToString(preload).utf8().data());
2304     if (preload == MediaPlayer::Auto && isLiveStream())
2305         return;
2306
2307     m_preload = preload;
2308     setDownloadBuffering();
2309
2310     if (m_delayingLoad && m_preload != MediaPlayer::None) {
2311         m_delayingLoad = false;
2312         commitLoad();
2313     }
2314 }
2315
GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
{
    // Builds the audio branch handed to playbin's "audio-sink" property.
    // Returns either a bin wrapping autoaudiosink (with optional scaletempo
    // and/or WebAudio provider plumbing) or autoaudiosink itself; nullptr if
    // autoaudiosink is unavailable.
    m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr);
    if (!m_autoAudioSink) {
        GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation");
        return nullptr;
    }

    // autoaudiosink creates its real platform sink lazily; hook child-added
    // so setAudioStreamPropertiesCallback can configure the actual sink.
    g_signal_connect_swapped(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);

    GstElement* audioSinkBin;

    if (webkitGstCheckVersion(1, 4, 2)) {
#if ENABLE(WEB_AUDIO)
        // On >= 1.4.2 pitch preservation is applied through playbin's
        // audio-filter property elsewhere, so the bin only needs the
        // WebAudio provider wiring.
        audioSinkBin = gst_bin_new("audio-sink");
        ensureAudioSourceProvider();
        m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
        return audioSinkBin;
#else
        return m_autoAudioSink.get();
#endif
    }

    // Construct audio sink only if pitch preserving is enabled.
    // If GStreamer 1.4.2 is used the audio-filter playbin property is used instead.
    if (m_preservesPitch) {
        GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
        if (!scale) {
            GST_WARNING("Failed to create scaletempo");
            return m_autoAudioSink.get();
        }

        // Expose scaletempo's sink pad as the bin's ghost "sink" pad so the
        // bin can be linked like a plain sink element.
        audioSinkBin = gst_bin_new("audio-sink");
        gst_bin_add(GST_BIN(audioSinkBin), scale);
        GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(scale, "sink"));
        gst_element_add_pad(audioSinkBin, gst_ghost_pad_new("sink", pad.get()));

#if ENABLE(WEB_AUDIO)
        ensureAudioSourceProvider();
        m_audioSourceProvider->configureAudioBin(audioSinkBin, scale);
#else
        GstElement* convert = gst_element_factory_make("audioconvert", nullptr);
        GstElement* resample = gst_element_factory_make("audioresample", nullptr);

        gst_bin_add_many(GST_BIN(audioSinkBin), convert, resample, m_autoAudioSink.get(), nullptr);

        if (!gst_element_link_many(scale, convert, resample, m_autoAudioSink.get(), nullptr)) {
            GST_WARNING("Failed to link audio sink elements");
            // NOTE(review): unreffing the bin disposes the children just added
            // to it, including the autoaudiosink element; returning
            // m_autoAudioSink.get() afterwards relies on m_autoAudioSink still
            // holding its own strong reference — verify.
            gst_object_unref(audioSinkBin);
            return m_autoAudioSink.get();
        }
#endif
        return audioSinkBin;
    }

#if ENABLE(WEB_AUDIO)
    // No pitch preservation on < 1.4.2, but WebAudio still needs its bin.
    audioSinkBin = gst_bin_new("audio-sink");
    ensureAudioSourceProvider();
    m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
    return audioSinkBin;
#endif
    // Without WEB_AUDIO every earlier branch already returned.
    ASSERT_NOT_REACHED();
    return nullptr;
}
2380
2381 GstElement* MediaPlayerPrivateGStreamer::audioSink() const
2382 {
2383     GstElement* sink;
2384     g_object_get(m_pipeline.get(), "audio-sink", &sink, nullptr);
2385     return sink;
2386 }
2387
2388 #if ENABLE(WEB_AUDIO)
2389 void MediaPlayerPrivateGStreamer::ensureAudioSourceProvider()
2390 {
2391     if (!m_audioSourceProvider)
2392         m_audioSourceProvider = std::make_unique<AudioSourceProviderGStreamer>();
2393 }
2394
2395 AudioSourceProvider* MediaPlayerPrivateGStreamer::audioSourceProvider()
2396 {
2397     ensureAudioSourceProvider();
2398     return m_audioSourceProvider.get();
2399 }
2400 #endif
2401
void MediaPlayerPrivateGStreamer::createGSTPlayBin(const gchar* playbinName)
{
    // Creates (or re-creates) the playbin pipeline and wires up sinks and
    // signal handlers. A null playbinName means nothing specific was forced:
    // an existing pipeline is kept, otherwise "playbin" is used — unless the
    // USE_PLAYBIN3 environment variable selects "playbin3".
    if (m_pipeline) {
        if (!playbinName) {
            GST_INFO_OBJECT(pipeline(), "Keeping same playbin as nothing forced");
            return;
        }

        if (!g_strcmp0(GST_OBJECT_NAME(gst_element_get_factory(m_pipeline.get())), playbinName)) {
            GST_INFO_OBJECT(pipeline(), "Already using %s", playbinName);
            return;
        }

        // A different element was requested: tear the current pipeline down.
        GST_INFO_OBJECT(pipeline(), "Tearing down as we need to use %s now.",
            playbinName);
        changePipelineState(GST_STATE_NULL);
        m_pipeline = nullptr;
    }

    ASSERT(!m_pipeline);

#if GST_CHECK_VERSION(1, 10, 0)
    // playbin3 is opt-in via environment variable and overrides any
    // explicitly requested element name.
    m_isLegacyPlaybin = !g_getenv("USE_PLAYBIN3");
    if (!m_isLegacyPlaybin)
        playbinName = "playbin3";
#endif

    if (!playbinName)
        playbinName = "playbin";

    // gst_element_factory_make() returns a floating reference so
    // we should not adopt.
    setPipeline(gst_element_factory_make(playbinName, "play"));
    setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get()));

    GST_INFO("Using legacy playbin element: %s", boolForPrinting(m_isLegacyPlaybin));

    // Let also other listeners subscribe to (application) messages in this bus.
    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    gst_bus_add_signal_watch_full(bus.get(), RunLoopSourcePriority::RunLoopDispatcher);
    g_signal_connect(bus.get(), "message", G_CALLBACK(busMessageCallback), this);

    g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);

    g_signal_connect_swapped(m_pipeline.get(), "source-setup", G_CALLBACK(sourceSetupCallback), this);
    // The *-changed signals only exist on legacy playbin; playbin3 reports
    // track changes differently, so they are not connected there.
    if (m_isLegacyPlaybin) {
        g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
        g_signal_connect_swapped(m_pipeline.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
    }

#if ENABLE(VIDEO_TRACK)
    if (m_isLegacyPlaybin)
        g_signal_connect_swapped(m_pipeline.get(), "text-changed", G_CALLBACK(textChangedCallback), this);

    GstElement* textCombiner = webkitTextCombinerNew();
    ASSERT(textCombiner);
    g_object_set(m_pipeline.get(), "text-stream-combiner", textCombiner, nullptr);

    m_textAppSink = webkitTextSinkNew();
    ASSERT(m_textAppSink);

    m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
    ASSERT(m_textAppSinkPad);

    // The VTT caps name differs across GStreamer versions; pick per version.
    GRefPtr<GstCaps> textCaps;
    if (webkitGstCheckVersion(1, 13, 0))
        textCaps = adoptGRef(gst_caps_new_empty_simple("application/x-subtitle-vtt"));
    else
        textCaps = adoptGRef(gst_caps_new_empty_simple("text/vtt"));
    g_object_set(m_textAppSink.get(), "emit-signals", TRUE, "enable-last-sample", FALSE, "caps", textCaps.get(), nullptr);
    g_signal_connect_swapped(m_textAppSink.get(), "new-sample", G_CALLBACK(newTextSampleCallback), this);

    g_object_set(m_pipeline.get(), "text-sink", m_textAppSink.get(), nullptr);
#endif

    g_object_set(m_pipeline.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);

    configurePlaySink();

    // On 1.4.2 and newer we use the audio-filter property instead.
    // See https://bugzilla.gnome.org/show_bug.cgi?id=735748 for
    // the reason for using >= 1.4.2 instead of >= 1.4.0.
    if (m_preservesPitch && webkitGstCheckVersion(1, 4, 2)) {
        GstElement* scale = gst_element_factory_make("scaletempo", nullptr);

        if (!scale)
            GST_WARNING("Failed to create scaletempo");
        else
            g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
    }

    if (!m_renderingCanBeAccelerated) {
        // If not using accelerated compositing, let GStreamer handle
        // the image-orientation tag.
        GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
        if (videoFlip) {
            // NOTE(review): method=8 presumably selects automatic rotation
            // from the image-orientation tag — confirm against the videoflip
            // element's enum.
            g_object_set(videoFlip, "method", 8, nullptr);
            g_object_set(m_pipeline.get(), "video-filter", videoFlip, nullptr);
        } else
            GST_WARNING("The videoflip element is missing, video rotation support is now disabled. Please check your gst-plugins-good installation.");
    }

    // Track caps changes on the video sink pad to pick up size/format updates.
    GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
    if (videoSinkPad)
        g_signal_connect_swapped(videoSinkPad.get(), "notify::caps", G_CALLBACK(videoSinkCapsChangedCallback), this);
}
2508
2509 void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
2510 {
2511     GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_pipeline.get()), GST_STATE_PAUSED);
2512     gst_element_post_message(m_pipeline.get(), message);
2513 }
2514
2515 bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
2516 {
2517     if (WEBKIT_IS_WEB_SRC(m_source.get()))
2518         return webKitSrcPassedCORSAccessCheck(WEBKIT_WEB_SRC(m_source.get()));
2519     return false;
2520 }
2521
2522 bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
2523 {
2524     if (isLiveStream())
2525         return false;
2526
2527     if (m_url.isLocalFile())
2528         return true;
2529
2530     if (m_url.protocolIsInHTTPFamily())
2531         return true;
2532
2533     return false;
2534 }
2535
2536 }
2537
2538 #endif // USE(GSTREAMER)