[GStreamer] WebVTT caps changed in GStreamer 1.14
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamer.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015, 2016 Igalia S.L
7  * Copyright (C) 2014 Cable Television Laboratories, Inc.
8  * Copyright (C) 2015, 2016 Metrological Group B.V.
9  *
10  * This library is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Library General Public
12  * License as published by the Free Software Foundation; either
13  * version 2 of the License, or (at your option) any later version.
14  *
15  * This library is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18  * Library General Public License for more details.
19  *
20  * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
22  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
23  * Boston, MA 02110-1301, USA.
24  */
25
26 #include "config.h"
27 #include "MediaPlayerPrivateGStreamer.h"
28
29 #if ENABLE(VIDEO) && USE(GSTREAMER)
30
31 #include "FileSystem.h"
32 #include "GStreamerUtilities.h"
33 #include "HTTPHeaderNames.h"
34 #include "MIMETypeRegistry.h"
35 #include "MediaPlayer.h"
36 #include "MediaPlayerRequestInstallMissingPluginsCallback.h"
37 #include "NotImplemented.h"
38 #include "SecurityOrigin.h"
39 #include "TimeRanges.h"
40 #include "URL.h"
41 #include "WebKitWebSourceGStreamer.h"
42 #include <glib.h>
43 #include <gst/gst.h>
44 #include <gst/pbutils/missing-plugins.h>
45 #include <limits>
46 #include <wtf/HexNumber.h>
47 #include <wtf/MediaTime.h>
48 #include <wtf/NeverDestroyed.h>
49 #include <wtf/StringPrintStream.h>
50 #include <wtf/glib/GUniquePtr.h>
51 #include <wtf/glib/RunLoopSourcePriority.h>
52 #include <wtf/text/CString.h>
53
54 #if ENABLE(VIDEO_TRACK)
55 #include "AudioTrackPrivateGStreamer.h"
56 #include "InbandMetadataTextTrackPrivateGStreamer.h"
57 #include "InbandTextTrackPrivateGStreamer.h"
58 #include "TextCombinerGStreamer.h"
59 #include "TextSinkGStreamer.h"
60 #include "VideoTrackPrivateGStreamer.h"
61 #endif
62
63 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
64 #define GST_USE_UNSTABLE_API
65 #include <gst/mpegts/mpegts.h>
66 #undef GST_USE_UNSTABLE_API
67 #endif
68 #include <gst/audio/streamvolume.h>
69
70 #if ENABLE(MEDIA_SOURCE)
71 #include "MediaSource.h"
72 #include "WebKitMediaSourceGStreamer.h"
73 #endif
74
75 #if ENABLE(WEB_AUDIO)
76 #include "AudioSourceProviderGStreamer.h"
77 #endif
78
79 GST_DEBUG_CATEGORY_EXTERN(webkit_media_player_debug);
80 #define GST_CAT_DEFAULT webkit_media_player_debug
81
82
83 namespace WebCore {
84 using namespace std;
85
86 static void busMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
87 {
88     player->handleMessage(message);
89 }
90
91 void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
92 {
93     player->setAudioStreamProperties(object);
94 }
95
96 void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
97 {
98     if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
99         return;
100
101     const char* role = m_player->client().mediaPlayerIsVideo() ? "video" : "music";
102     GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr);
103     g_object_set(object, "stream-properties", structure, nullptr);
104     gst_structure_free(structure);
105     GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
106     GST_DEBUG("Set media.role as %s at %s", role, elementName.get());
107 }
108
109 void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar)
110 {
111     if (isAvailable())
112         registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamer>(player); },
113             getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
114 }
115
116 bool MediaPlayerPrivateGStreamer::isAvailable()
117 {
118     if (!MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements())
119         return false;
120
121     GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
122     return factory;
123 }
124
// Constructs the player in an idle state. The GStreamer pipeline itself is
// created lazily by load(); here we only initialize bookkeeping members and
// wire the fill timer and the READY-state teardown timer to their handlers.
MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
    : MediaPlayerPrivateGStreamerBase(player)
    , m_buffering(false)
    , m_bufferingPercentage(0)
    , m_canFallBackToLastFinishedSeekPosition(false)
    , m_changingRate(false)
    , m_downloadFinished(false)
    , m_errorOccured(false)
    , m_isEndReached(false)
    , m_isStreaming(false)
    , m_durationAtEOS(MediaTime::invalidTime())
    , m_paused(true)
    , m_playbackRate(1)
    , m_requestedState(GST_STATE_VOID_PENDING)
    , m_resetPipeline(false)
    , m_seeking(false)
    , m_seekIsPending(false)
    , m_seekTime(MediaTime::invalidTime())
    , m_source(nullptr)
    , m_volumeAndMuteInitialized(false)
    , m_mediaLocations(nullptr)
    , m_mediaLocationCurrentIndex(0)
    , m_playbackRatePause(false)
    , m_timeOfOverlappingSeek(MediaTime::invalidTime())
    , m_lastPlaybackRate(1)
    , m_fillTimer(*this, &MediaPlayerPrivateGStreamer::fillTimerFired)
    , m_maxTimeLoaded(MediaTime::zeroTime())
    , m_preload(player->preload())
    , m_delayingLoad(false)
    , m_maxTimeLoadedAtLastDidLoadingProgress(MediaTime::zeroTime())
    , m_hasVideo(false)
    , m_hasAudio(false)
    , m_readyTimerHandler(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::readyTimerFired)
    , m_totalBytes(0)
    , m_preservesPitch(false)
{
#if USE(GLIB)
    // Run the READY-state teardown timer at idle priority so it never
    // competes with more urgent main-loop work.
    m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
#endif
}
165
// Tears the player down: detaches every track object, stops timers, and
// disconnects every signal handler this instance registered (source parent,
// audio sink, video sink pad, pipeline and its bus) so no callback can fire
// into a destroyed object.
MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
{
#if ENABLE(VIDEO_TRACK)
    // Detach track objects so they stop referencing this player.
    for (size_t i = 0; i < m_audioTracks.size(); ++i)
        m_audioTracks[i]->disconnect();

    for (size_t i = 0; i < m_textTracks.size(); ++i)
        m_textTracks[i]->disconnect();

    for (size_t i = 0; i < m_videoTracks.size(); ++i)
        m_videoTracks[i]->disconnect();
#endif
    if (m_fillTimer.isActive())
        m_fillTimer.stop();

    if (m_mediaLocations) {
        gst_structure_free(m_mediaLocations);
        m_mediaLocations = nullptr;
    }

    // The uridecodebin "element-added" handler was connected on the source's
    // parent, so only disconnect when the source is ours and still parented.
    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    if (m_autoAudioSink)
        g_signal_handlers_disconnect_by_func(G_OBJECT(m_autoAudioSink.get()),
            reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);

    m_readyTimerHandler.stop();
    if (m_missingPluginsCallback) {
        m_missingPluginsCallback->invalidate();
        m_missingPluginsCallback = nullptr;
    }

    if (m_videoSink) {
        GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
        g_signal_handlers_disconnect_matched(videoSinkPad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
    }

    if (m_pipeline) {
        // Remove the bus watch and sync handler before the pipeline goes away
        // so no stray bus message reaches busMessageCallback afterwards.
        GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
        ASSERT(bus);
        g_signal_handlers_disconnect_by_func(bus.get(), gpointer(busMessageCallback), this);
        gst_bus_remove_signal_watch(bus.get());
        gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
        g_signal_handlers_disconnect_matched(m_pipeline.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
    }
}
213
214 static void convertToInternalProtocol(URL& url)
215 {
216     if (url.protocolIsInHTTPFamily())
217         url.setProtocol("webkit+" + url.protocol());
218 }
219
220 void MediaPlayerPrivateGStreamer::setPlaybinURL(const URL& url)
221 {
222     // Clean out everything after file:// url path.
223     String cleanURLString(url.string());
224     if (url.isLocalFile())
225         cleanURLString = cleanURLString.substring(0, url.pathEnd());
226
227     m_url = URL(URL(), cleanURLString);
228     convertToInternalProtocol(m_url);
229
230     GST_INFO("Load %s", m_url.string().utf8().data());
231     g_object_set(m_pipeline.get(), "uri", m_url.string().utf8().data(), nullptr);
232 }
233
// Entry point for URL-based loads: validates the request, lazily creates the
// playbin pipeline on first use, hands it the URL and resets network/ready
// state. The actual preroll (commitLoad) is deferred when preload == None.
void MediaPlayerPrivateGStreamer::load(const String& urlString)
{
    // FIXME: This method is still called even if supportsType() returned
    // IsNotSupported. This would deserve more investigation but meanwhile make
    // sure we don't ever try to play animated gif assets.
    if (m_player->contentMIMEType() == "image/gif") {
        loadingFailed(MediaPlayer::FormatError);
        return;
    }

    if (!MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements())
        return;

    URL url(URL(), urlString);
    if (url.isBlankURL())
        return;

    // The playbin pipeline is created once and reused across loads.
    if (!m_pipeline)
        createGSTPlayBin();

    if (m_fillTimer.isActive())
        m_fillTimer.stop();

    ASSERT(m_pipeline);

    setPlaybinURL(url);

    GST_DEBUG("preload: %s", convertEnumerationToString(m_preload).utf8().data());
    if (m_preload == MediaPlayer::None) {
        GST_INFO("Delaying load.");
        m_delayingLoad = true;
    }

    // Reset network and ready states. Those will be set properly once
    // the pipeline pre-rolled.
    m_networkState = MediaPlayer::Loading;
    m_player->networkStateChanged();
    m_readyState = MediaPlayer::HaveNothing;
    m_player->readyStateChanged();
    m_volumeAndMuteInitialized = false;
    m_durationAtEOS = MediaTime::invalidTime();

    if (!m_delayingLoad)
        commitLoad();
}
279
280 #if ENABLE(MEDIA_SOURCE)
281 void MediaPlayerPrivateGStreamer::load(const String&, MediaSourcePrivateClient*)
282 {
283     // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
284     m_networkState = MediaPlayer::FormatError;
285     m_player->networkStateChanged();
286 }
287 #endif
288
289 #if ENABLE(MEDIA_STREAM)
290 void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate&)
291 {
292     // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
293     m_networkState = MediaPlayer::FormatError;
294     m_player->networkStateChanged();
295     notImplemented();
296 }
297 #endif
298
299 void MediaPlayerPrivateGStreamer::commitLoad()
300 {
301     ASSERT(!m_delayingLoad);
302     GST_DEBUG("Committing load.");
303
304     // GStreamer needs to have the pipeline set to a paused state to
305     // start providing anything useful.
306     changePipelineState(GST_STATE_PAUSED);
307
308     setDownloadBuffering();
309     updateStates();
310 }
311
312 MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
313 {
314     if (m_isEndReached) {
315         // Position queries on a null pipeline return 0. If we're at
316         // the end of the stream the pipeline is null but we want to
317         // report either the seek time or the duration because this is
318         // what the Media element spec expects us to do.
319         if (m_seeking)
320             return m_seekTime;
321
322         MediaTime duration = durationMediaTime();
323         return duration.isInvalid() ? MediaTime::zeroTime() : duration;
324     }
325
326     // Position is only available if no async state change is going on and the state is either paused or playing.
327     gint64 position = GST_CLOCK_TIME_NONE;
328     GstQuery* query= gst_query_new_position(GST_FORMAT_TIME);
329     if (gst_element_query(m_pipeline.get(), query))
330         gst_query_parse_position(query, 0, &position);
331     gst_query_unref(query);
332
333     GST_LOG("Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));
334
335     MediaTime playbackPosition = MediaTime::zeroTime();
336     GstClockTime gstreamerPosition = static_cast<GstClockTime>(position);
337     if (GST_CLOCK_TIME_IS_VALID(gstreamerPosition))
338         playbackPosition = MediaTime(gstreamerPosition, GST_SECOND);
339     else if (m_canFallBackToLastFinishedSeekPosition)
340         playbackPosition = m_seekTime;
341
342     return playbackPosition;
343 }
344
345 void MediaPlayerPrivateGStreamer::readyTimerFired()
346 {
347     changePipelineState(GST_STATE_NULL);
348 }
349
// Requests a pipeline state change, skipping it when the pipeline is already
// in (or transitioning to) the requested state. Returns false only when the
// change failed outright; a deferred/async change still returns true.
bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
{
    ASSERT(m_pipeline);

    GstState currentState;
    GstState pending;

    gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
    if (currentState == newState || pending == newState) {
        GST_DEBUG("Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
            gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
        return true;
    }

    GST_DEBUG("Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState),
        gst_element_state_get_name(currentState), gst_element_state_get_name(pending));

    GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState);
    // A FAILURE result is tolerated when we are hopping between PAUSED and
    // PLAYING (the transition may still settle); any other failure aborts.
    GstState pausedOrPlaying = newState == GST_STATE_PLAYING ? GST_STATE_PAUSED : GST_STATE_PLAYING;
    if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE) {
        return false;
    }

    // Create a timer when entering the READY state so that we can free resources
    // if we stay for too long on READY.
    // Also lets remove the timer if we request a state change for any state other than READY.
    // See also https://bugs.webkit.org/show_bug.cgi?id=117354
    if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
        // Max interval in seconds to stay in the READY state on manual
        // state change requests.
        static const Seconds readyStateTimerDelay { 1_min };
        m_readyTimerHandler.startOneShot(readyStateTimerDelay);
    } else if (newState != GST_STATE_READY)
        m_readyTimerHandler.stop();

    return true;
}
387
388 void MediaPlayerPrivateGStreamer::prepareToPlay()
389 {
390     GST_DEBUG("Prepare to play");
391     m_preload = MediaPlayer::Auto;
392     if (m_delayingLoad) {
393         m_delayingLoad = false;
394         commitLoad();
395     }
396 }
397
398 void MediaPlayerPrivateGStreamer::play()
399 {
400     if (!m_playbackRate) {
401         m_playbackRatePause = true;
402         return;
403     }
404
405     if (changePipelineState(GST_STATE_PLAYING)) {
406         m_isEndReached = false;
407         m_delayingLoad = false;
408         m_preload = MediaPlayer::Auto;
409         setDownloadBuffering();
410         GST_INFO("Play");
411     } else {
412         loadingFailed(MediaPlayer::Empty);
413     }
414 }
415
416 void MediaPlayerPrivateGStreamer::pause()
417 {
418     m_playbackRatePause = false;
419     GstState currentState, pendingState;
420     gst_element_get_state(m_pipeline.get(), &currentState, &pendingState, 0);
421     if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
422         return;
423
424     if (changePipelineState(GST_STATE_PAUSED))
425         GST_INFO("Pause");
426     else
427         loadingFailed(MediaPlayer::Empty);
428 }
429
430 MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
431 {
432     if (!m_pipeline || m_errorOccured)
433         return MediaTime::invalidTime();
434
435     if (m_durationAtEOS.isValid())
436         return m_durationAtEOS;
437
438     // The duration query would fail on a not-prerolled pipeline.
439     if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
440         return MediaTime::invalidTime();
441
442     gint64 timeLength = 0;
443
444     if (!gst_element_query_duration(m_pipeline.get(), GST_FORMAT_TIME, &timeLength) || !GST_CLOCK_TIME_IS_VALID(timeLength)) {
445         GST_DEBUG("Time duration query failed for %s", m_url.string().utf8().data());
446         return MediaTime::positiveInfiniteTime();
447     }
448
449     GST_LOG("Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(timeLength));
450
451     return MediaTime(timeLength, GST_SECOND);
452     // FIXME: handle 3.14.9.5 properly
453 }
454
455 MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
456 {
457     if (!m_pipeline || m_errorOccured)
458         return MediaTime::invalidTime();
459
460     if (m_seeking)
461         return m_seekTime;
462
463     // Workaround for
464     // https://bugzilla.gnome.org/show_bug.cgi?id=639941 In GStreamer
465     // 0.10.35 basesink reports wrong duration in case of EOS and
466     // negative playback rate. There's no upstream accepted patch for
467     // this bug yet, hence this temporary workaround.
468     if (m_isEndReached && m_playbackRate < 0)
469         return MediaTime::invalidTime();
470
471     return playbackPosition();
472 }
473
// Initiates a seek to mediaTime. Live streams and no-op targets are ignored;
// the seek is deferred (m_seekIsPending) while an async state change is in
// flight or after EOS, and an overlapping seek records its latest target in
// m_timeOfOverlappingSeek for later replay.
void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime)
{
    if (!m_pipeline)
        return;

    if (m_errorOccured)
        return;

    GST_INFO("[Seek] seek attempt to %s", toString(mediaTime).utf8().data());

    // Avoid useless seeking.
    if (mediaTime == currentMediaTime())
        return;

    // Clamp the target to the media duration.
    MediaTime time = std::min(mediaTime, durationMediaTime());

    if (isLiveStream())
        return;

    GST_INFO("[Seek] seeking to %s", toString(time).utf8().data());

    if (m_seeking) {
        m_timeOfOverlappingSeek = time;
        if (m_seekIsPending) {
            m_seekTime = time;
            return;
        }
    }

    GstState state;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
    if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
        GST_DEBUG("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
        return;
    }
    if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
        // Defer the seek until the pipeline settles; after EOS the pipeline
        // additionally has to be brought back to PAUSED first.
        m_seekIsPending = true;
        if (m_isEndReached) {
            GST_DEBUG("[Seek] reset pipeline");
            m_resetPipeline = true;
            if (!changePipelineState(GST_STATE_PAUSED))
                loadingFailed(MediaPlayer::Empty);
        }
    } else {
        // We can seek now.
        if (!doSeek(time, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
            GST_DEBUG("[Seek] seeking to %s failed", toString(time).utf8().data());
            return;
        }
    }

    m_seeking = true;
    m_seekTime = time;
    m_isEndReached = false;
}
529
530 bool MediaPlayerPrivateGStreamer::doSeek(const MediaTime& position, float rate, GstSeekFlags seekType)
531 {
532     // Default values for rate >= 0.
533     MediaTime startTime = position, endTime = MediaTime::invalidTime();
534
535     // TODO: Should do more than that, need to notify the media source
536     // and probably flush the pipeline at least.
537     if (isMediaSource())
538         return true;
539
540     if (rate < 0) {
541         startTime = MediaTime::zeroTime();
542         // If we are at beginning of media, start from the end to
543         // avoid immediate EOS.
544         if (position < MediaTime::zeroTime())
545             endTime = durationMediaTime();
546         else
547             endTime = position;
548     }
549
550     if (!rate)
551         rate = 1.0;
552
553     return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType,
554         GST_SEEK_TYPE_SET, toGstClockTime(startTime), GST_SEEK_TYPE_SET, toGstClockTime(endTime));
555 }
556
// Applies a pending rate change (requested by setRate()) through a flushing
// seek at the current position. On success the sink is (un)muted according
// to the new rate; on failure the previous working rate is restored. Also
// resumes a pipeline that was paused because the rate had been set to zero.
void MediaPlayerPrivateGStreamer::updatePlaybackRate()
{
    if (!m_changingRate)
        return;

    GST_INFO("Set Rate to %f", m_playbackRate);

    // Mute the sound if the playback rate is negative or too extreme and audio pitch is not adjusted.
    bool mute = m_playbackRate <= 0 || (!m_preservesPitch && (m_playbackRate < 0.8 || m_playbackRate > 2));

    GST_INFO(mute ? "Need to mute audio" : "Do not need to mute audio");

    if (doSeek(playbackPosition(), m_playbackRate, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH))) {
        g_object_set(m_pipeline.get(), "mute", mute, nullptr);
        m_lastPlaybackRate = m_playbackRate;
    } else {
        // The seek failed: roll back to the last rate that worked.
        m_playbackRate = m_lastPlaybackRate;
        GST_ERROR("Set rate to %f failed", m_playbackRate);
    }

    if (m_playbackRatePause) {
        GstState state;
        GstState pending;

        // Resume playback that was suspended by a zero rate.
        gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
        if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
            changePipelineState(GST_STATE_PLAYING);
        m_playbackRatePause = false;
    }

    m_changingRate = false;
    m_player->rateChanged();
}
590
// Reports whether playback is logically paused. Two special cases: EOS
// always reads as paused, and a zero playback rate reads as *not* paused
// (the pipeline is paused internally but the player keeps "playing").
bool MediaPlayerPrivateGStreamer::paused() const
{
    if (m_isEndReached) {
        GST_DEBUG("Ignoring pause at EOS");
        return true;
    }

    if (m_playbackRatePause) {
        // Returning false is intentional: the pipeline pause caused by
        // rate == 0 must stay invisible to the upper layers.
        GST_DEBUG("Playback rate is 0, simulating PAUSED state");
        return false;
    }

    GstState state;
    gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
    bool paused = state <= GST_STATE_PAUSED;
    GST_DEBUG("Paused: %s", toString(paused).utf8().data());
    return paused;
}
609
610 bool MediaPlayerPrivateGStreamer::seeking() const
611 {
612     return m_seeking;
613 }
614
615 void MediaPlayerPrivateGStreamer::videoChangedCallback(MediaPlayerPrivateGStreamer* player)
616 {
617     player->m_notifier->notify(MainThreadNotification::VideoChanged, [player] { player->notifyPlayerOfVideo(); });
618 }
619
// Main-thread handler for playbin's "video-changed" signal: refreshes
// m_hasVideo and reconciles m_videoTracks with the pads playbin currently
// exposes. With MSE the source element manages tracks, so we bail out early.
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-video", &numTracks, nullptr);

    GST_INFO("Media has %d video tracks", numTracks);

    bool oldHasVideo = m_hasVideo;
    m_hasVideo = numTracks > 0;
    if (oldHasVideo != m_hasVideo)
        m_player->characteristicChanged();

    if (m_hasVideo)
        m_player->sizeChanged();

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        m_player->client().mediaPlayerEngineUpdated(m_player);
        return;
    }

#if ENABLE(VIDEO_TRACK)
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        // Reuse the existing track object when its pad is unchanged.
        if (i < static_cast<gint>(m_videoTracks.size())) {
            RefPtr<VideoTrackPrivateGStreamer> existingTrack = m_videoTracks[i];
            existingTrack->setIndex(i);
            if (existingTrack->pad() == pad)
                continue;
        }

        // NOTE(review): the audio/text counterparts use insert(i, track);
        // append() places a replacement track at the tail regardless of i —
        // confirm this asymmetry is intended.
        RefPtr<VideoTrackPrivateGStreamer> track = VideoTrackPrivateGStreamer::create(m_pipeline, i, pad);
        m_videoTracks.append(track);
        m_player->addVideoTrack(*track);
    }

    // Drop surplus track objects when the stream now has fewer tracks.
    while (static_cast<gint>(m_videoTracks.size()) > numTracks) {
        RefPtr<VideoTrackPrivateGStreamer> track = m_videoTracks.last();
        track->disconnect();
        m_videoTracks.removeLast();
        m_player->removeVideoTrack(*track);
    }
#endif

    m_player->client().mediaPlayerEngineUpdated(m_player);
}
674
675 void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
676 {
677     player->m_notifier->notify(MainThreadNotification::VideoCapsChanged, [player] { player->notifyPlayerOfVideoCaps(); });
678 }
679
680 void MediaPlayerPrivateGStreamer::notifyPlayerOfVideoCaps()
681 {
682     m_videoSize = IntSize();
683     m_player->client().mediaPlayerEngineUpdated(m_player);
684 }
685
686 void MediaPlayerPrivateGStreamer::audioChangedCallback(MediaPlayerPrivateGStreamer* player)
687 {
688     player->m_notifier->notify(MainThreadNotification::AudioChanged, [player] { player->notifyPlayerOfAudio(); });
689 }
690
// Main-thread handler for playbin's "audio-changed" signal: refreshes
// m_hasAudio and reconciles m_audioTracks with the pads playbin currently
// exposes. With MSE the source element manages tracks, so we bail out early.
void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-audio", &numTracks, nullptr);

    GST_INFO("Media has %d audio tracks", numTracks);
    bool oldHasAudio = m_hasAudio;
    m_hasAudio = numTracks > 0;
    if (oldHasAudio != m_hasAudio)
        m_player->characteristicChanged();

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        m_player->client().mediaPlayerEngineUpdated(m_player);
        return;
    }

#if ENABLE(VIDEO_TRACK)
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        // Reuse the existing track object when its pad is unchanged.
        if (i < static_cast<gint>(m_audioTracks.size())) {
            RefPtr<AudioTrackPrivateGStreamer> existingTrack = m_audioTracks[i];
            existingTrack->setIndex(i);
            if (existingTrack->pad() == pad)
                continue;
        }

        RefPtr<AudioTrackPrivateGStreamer> track = AudioTrackPrivateGStreamer::create(m_pipeline, i, pad);
        m_audioTracks.insert(i, track);
        m_player->addAudioTrack(*track);
    }

    // Drop surplus track objects when the stream now has fewer tracks.
    while (static_cast<gint>(m_audioTracks.size()) > numTracks) {
        RefPtr<AudioTrackPrivateGStreamer> track = m_audioTracks.last();
        track->disconnect();
        m_audioTracks.removeLast();
        m_player->removeAudioTrack(*track);
    }
#endif

    m_player->client().mediaPlayerEngineUpdated(m_player);
}
741
742 #if ENABLE(VIDEO_TRACK)
743 void MediaPlayerPrivateGStreamer::textChangedCallback(MediaPlayerPrivateGStreamer* player)
744 {
745     player->m_notifier->notify(MainThreadNotification::TextChanged, [player] { player->notifyPlayerOfText(); });
746 }
747
// Main-thread handler for playbin's "text-changed" signal: reconciles
// m_textTracks with the text pads playbin currently exposes. With MSE the
// source element manages tracks, so we bail out early.
void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-text", &numTracks, nullptr);

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        return;
    }

    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        // Reuse the existing track object when its pad is unchanged.
        if (i < static_cast<gint>(m_textTracks.size())) {
            RefPtr<InbandTextTrackPrivateGStreamer> existingTrack = m_textTracks[i];
            existingTrack->setIndex(i);
            if (existingTrack->pad() == pad)
                continue;
        }

        RefPtr<InbandTextTrackPrivateGStreamer> track = InbandTextTrackPrivateGStreamer::create(i, pad);
        m_textTracks.insert(i, track);
        m_player->addTextTrack(*track);
    }

    // Drop surplus track objects when the stream now has fewer tracks.
    while (static_cast<gint>(m_textTracks.size()) > numTracks) {
        RefPtr<InbandTextTrackPrivateGStreamer> track = m_textTracks.last();
        track->disconnect();
        m_textTracks.removeLast();
        m_player->removeTextTrack(*track);
    }
}
787
788 GstFlowReturn MediaPlayerPrivateGStreamer::newTextSampleCallback(MediaPlayerPrivateGStreamer* player)
789 {
790     player->newTextSample();
791     return GST_FLOW_OK;
792 }
793
794 void MediaPlayerPrivateGStreamer::newTextSample()
795 {
796     if (!m_textAppSink)
797         return;
798
799     GRefPtr<GstEvent> streamStartEvent = adoptGRef(
800         gst_pad_get_sticky_event(m_textAppSinkPad.get(), GST_EVENT_STREAM_START, 0));
801
802     GRefPtr<GstSample> sample;
803     g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr);
804     ASSERT(sample);
805
806     if (streamStartEvent) {
807         bool found = FALSE;
808         const gchar* id;
809         gst_event_parse_stream_start(streamStartEvent.get(), &id);
810         for (size_t i = 0; i < m_textTracks.size(); ++i) {
811             RefPtr<InbandTextTrackPrivateGStreamer> track = m_textTracks[i];
812             if (track->streamId() == id) {
813                 track->handleSample(sample);
814                 found = true;
815                 break;
816             }
817         }
818         if (!found)
819             GST_WARNING("Got sample with unknown stream ID.");
820     } else
821         GST_WARNING("Unable to handle sample with no stream start event.");
822 }
823 #endif
824
// Requests a new playback rate. The rate is clamped to [-20, 20]; a zero
// rate pauses the pipeline while keeping the player logically playing; the
// actual change goes through updatePlaybackRate() — immediately when the
// pipeline state allows it, otherwise deferred until it settles.
void MediaPlayerPrivateGStreamer::setRate(float rate)
{
    // Higher rate causes crash.
    rate = clampTo(rate, -20.0, 20.0);

    // Avoid useless playback rate update.
    if (m_playbackRate == rate) {
        // and make sure that upper layers were notified if rate was set

        if (!m_changingRate && m_player->rate() != m_playbackRate)
            m_player->rateChanged();
        return;
    }

    if (isLiveStream()) {
        // notify upper layers that we cannot handle passed rate.
        m_changingRate = false;
        m_player->rateChanged();
        return;
    }

    GstState state;
    GstState pending;

    m_playbackRate = rate;
    m_changingRate = true;

    gst_element_get_state(m_pipeline.get(), &state, &pending, 0);

    if (!rate) {
        // Rate 0 == pause the pipeline without reporting a paused state.
        m_changingRate = false;
        m_playbackRatePause = true;
        if (state != GST_STATE_PAUSED && pending != GST_STATE_PAUSED)
            changePipelineState(GST_STATE_PAUSED);
        return;
    }

    // While the pipeline is not settled in PAUSED/PLAYING, leave
    // m_changingRate set — presumably the change is applied later once the
    // state settles (TODO confirm which path calls updatePlaybackRate then).
    if ((state != GST_STATE_PLAYING && state != GST_STATE_PAUSED)
        || (pending == GST_STATE_PAUSED))
        return;

    updatePlaybackRate();
}
868
869 double MediaPlayerPrivateGStreamer::rate() const
870 {
871     return m_playbackRate;
872 }
873
// Records whether pitch should be preserved across rate changes; the flag is
// consumed when the playback rate is (re)applied to the pipeline.
void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
{
    m_preservesPitch = preservesPitch;
}
878
879 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
880 {
881     auto timeRanges = std::make_unique<PlatformTimeRanges>();
882     if (m_errorOccured || isLiveStream())
883         return timeRanges;
884
885     MediaTime mediaDuration = durationMediaTime();
886     if (!mediaDuration || mediaDuration.isPositiveInfinite())
887         return timeRanges;
888
889     GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
890
891     if (!gst_element_query(m_pipeline.get(), query)) {
892         gst_query_unref(query);
893         return timeRanges;
894     }
895
896     guint numBufferingRanges = gst_query_get_n_buffering_ranges(query);
897     for (guint index = 0; index < numBufferingRanges; index++) {
898         gint64 rangeStart = 0, rangeStop = 0;
899         if (gst_query_parse_nth_buffering_range(query, index, &rangeStart, &rangeStop))
900             timeRanges->add(MediaTime(rangeStart * toGstUnsigned64Time(mediaDuration) / GST_FORMAT_PERCENT_MAX, GST_SECOND),
901                 MediaTime(rangeStop * toGstUnsigned64Time(mediaDuration) / GST_FORMAT_PERCENT_MAX, GST_SECOND));
902     }
903
904     // Fallback to the more general maxTimeLoaded() if no range has
905     // been found.
906     if (!timeRanges->length())
907         if (MediaTime loaded = maxTimeLoaded())
908             timeRanges->add(MediaTime::zeroTime(), loaded);
909
910     gst_query_unref(query);
911
912     return timeRanges;
913 }
914
// Central bus handler: dispatches GStreamer bus messages (errors, EOS, state
// changes, buffering, tags, TOC, element-specific messages, ...) to the
// corresponding player logic.
void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
{
    GUniqueOutPtr<GError> err;
    GUniqueOutPtr<gchar> debug;
    MediaPlayer::NetworkState error;
    bool issueError = true;
    bool attemptNextLocation = false;
    const GstStructure* structure = gst_message_get_structure(message);
    GstState requestedState, currentState;

    m_canFallBackToLastFinishedSeekPosition = false;

    if (structure) {
        const gchar* messageTypeName = gst_structure_get_name(structure);

        // Redirect messages are sent from elements, like qtdemux, to
        // notify of the new location(s) of the media.
        if (!g_strcmp0(messageTypeName, "redirect")) {
            mediaLocationChanged(message);
            return;
        }
    }

    // We ignore state changes from internal elements. They are forwarded to playbin2 anyway.
    bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get());

    GST_LOG("Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_ERROR:
        // Ignore errors while the pipeline is being reset, while a plugin
        // installation is in flight, or once an error was already reported.
        if (m_resetPipeline || m_missingPluginsCallback || m_errorOccured)
            break;
        gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
        GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());

        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");

        // Map the GError domain/code onto MediaPlayer's coarser error states.
        error = MediaPlayer::Empty;
        if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_CODEC_NOT_FOUND)
            || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE)
            || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED)
            || g_error_matches(err.get(), GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)
            || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
            error = MediaPlayer::FormatError;
        else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
            // Let the mediaPlayerClient handle the stream error, in
            // this case the HTMLMediaElement will emit a stalled
            // event.
            GST_ERROR("Decode error, let the Media element emit a stalled event.");
            m_loadingStalled = true;
            break;
        } else if (err->domain == GST_STREAM_ERROR) {
            error = MediaPlayer::DecodeError;
            attemptNextLocation = true;
        } else if (err->domain == GST_RESOURCE_ERROR)
            error = MediaPlayer::NetworkError;

        // On decode errors, try the next media location (if any) before
        // surfacing the failure to the client.
        if (attemptNextLocation)
            issueError = !loadNextLocation();
        if (issueError)
            loadingFailed(error);
        break;
    case GST_MESSAGE_EOS:
        didEnd();
        break;
    case GST_MESSAGE_ASYNC_DONE:
        if (!messageSourceIsPlaybin || m_delayingLoad)
            break;
        asyncStateChangeDone();
        break;
    case GST_MESSAGE_STATE_CHANGED: {
        if (!messageSourceIsPlaybin || m_delayingLoad)
            break;
        updateStates();

        // Construct a filename for the graphviz dot file output.
        GstState newState;
        gst_message_parse_state_changed(message, &currentState, &newState, nullptr);
        CString dotFileName = String::format("webkit-video.%s_%s", gst_element_state_get_name(currentState), gst_element_state_get_name(newState)).utf8();
        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());

        break;
    }
    case GST_MESSAGE_BUFFERING:
        processBufferingStats(message);
        break;
    case GST_MESSAGE_DURATION_CHANGED:
        // Duration in MSE is managed by MediaSource, SourceBuffer and AppendPipeline.
        if (messageSourceIsPlaybin && !isMediaSource())
            durationChanged();
        break;
    case GST_MESSAGE_REQUEST_STATE:
        // Honor an element's request only when it asks for a state lower than
        // the pipeline's current one.
        gst_message_parse_request_state(message, &requestedState);
        gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND);
        if (requestedState < currentState) {
            GST_INFO("Element %s requested state change to %s", GST_MESSAGE_SRC_NAME(message),
                gst_element_state_get_name(requestedState));
            m_requestedState = requestedState;
            if (!changePipelineState(requestedState))
                loadingFailed(MediaPlayer::Empty);
        }
        break;
    case GST_MESSAGE_CLOCK_LOST:
        // This can only happen in PLAYING state and we should just
        // get a new clock by moving back to PAUSED and then to
        // PLAYING again.
        // This can happen if the stream that ends in a sink that
        // provides the current clock disappears, for example if
        // the audio sink provides the clock and the audio stream
        // is disabled. It also happens relatively often with
        // HTTP adaptive streams when switching between different
        // variants of a stream.
        gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
        gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
        break;
    case GST_MESSAGE_LATENCY:
        // Recalculate the latency, we don't need any special handling
        // here other than the GStreamer default.
        // This can happen if the latency of live elements changes, or
        // for one reason or another a new live element is added or
        // removed from the pipeline.
        gst_bin_recalculate_latency(GST_BIN(m_pipeline.get()));
        break;
    case GST_MESSAGE_ELEMENT:
        if (gst_is_missing_plugin_message(message)) {
            if (gst_install_plugins_supported()) {
                // Ask the embedder to install the missing plugin, then retry
                // prerolling (READY -> PAUSED) once installation succeeds.
                m_missingPluginsCallback = MediaPlayerRequestInstallMissingPluginsCallback::create([this](uint32_t result) {
                    m_missingPluginsCallback = nullptr;
                    if (result != GST_INSTALL_PLUGINS_SUCCESS)
                        return;

                    changePipelineState(GST_STATE_READY);
                    changePipelineState(GST_STATE_PAUSED);
                });
                GUniquePtr<char> detail(gst_missing_plugin_message_get_installer_detail(message));
                GUniquePtr<char> description(gst_missing_plugin_message_get_description(message));
                m_player->client().requestInstallMissingPlugins(String::fromUTF8(detail.get()), String::fromUTF8(description.get()), *m_missingPluginsCallback);
            }
        }
#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
        else if (GstMpegtsSection* section = gst_message_parse_mpegts_section(message)) {
            processMpegTsSection(section);
            gst_mpegts_section_unref(section);
        }
#endif
#if ENABLE(ENCRYPTED_MEDIA)
        else if (gst_structure_has_name(structure, "drm-key-needed")) {
            GST_DEBUG("drm-key-needed message from %s", GST_MESSAGE_SRC_NAME(message));
            GRefPtr<GstEvent> event;
            gst_structure_get(structure, "event", GST_TYPE_EVENT, &event.outPtr(), nullptr);
            handleProtectionEvent(event.get());
        }
#endif
        else if (gst_structure_has_name(structure, "http-headers")) {
            // A response without a Content-Length header is taken as a live
            // stream, for which on-disk buffering is pointless.
            GstStructure* responseHeaders;
            if (gst_structure_get(structure, "response-headers", GST_TYPE_STRUCTURE, &responseHeaders, nullptr)) {
                if (!gst_structure_has_field(responseHeaders, httpHeaderNameString(HTTPHeaderName::ContentLength).utf8().data())) {
                    GST_INFO("Live stream detected. Disabling on-disk buffering");
                    m_isStreaming = true;
                    setDownloadBuffering();
                }
                gst_structure_free(responseHeaders);
            }
        } else
            GST_DEBUG("Unhandled element message: %" GST_PTR_FORMAT, structure);
        break;
#if ENABLE(VIDEO_TRACK)
    case GST_MESSAGE_TOC:
        processTableOfContents(message);
        break;
#endif
    case GST_MESSAGE_TAG: {
        // Only the image-orientation tag is consumed here, to rotate the
        // rendered video accordingly.
        GstTagList* tags = nullptr;
        GUniqueOutPtr<gchar> tag;
        gst_message_parse_tag(message, &tags);
        if (gst_tag_list_get_string(tags, GST_TAG_IMAGE_ORIENTATION, &tag.outPtr())) {
            if (!g_strcmp0(tag.get(), "rotate-90"))
                setVideoSourceOrientation(ImageOrientation(OriginRightTop));
            else if (!g_strcmp0(tag.get(), "rotate-180"))
                setVideoSourceOrientation(ImageOrientation(OriginBottomRight));
            else if (!g_strcmp0(tag.get(), "rotate-270"))
                setVideoSourceOrientation(ImageOrientation(OriginLeftBottom));
        }
        gst_tag_list_unref(tags);
        break;
    }
    default:
        GST_DEBUG("Unhandled GStreamer message type: %s", GST_MESSAGE_TYPE_NAME(message));
        break;
    }
}
1105
1106 void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message)
1107 {
1108     m_buffering = true;
1109     gst_message_parse_buffering(message, &m_bufferingPercentage);
1110
1111     GST_DEBUG("[Buffering] Buffering: %d%%.", m_bufferingPercentage);
1112
1113     if (m_bufferingPercentage == 100)
1114         updateStates();
1115 }
1116
1117 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
1118 void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section)
1119 {
1120     ASSERT(section);
1121
1122     if (section->section_type == GST_MPEGTS_SECTION_PMT) {
1123         const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
1124         m_metadataTracks.clear();
1125         for (guint i = 0; i < pmt->streams->len; ++i) {
1126             const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
1127             if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
1128                 AtomicString pid = String::number(stream->pid);
1129                 RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = InbandMetadataTextTrackPrivateGStreamer::create(
1130                     InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data, pid);
1131
1132                 // 4.7.10.12.2 Sourcing in-band text tracks
1133                 // If the new text track's kind is metadata, then set the text track in-band metadata track dispatch
1134                 // type as follows, based on the type of the media resource:
1135                 // Let stream type be the value of the "stream_type" field describing the text track's type in the
1136                 // file's program map section, interpreted as an 8-bit unsigned integer. Let length be the value of
1137                 // the "ES_info_length" field for the track in the same part of the program map section, interpreted
1138                 // as an integer as defined by the MPEG-2 specification. Let descriptor bytes be the length bytes
1139                 // following the "ES_info_length" field. The text track in-band metadata track dispatch type must be
1140                 // set to the concatenation of the stream type byte and the zero or more descriptor bytes bytes,
1141                 // expressed in hexadecimal using uppercase ASCII hex digits.
1142                 String inbandMetadataTrackDispatchType;
1143                 appendUnsignedAsHexFixedSize(stream->stream_type, inbandMetadataTrackDispatchType, 2);
1144                 for (guint j = 0; j < stream->descriptors->len; ++j) {
1145                     const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
1146                     for (guint k = 0; k < descriptor->length; ++k)
1147                         appendByteAsHex(descriptor->data[k], inbandMetadataTrackDispatchType);
1148                 }
1149                 track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType);
1150
1151                 m_metadataTracks.add(pid, track);
1152                 m_player->addTextTrack(*track);
1153             }
1154         }
1155     } else {
1156         AtomicString pid = String::number(section->pid);
1157         RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = m_metadataTracks.get(pid);
1158         if (!track)
1159             return;
1160
1161         GRefPtr<GBytes> data = gst_mpegts_section_get_data(section);
1162         gsize size;
1163         const void* bytes = g_bytes_get_data(data.get(), &size);
1164
1165         track->addDataCue(currentMediaTime(), currentMediaTime(), bytes, size);
1166     }
1167 }
1168 #endif
1169
1170 #if ENABLE(VIDEO_TRACK)
1171 void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
1172 {
1173     if (m_chaptersTrack)
1174         m_player->removeTextTrack(*m_chaptersTrack);
1175
1176     m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Chapters, InbandTextTrackPrivate::Generic);
1177     m_player->addTextTrack(*m_chaptersTrack);
1178
1179     GRefPtr<GstToc> toc;
1180     gboolean updated;
1181     gst_message_parse_toc(message, &toc.outPtr(), &updated);
1182     ASSERT(toc);
1183
1184     for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next)
1185         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1186 }
1187
1188 void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry)
1189 {
1190     ASSERT(entry);
1191
1192     auto cue = GenericCueData::create();
1193
1194     gint64 start = -1, stop = -1;
1195     gst_toc_entry_get_start_stop_times(entry, &start, &stop);
1196     if (start != -1)
1197         cue->setStartTime(MediaTime(start, GST_SECOND));
1198     if (stop != -1)
1199         cue->setEndTime(MediaTime(stop, GST_SECOND));
1200
1201     GstTagList* tags = gst_toc_entry_get_tags(entry);
1202     if (tags) {
1203         gchar* title =  nullptr;
1204         gst_tag_list_get_string(tags, GST_TAG_TITLE, &title);
1205         if (title) {
1206             cue->setContent(title);
1207             g_free(title);
1208         }
1209     }
1210
1211     m_chaptersTrack->addGenericCue(cue);
1212
1213     for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next)
1214         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1215 }
1216 #endif
1217
1218 static int findHLSQueue(const GValue* item)
1219 {
1220     GstElement* element = GST_ELEMENT(g_value_get_object(item));
1221     if (g_str_has_prefix(GST_ELEMENT_NAME(element), "queue")) {
1222         GstElement* parent = GST_ELEMENT(GST_ELEMENT_PARENT(element));
1223         if (!GST_IS_OBJECT(parent))
1224             return 1;
1225
1226         if (g_str_has_prefix(GST_ELEMENT_NAME(GST_ELEMENT_PARENT(parent)), "hlsdemux"))
1227             return 0;
1228     }
1229
1230     return 1;
1231 }
1232
1233 static bool isHLSProgressing(GstElement* playbin, GstQuery* query)
1234 {
1235     GValue item = { };
1236     GstIterator* binIterator = gst_bin_iterate_recurse(GST_BIN(playbin));
1237     bool foundHLSQueue = gst_iterator_find_custom(binIterator, reinterpret_cast<GCompareFunc>(findHLSQueue), &item, nullptr);
1238     gst_iterator_free(binIterator);
1239
1240     if (!foundHLSQueue)
1241         return false;
1242
1243     GstElement* queueElement = GST_ELEMENT(g_value_get_object(&item));
1244     bool queryResult = gst_element_query(queueElement, query);
1245     g_value_unset(&item);
1246
1247     return queryResult;
1248 }
1249
1250 void MediaPlayerPrivateGStreamer::fillTimerFired()
1251 {
1252     GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
1253
1254     if (G_UNLIKELY(!gst_element_query(m_pipeline.get(), query))) {
1255         // This query always fails for live pipelines. In the case of HLS, try and find
1256         // the queue inside the HLS element to get a proxy measure of progress. Note
1257         // that the percentage value is rather meaningless as used below.
1258         // This is a hack, see https://bugs.webkit.org/show_bug.cgi?id=141469.
1259         if (!isHLSProgressing(m_pipeline.get(), query)) {
1260             gst_query_unref(query);
1261             return;
1262         }
1263     }
1264
1265     gint64 start, stop;
1266     gdouble fillStatus = 100.0;
1267
1268     gst_query_parse_buffering_range(query, nullptr, &start, &stop, nullptr);
1269     gst_query_unref(query);
1270
1271     if (stop != -1)
1272         fillStatus = 100.0 * stop / GST_FORMAT_PERCENT_MAX;
1273
1274     GST_DEBUG("[Buffering] Download buffer filled up to %f%%", fillStatus);
1275
1276     MediaTime mediaDuration = durationMediaTime();
1277
1278     // Update maxTimeLoaded only if the media duration is
1279     // available. Otherwise we can't compute it.
1280     if (mediaDuration) {
1281         if (fillStatus == 100.0)
1282             m_maxTimeLoaded = mediaDuration;
1283         else
1284             m_maxTimeLoaded = MediaTime(fillStatus * static_cast<double>(toGstUnsigned64Time(mediaDuration)) / 100, GST_SECOND);
1285         GST_DEBUG("[Buffering] Updated maxTimeLoaded: %s", toString(m_maxTimeLoaded).utf8().data());
1286     }
1287
1288     m_downloadFinished = fillStatus == 100.0;
1289     if (!m_downloadFinished) {
1290         updateStates();
1291         return;
1292     }
1293
1294     // Media is now fully loaded. It will play even if network
1295     // connection is cut. Buffering is done, remove the fill source
1296     // from the main loop.
1297     m_fillTimer.stop();
1298     updateStates();
1299 }
1300
1301 MediaTime MediaPlayerPrivateGStreamer::maxMediaTimeSeekable() const
1302 {
1303     if (m_errorOccured)
1304         return MediaTime::zeroTime();
1305
1306     MediaTime duration = durationMediaTime();
1307     GST_DEBUG("maxMediaTimeSeekable, duration: %s", toString(duration).utf8().data());
1308     // infinite duration means live stream
1309     if (duration.isPositiveInfinite())
1310         return MediaTime::zeroTime();
1311
1312     return duration;
1313 }
1314
1315 MediaTime MediaPlayerPrivateGStreamer::maxTimeLoaded() const
1316 {
1317     if (m_errorOccured)
1318         return MediaTime::zeroTime();
1319
1320     MediaTime loaded = m_maxTimeLoaded;
1321     if (m_isEndReached)
1322         loaded = durationMediaTime();
1323     GST_LOG("maxTimeLoaded: %s", toString(loaded).utf8().data());
1324     return loaded;
1325 }
1326
1327 bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
1328 {
1329     if (m_errorOccured || m_loadingStalled)
1330         return false;
1331
1332     if (isLiveStream())
1333         return true;
1334
1335     if (UNLIKELY(!m_pipeline || !durationMediaTime() || (!isMediaSource() && !totalBytes())))
1336         return false;
1337
1338     MediaTime currentMaxTimeLoaded = maxTimeLoaded();
1339     bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
1340     m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
1341     GST_LOG("didLoadingProgress: %s", toString(didLoadingProgress).utf8().data());
1342     return didLoadingProgress;
1343 }
1344
// Returns the total size of the media in bytes, caching the result in
// m_totalBytes. A zero length marks the source as streaming (m_isStreaming).
unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
{
    if (m_errorOccured)
        return 0;

    // Return the cached value when already computed.
    if (m_totalBytes)
        return m_totalBytes;

    if (!m_source)
        return 0;

    if (isLiveStream())
        return 0;

    GstFormat fmt = GST_FORMAT_BYTES;
    gint64 length = 0;
    if (gst_element_query_duration(m_source.get(), fmt, &length)) {
        GST_INFO("totalBytes %" G_GINT64_FORMAT, length);
        m_totalBytes = static_cast<unsigned long long>(length);
        m_isStreaming = !length;
        return m_totalBytes;
    }

    // Fall back to querying the source pads manually.
    // See also https://bugzilla.gnome.org/show_bug.cgi?id=638749
    GstIterator* iter = gst_element_iterate_src_pads(m_source.get());
    bool done = false;
    while (!done) {
        GValue item = G_VALUE_INIT;
        switch (gst_iterator_next(iter, &item)) {
        case GST_ITERATOR_OK: {
            GstPad* pad = static_cast<GstPad*>(g_value_get_object(&item));
            gint64 padLength = 0;
            // Keep the largest duration reported by any source pad.
            if (gst_pad_query_duration(pad, fmt, &padLength) && padLength > length)
                length = padLength;
            break;
        }
        case GST_ITERATOR_RESYNC:
            // The set of pads changed concurrently; restart the iteration.
            gst_iterator_resync(iter);
            break;
        case GST_ITERATOR_ERROR:
            FALLTHROUGH;
        case GST_ITERATOR_DONE:
            done = true;
            break;
        }

        g_value_unset(&item);
    }

    gst_iterator_free(iter);

    GST_INFO("totalBytes %" G_GINT64_FORMAT, length);
    m_totalBytes = static_cast<unsigned long long>(length);
    m_isStreaming = !length;
    return m_totalBytes;
}
1402
// Static trampoline forwarding to the member function; presumably connected
// to playbin's source-change notification elsewhere in this file.
void MediaPlayerPrivateGStreamer::sourceChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->sourceChanged();
}
1407
// "element-added" handler: waits for the GstDownloadBuffer element to appear
// so its temporary-file template can be redirected to our own location and
// files left over from previous runs can be purged.
void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GstElement* element, MediaPlayerPrivateGStreamer* player)
{
    // Only the download buffer element is of interest here.
    if (g_strcmp0(G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(G_OBJECT(element))), "GstDownloadBuffer"))
        return;

    player->m_downloadBuffer = element;
    // Found what we were waiting for; stop listening for new elements.
    g_signal_handlers_disconnect_by_func(bin, reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), player);
    // Get notified as soon as the element creates its temporary file.
    g_signal_connect_swapped(element, "notify::temp-location", G_CALLBACK(downloadBufferFileCreatedCallback), player);

    GUniqueOutPtr<char> oldDownloadTemplate;
    g_object_get(element, "temp-template", &oldDownloadTemplate.outPtr(), nullptr);

    GUniquePtr<char> newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, "var", "tmp", "WebKit-Media-XXXXXX", nullptr));
    g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr);
    GST_TRACE("Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get());

    player->purgeOldDownloadFiles(oldDownloadTemplate.get());
}
1426
// "notify::temp-location" handler: once the download buffer has created its
// temporary file, unlink it right away so it does not outlive the process.
void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer* player)
{
    ASSERT(player->m_downloadBuffer);

    // One-shot notification: detach the handler immediately.
    g_signal_handlers_disconnect_by_func(player->m_downloadBuffer.get(), reinterpret_cast<gpointer>(downloadBufferFileCreatedCallback), player);

    GUniqueOutPtr<char> downloadFile;
    g_object_get(player->m_downloadBuffer.get(), "temp-location", &downloadFile.outPtr(), nullptr);
    player->m_downloadBuffer = nullptr;

    if (UNLIKELY(!FileSystem::deleteFile(downloadFile.get()))) {
        GST_WARNING("Couldn't unlink media temporary file %s after creation", downloadFile.get());
        return;
    }

    GST_TRACE("Unlinked media temporary file %s after creation", downloadFile.get());
}
1444
1445 void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate)
1446 {
1447     if (!downloadFileTemplate)
1448         return;
1449
1450     GUniquePtr<char> templatePath(g_path_get_dirname(downloadFileTemplate));
1451     GUniquePtr<char> templateFile(g_path_get_basename(downloadFileTemplate));
1452     String templatePattern = String(templateFile.get()).replace("X", "?");
1453
1454     for (auto& filePath : FileSystem::listDirectory(templatePath.get(), templatePattern)) {
1455         if (UNLIKELY(!FileSystem::deleteFile(filePath))) {
1456             GST_WARNING("Couldn't unlink legacy media temporary file: %s", filePath.utf8().data());
1457             continue;
1458         }
1459
1460         GST_TRACE("Unlinked legacy media temporary file: %s", filePath.utf8().data());
1461     }
1462 }
1463
// Refreshes m_source from playbin's "source" property and rewires the
// element-added handler used to spot the download buffer element; also
// attaches the MediaPlayer to our own source element.
void MediaPlayerPrivateGStreamer::sourceChanged()
{
    // Disconnect from the previous source's parent bin before replacing it.
    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    m_source.clear();
    g_object_get(m_pipeline.get(), "source", &m_source.outPtr(), nullptr);

    if (WEBKIT_IS_WEB_SRC(m_source.get())) {
        webKitWebSrcSetMediaPlayer(WEBKIT_WEB_SRC(m_source.get()), m_player);
        g_signal_connect(GST_ELEMENT_PARENT(m_source.get()), "element-added", G_CALLBACK(uriDecodeBinElementAddedCallback), this);
    }
}
1477
1478 bool MediaPlayerPrivateGStreamer::hasSingleSecurityOrigin() const
1479 {
1480     if (!m_source)
1481         return false;
1482
1483     if (!WEBKIT_IS_WEB_SRC(m_source.get()))
1484         return true;
1485
1486     GUniqueOutPtr<char> originalURI, resolvedURI;
1487     g_object_get(m_source.get(), "location", &originalURI.outPtr(), "resolved-location", &resolvedURI.outPtr(), nullptr);
1488     if (!originalURI || !resolvedURI)
1489         return false;
1490     if (!g_strcmp0(originalURI.get(), resolvedURI.get()))
1491         return true;
1492
1493     Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::createFromString(String::fromUTF8(resolvedURI.get())));
1494     Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(String::fromUTF8(originalURI.get())));
1495     return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
1496 }
1497
1498 void MediaPlayerPrivateGStreamer::cancelLoad()
1499 {
1500     if (m_networkState < MediaPlayer::Loading || m_networkState == MediaPlayer::Loaded)
1501         return;
1502
1503     if (m_pipeline)
1504         changePipelineState(GST_STATE_READY);
1505 }
1506
// Called (from the bus handler) when the pipeline finished an asynchronous
// state change: completes in-flight seeks or refreshes the player states.
void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
{
    if (!m_pipeline || m_errorOccured)
        return;

    if (m_seeking) {
        if (m_seekIsPending)
            updateStates();
        else {
            GST_DEBUG("[Seek] seeked to %s", toString(m_seekTime).utf8().data());
            m_seeking = false;
            // If another seek was requested while this one was in flight,
            // start it now that the pipeline settled.
            if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek.isValid()) {
                seek(m_timeOfOverlappingSeek);
                m_timeOfOverlappingSeek = MediaTime::invalidTime();
                return;
            }
            m_timeOfOverlappingSeek = MediaTime::invalidTime();

            // The pipeline can still have a pending state. In this case a position query will fail.
            // Right now we can use m_seekTime as a fallback.
            m_canFallBackToLastFinishedSeekPosition = true;
            timeChanged();
        }
    } else
        updateStates();
}
1533
1534 void MediaPlayerPrivateGStreamer::updateStates()
1535 {
1536     if (!m_pipeline)
1537         return;
1538
1539     if (m_errorOccured)
1540         return;
1541
1542     MediaPlayer::NetworkState oldNetworkState = m_networkState;
1543     MediaPlayer::ReadyState oldReadyState = m_readyState;
1544     GstState pending;
1545     GstState state;
1546     bool stateReallyChanged = false;
1547
1548     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
1549     if (state != m_currentState) {
1550         m_oldState = m_currentState;
1551         m_currentState = state;
1552         stateReallyChanged = true;
1553     }
1554
1555     bool shouldUpdatePlaybackState = false;
1556     switch (getStateResult) {
1557     case GST_STATE_CHANGE_SUCCESS: {
1558         GST_DEBUG("State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
1559
1560         // Do nothing if on EOS and state changed to READY to avoid recreating the player
1561         // on HTMLMediaElement and properly generate the video 'ended' event.
1562         if (m_isEndReached && m_currentState == GST_STATE_READY)
1563             break;
1564
1565         m_resetPipeline = m_currentState <= GST_STATE_READY;
1566
1567         bool didBuffering = m_buffering;
1568
1569         // Update ready and network states.
1570         switch (m_currentState) {
1571         case GST_STATE_NULL:
1572             m_readyState = MediaPlayer::HaveNothing;
1573             m_networkState = MediaPlayer::Empty;
1574             break;
1575         case GST_STATE_READY:
1576             m_readyState = MediaPlayer::HaveMetadata;
1577             m_networkState = MediaPlayer::Empty;
1578             break;
1579         case GST_STATE_PAUSED:
1580         case GST_STATE_PLAYING:
1581             if (m_buffering) {
1582                 if (m_bufferingPercentage == 100) {
1583                     GST_DEBUG("[Buffering] Complete.");
1584                     m_buffering = false;
1585                     m_readyState = MediaPlayer::HaveEnoughData;
1586                     m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
1587                 } else {
1588                     m_readyState = MediaPlayer::HaveCurrentData;
1589                     m_networkState = MediaPlayer::Loading;
1590                 }
1591             } else if (m_downloadFinished) {
1592                 m_readyState = MediaPlayer::HaveEnoughData;
1593                 m_networkState = MediaPlayer::Loaded;
1594             } else {
1595                 m_readyState = MediaPlayer::HaveFutureData;
1596                 m_networkState = MediaPlayer::Loading;
1597             }
1598
1599             break;
1600         default:
1601             ASSERT_NOT_REACHED();
1602             break;
1603         }
1604
1605         // Sync states where needed.
1606         if (m_currentState == GST_STATE_PAUSED) {
1607             if (!m_volumeAndMuteInitialized) {
1608                 notifyPlayerOfVolumeChange();
1609                 notifyPlayerOfMute();
1610                 m_volumeAndMuteInitialized = true;
1611             }
1612
1613             if (didBuffering && !m_buffering && !m_paused && m_playbackRate) {
1614                 GST_DEBUG("[Buffering] Restarting playback.");
1615                 changePipelineState(GST_STATE_PLAYING);
1616             }
1617         } else if (m_currentState == GST_STATE_PLAYING) {
1618             m_paused = false;
1619
1620             if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
1621                 GST_DEBUG("[Buffering] Pausing stream for buffering.");
1622                 changePipelineState(GST_STATE_PAUSED);
1623             }
1624         } else
1625             m_paused = true;
1626
1627         GST_DEBUG("Old state: %s, new state: %s (requested: %s)", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState), gst_element_state_get_name(m_requestedState));
1628         if (m_requestedState == GST_STATE_PAUSED && m_currentState == GST_STATE_PAUSED) {
1629             shouldUpdatePlaybackState = true;
1630             GST_INFO("Requested state change to %s was completed", gst_element_state_get_name(m_currentState));
1631         }
1632
1633         // Emit play state change notification only when going to PLAYING so that
1634         // the media element gets a chance to enable its page sleep disabler.
1635         // Emitting this notification in more cases triggers unwanted code paths
1636         // and test timeouts.
1637         if (stateReallyChanged && (m_oldState != m_currentState) && (m_oldState == GST_STATE_PAUSED && m_currentState == GST_STATE_PLAYING)) {
1638             GST_INFO("Playback state changed from %s to %s. Notifying the media player client", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState));
1639             shouldUpdatePlaybackState = true;
1640         }
1641
1642         break;
1643     }
1644     case GST_STATE_CHANGE_ASYNC:
1645         GST_DEBUG("Async: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
1646         // Change in progress.
1647         break;
1648     case GST_STATE_CHANGE_FAILURE:
1649         GST_DEBUG("Failure: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
1650         // Change failed
1651         return;
1652     case GST_STATE_CHANGE_NO_PREROLL:
1653         GST_DEBUG("No preroll: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
1654
1655         // Live pipelines go in PAUSED without prerolling.
1656         m_isStreaming = true;
1657         setDownloadBuffering();
1658
1659         if (m_currentState == GST_STATE_READY)
1660             m_readyState = MediaPlayer::HaveNothing;
1661         else if (m_currentState == GST_STATE_PAUSED) {
1662             m_readyState = MediaPlayer::HaveEnoughData;
1663             m_paused = true;
1664         } else if (m_currentState == GST_STATE_PLAYING)
1665             m_paused = false;
1666
1667         if (!m_paused && m_playbackRate)
1668             changePipelineState(GST_STATE_PLAYING);
1669
1670         m_networkState = MediaPlayer::Loading;
1671         break;
1672     default:
1673         GST_DEBUG("Else : %d", getStateResult);
1674         break;
1675     }
1676
1677     m_requestedState = GST_STATE_VOID_PENDING;
1678
1679     if (shouldUpdatePlaybackState)
1680         m_player->playbackStateChanged();
1681
1682     if (m_networkState != oldNetworkState) {
1683         GST_DEBUG("Network State Changed from %s to %s", convertEnumerationToString(oldNetworkState).utf8().data(), convertEnumerationToString(m_networkState).utf8().data());
1684         m_player->networkStateChanged();
1685     }
1686     if (m_readyState != oldReadyState) {
1687         GST_DEBUG("Ready State Changed from %s to %s", convertEnumerationToString(oldReadyState).utf8().data(), convertEnumerationToString(m_readyState).utf8().data());
1688         m_player->readyStateChanged();
1689     }
1690
1691     if (getStateResult == GST_STATE_CHANGE_SUCCESS && m_currentState >= GST_STATE_PAUSED) {
1692         updatePlaybackRate();
1693         if (m_seekIsPending) {
1694             GST_DEBUG("[Seek] committing pending seek to %s", toString(m_seekTime).utf8().data());
1695             m_seekIsPending = false;
1696             m_seeking = doSeek(m_seekTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
1697             if (!m_seeking)
1698                 GST_DEBUG("[Seek] seeking to %s failed", toString(m_seekTime).utf8().data());
1699         }
1700     }
1701 }
1702
1703 void MediaPlayerPrivateGStreamer::mediaLocationChanged(GstMessage* message)
1704 {
1705     if (m_mediaLocations)
1706         gst_structure_free(m_mediaLocations);
1707
1708     const GstStructure* structure = gst_message_get_structure(message);
1709     if (structure) {
1710         // This structure can contain:
1711         // - both a new-location string and embedded locations structure
1712         // - or only a new-location string.
1713         m_mediaLocations = gst_structure_copy(structure);
1714         const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
1715
1716         if (locations)
1717             m_mediaLocationCurrentIndex = static_cast<int>(gst_value_list_get_size(locations)) -1;
1718
1719         loadNextLocation();
1720     }
1721 }
1722
1723 bool MediaPlayerPrivateGStreamer::loadNextLocation()
1724 {
1725     if (!m_mediaLocations)
1726         return false;
1727
1728     const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
1729     const gchar* newLocation = nullptr;
1730
1731     if (!locations) {
1732         // Fallback on new-location string.
1733         newLocation = gst_structure_get_string(m_mediaLocations, "new-location");
1734         if (!newLocation)
1735             return false;
1736     }
1737
1738     if (!newLocation) {
1739         if (m_mediaLocationCurrentIndex < 0) {
1740             m_mediaLocations = nullptr;
1741             return false;
1742         }
1743
1744         const GValue* location = gst_value_list_get_value(locations,
1745                                                           m_mediaLocationCurrentIndex);
1746         const GstStructure* structure = gst_value_get_structure(location);
1747
1748         if (!structure) {
1749             m_mediaLocationCurrentIndex--;
1750             return false;
1751         }
1752
1753         newLocation = gst_structure_get_string(structure, "new-location");
1754     }
1755
1756     if (newLocation) {
1757         // Found a candidate. new-location is not always an absolute url
1758         // though. We need to take the base of the current url and
1759         // append the value of new-location to it.
1760         URL baseUrl = gst_uri_is_valid(newLocation) ? URL() : m_url;
1761         URL newUrl = URL(baseUrl, newLocation);
1762         convertToInternalProtocol(newUrl);
1763
1764         RefPtr<SecurityOrigin> securityOrigin = SecurityOrigin::create(m_url);
1765         if (securityOrigin->canRequest(newUrl)) {
1766             GST_INFO("New media url: %s", newUrl.string().utf8().data());
1767
1768             // Reset player states.
1769             m_networkState = MediaPlayer::Loading;
1770             m_player->networkStateChanged();
1771             m_readyState = MediaPlayer::HaveNothing;
1772             m_player->readyStateChanged();
1773
1774             // Reset pipeline state.
1775             m_resetPipeline = true;
1776             changePipelineState(GST_STATE_READY);
1777
1778             GstState state;
1779             gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
1780             if (state <= GST_STATE_READY) {
1781                 // Set the new uri and start playing.
1782                 setPlaybinURL(newUrl);
1783                 changePipelineState(GST_STATE_PLAYING);
1784                 return true;
1785             }
1786         } else
1787             GST_INFO("Not allowed to load new media location: %s", newUrl.string().utf8().data());
1788     }
1789     m_mediaLocationCurrentIndex--;
1790     return false;
1791 }
1792
// Bus callback for load-related state changes: recomputes the cached
// ready/network states from the current pipeline state.
void MediaPlayerPrivateGStreamer::loadStateChanged()
{
    updateStates();
}
1797
// Refreshes the cached player states and notifies the MediaPlayer client
// that the playback position changed (e.g. after a seek or at EOS).
void MediaPlayerPrivateGStreamer::timeChanged()
{
    updateStates();
    m_player->timeChanged();
}
1803
1804 void MediaPlayerPrivateGStreamer::didEnd()
1805 {
1806     // Synchronize position and duration values to not confuse the
1807     // HTMLMediaElement. In some cases like reverse playback the
1808     // position is not always reported as 0 for instance.
1809     MediaTime now = currentMediaTime();
1810     if (now > MediaTime { } && now <= durationMediaTime())
1811         m_player->durationChanged();
1812
1813     m_isEndReached = true;
1814     timeChanged();
1815
1816     if (!m_player->client().mediaPlayerIsLooping()) {
1817         m_paused = true;
1818         m_durationAtEOS = durationMediaTime();
1819         changePipelineState(GST_STATE_READY);
1820         m_downloadFinished = false;
1821     }
1822 }
1823
1824 void MediaPlayerPrivateGStreamer::durationChanged()
1825 {
1826     MediaTime previousDuration = durationMediaTime();
1827
1828     // FIXME: Check if this method is still useful, because it's not doing its work at all
1829     // since bug #159458 removed a cacheDuration() call here.
1830
1831     // Avoid emiting durationchanged in the case where the previous
1832     // duration was 0 because that case is already handled by the
1833     // HTMLMediaElement.
1834     if (previousDuration && durationMediaTime() != previousDuration)
1835         m_player->durationChanged();
1836 }
1837
1838 void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState error)
1839 {
1840     GST_WARNING("Loading failed, error: %d", error);
1841
1842     m_errorOccured = true;
1843     if (m_networkState != error) {
1844         m_networkState = error;
1845         m_player->networkStateChanged();
1846     }
1847     if (m_readyState != MediaPlayer::HaveNothing) {
1848         m_readyState = MediaPlayer::HaveNothing;
1849         m_player->readyStateChanged();
1850     }
1851
1852     // Loading failed, remove ready timer.
1853     m_readyTimerHandler.stop();
1854 }
1855
// Builds, once, the set of MIME types playable with the currently installed
// GStreamer plugins by probing the registry for audio/video decoders and
// demuxers of at least MARGINAL rank. The set is cached for the process
// lifetime in a NeverDestroyed singleton.
static HashSet<String, ASCIICaseInsensitiveHash>& mimeTypeSet()
{
    static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> mimeTypes = []()
    {
        MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements();
        HashSet<String, ASCIICaseInsensitiveHash> set;

        // Element factory lists, split by the kind of media they handle.
        GList* audioDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO, GST_RANK_MARGINAL);
        GList* videoDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, GST_RANK_MARGINAL);
        GList* demuxerFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DEMUXER, GST_RANK_MARGINAL);

        enum ElementType {
            AudioDecoder = 0,
            VideoDecoder,
            Demuxer
        };
        // Maps one GStreamer caps string to the web-facing MIME types it
        // enables; an empty list means the caps string itself is the type.
        struct GstCapsWebKitMapping {
            ElementType elementType;
            const char* capsString;
            Vector<AtomicString> webkitMimeTypes;
        };

        Vector<GstCapsWebKitMapping> mapping = {
            {AudioDecoder, "audio/midi", {"audio/midi", "audio/riff-midi"}},
            {AudioDecoder, "audio/x-sbc", { }},
            {AudioDecoder, "audio/x-sid", { }},
            {AudioDecoder, "audio/x-flac", {"audio/x-flac", "audio/flac"}},
            {AudioDecoder, "audio/x-wav", {"audio/x-wav", "audio/wav", "audio/vnd.wave"}},
            {AudioDecoder, "audio/x-wavpack", {"audio/x-wavpack"}},
            {AudioDecoder, "audio/x-speex", {"audio/speex", "audio/x-speex"}},
            {AudioDecoder, "audio/x-ac3", { }},
            {AudioDecoder, "audio/x-eac3", {"audio/x-ac3"}},
            {AudioDecoder, "audio/x-dts", { }},
            {VideoDecoder, "video/x-h264, profile=(string)high", {"video/mp4", "video/x-m4v"}},
            {VideoDecoder, "video/x-msvideocodec", {"video/x-msvideo"}},
            {VideoDecoder, "video/x-h263", { }},
            {VideoDecoder, "video/mpegts", { }},
            {VideoDecoder, "video/mpeg, mpegversion=(int){1,2}, systemstream=(boolean)false", {"video/mpeg"}},
            {VideoDecoder, "video/x-dirac", { }},
            {VideoDecoder, "video/x-flash-video", {"video/flv", "video/x-flv"}},
            {Demuxer, "video/quicktime", { }},
            {Demuxer, "video/quicktime, variant=(string)3gpp", {"video/3gpp"}},
            {Demuxer, "application/x-3gp", { }},
            {Demuxer, "video/x-ms-asf", { }},
            {Demuxer, "audio/x-aiff", { }},
            {Demuxer, "application/x-pn-realaudio", { }},
            {Demuxer, "application/vnd.rn-realmedia", { }},
            {Demuxer, "audio/x-wav", {"audio/x-wav", "audio/wav", "audio/vnd.wave"}},
            {Demuxer, "application/x-hls", {"application/vnd.apple.mpegurl", "application/x-mpegurl"}}
        };

        // Add each mapping's MIME types if the registry has a matching element.
        for (auto& current : mapping) {
            GList* factories = demuxerFactories;
            if (current.elementType == AudioDecoder)
                factories = audioDecoderFactories;
            else if (current.elementType == VideoDecoder)
                factories = videoDecoderFactories;

            if (gstRegistryHasElementForMediaType(factories, current.capsString)) {
                if (!current.webkitMimeTypes.isEmpty()) {
                    for (const auto& mimeType : current.webkitMimeTypes)
                        set.add(mimeType);
                } else
                    set.add(AtomicString(current.capsString));
            }
        }

        // Codec-dependent combinations below also feed the WebM check later.
        bool opusSupported = false;
        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-opus")) {
            opusSupported = true;
            set.add(AtomicString("audio/opus"));
        }

        bool vorbisSupported = false;
        if (gstRegistryHasElementForMediaType(demuxerFactories, "application/ogg")) {
            set.add(AtomicString("application/ogg"));

            vorbisSupported = gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-vorbis");
            if (vorbisSupported) {
                set.add(AtomicString("audio/ogg"));
                set.add(AtomicString("audio/x-vorbis+ogg"));
            }

            if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-theora"))
                set.add(AtomicString("video/ogg"));
        }

        // MPEG-1 layer 1-3 audio (mp1/mp3).
        bool audioMpegSupported = false;
        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int)1, layer=(int)[1, 3]")) {
            audioMpegSupported = true;
            set.add(AtomicString("audio/mp1"));
            set.add(AtomicString("audio/mp3"));
            set.add(AtomicString("audio/x-mp3"));
        }

        // MPEG-2/4 audio (AAC and friends).
        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int){2, 4}")) {
            audioMpegSupported = true;
            set.add(AtomicString("audio/aac"));
            set.add(AtomicString("audio/mp2"));
            set.add(AtomicString("audio/mp4"));
            set.add(AtomicString("audio/x-m4a"));
        }

        if (audioMpegSupported) {
            set.add(AtomicString("audio/mpeg"));
            set.add(AtomicString("audio/x-mpeg"));
        }

        // Matroska demuxing also covers WebM when VP8/9/10 (video) or
        // Vorbis/Opus (audio) decoders are present.
        if (gstRegistryHasElementForMediaType(demuxerFactories, "video/x-matroska")) {
            set.add(AtomicString("video/x-matroska"));

            if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp8")
                || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp9")
                || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp10"))
                set.add(AtomicString("video/webm"));

            if (vorbisSupported || opusSupported)
                set.add(AtomicString("audio/webm"));
        }

        // The factory lists are owned by us and must be freed.
        gst_plugin_feature_list_free(audioDecoderFactories);
        gst_plugin_feature_list_free(videoDecoderFactories);
        gst_plugin_feature_list_free(demuxerFactories);
        return set;
    }();
    return mimeTypes;
}
1983
// Copies the lazily-built set of playable MIME types (see mimeTypeSet())
// into the caller-provided set.
void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    types = mimeTypeSet();
}
1988
1989 MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::supportsType(const MediaEngineSupportParameters& parameters)
1990 {
1991     MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
1992 #if ENABLE(MEDIA_SOURCE)
1993     // MediaPlayerPrivateGStreamerMSE is in charge of mediasource playback, not us.
1994     if (parameters.isMediaSource)
1995         return result;
1996 #endif
1997
1998     if (parameters.isMediaStream)
1999         return result;
2000
2001     if (parameters.type.isEmpty())
2002         return result;
2003
2004     // spec says we should not return "probably" if the codecs string is empty
2005     if (mimeTypeSet().contains(parameters.type.containerType()))
2006         result = parameters.type.codecs().isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;
2007
2008     return extendedSupportsType(parameters, result);
2009 }
2010
2011 void MediaPlayerPrivateGStreamer::setDownloadBuffering()
2012 {
2013     if (!m_pipeline)
2014         return;
2015
2016     unsigned flags;
2017     g_object_get(m_pipeline.get(), "flags", &flags, nullptr);
2018
2019     unsigned flagDownload = getGstPlayFlag("download");
2020
2021     // We don't want to stop downloading if we already started it.
2022     if (flags & flagDownload && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline)
2023         return;
2024
2025     bool shouldDownload = !isLiveStream() && m_preload == MediaPlayer::Auto;
2026     if (shouldDownload) {
2027         GST_INFO("Enabling on-disk buffering");
2028         g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
2029         m_fillTimer.startRepeating(200_ms);
2030     } else {
2031         GST_INFO("Disabling on-disk buffering");
2032         g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr);
2033         m_fillTimer.stop();
2034     }
2035 }
2036
2037 void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
2038 {
2039     if (preload == MediaPlayer::Auto && isLiveStream())
2040         return;
2041
2042     m_preload = preload;
2043     setDownloadBuffering();
2044
2045     if (m_delayingLoad && m_preload != MediaPlayer::None) {
2046         m_delayingLoad = false;
2047         commitLoad();
2048     }
2049 }
2050
// Builds the audio sink handed to playbin. On GStreamer >= 1.4.2 pitch
// preservation is handled through the audio-filter property (see
// createGSTPlayBin()), so only the Web Audio provider bin (if enabled) or
// the bare autoaudiosink is returned; older versions get a hand-built bin
// with scaletempo. Returns a floating reference, or nullptr on failure.
GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
{
    m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr);
    if (!m_autoAudioSink) {
        GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation");
        return nullptr;
    }

    // Lets us tweak properties of the real sink autoaudiosink instantiates.
    g_signal_connect_swapped(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);

    GstElement* audioSinkBin;

    if (webkitGstCheckVersion(1, 4, 2)) {
#if ENABLE(WEB_AUDIO)
        audioSinkBin = gst_bin_new("audio-sink");
        ensureAudioSourceProvider();
        m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
        return audioSinkBin;
#else
        return m_autoAudioSink.get();
#endif
    }

    // Construct audio sink only if pitch preserving is enabled.
    // If GStreamer 1.4.2 is used the audio-filter playbin property is used instead.
    if (m_preservesPitch) {
        GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
        if (!scale) {
            GST_WARNING("Failed to create scaletempo");
            return m_autoAudioSink.get();
        }

        audioSinkBin = gst_bin_new("audio-sink");
        gst_bin_add(GST_BIN(audioSinkBin), scale);
        GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(scale, "sink"));
        gst_element_add_pad(audioSinkBin, gst_ghost_pad_new("sink", pad.get()));

#if ENABLE(WEB_AUDIO)
        ensureAudioSourceProvider();
        m_audioSourceProvider->configureAudioBin(audioSinkBin, scale);
#else
        // scaletempo -> audioconvert -> audioresample -> autoaudiosink.
        GstElement* convert = gst_element_factory_make("audioconvert", nullptr);
        GstElement* resample = gst_element_factory_make("audioresample", nullptr);

        gst_bin_add_many(GST_BIN(audioSinkBin), convert, resample, m_autoAudioSink.get(), nullptr);

        if (!gst_element_link_many(scale, convert, resample, m_autoAudioSink.get(), nullptr)) {
            GST_WARNING("Failed to link audio sink elements");
            gst_object_unref(audioSinkBin);
            return m_autoAudioSink.get();
        }
#endif
        return audioSinkBin;
    }

#if ENABLE(WEB_AUDIO)
    audioSinkBin = gst_bin_new("audio-sink");
    ensureAudioSourceProvider();
    m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
    return audioSinkBin;
#endif
    ASSERT_NOT_REACHED();
    return nullptr;
}
2115
// Returns playbin's current "audio-sink" element. g_object_get() on an
// object property returns a new reference, so the caller owns the result
// and must unref it.
GstElement* MediaPlayerPrivateGStreamer::audioSink() const
{
    GstElement* sink;
    g_object_get(m_pipeline.get(), "audio-sink", &sink, nullptr);
    return sink;
}
2122
2123 #if ENABLE(WEB_AUDIO)
// Lazily instantiates the provider that exposes this player's audio to
// Web Audio.
void MediaPlayerPrivateGStreamer::ensureAudioSourceProvider()
{
    if (!m_audioSourceProvider)
        m_audioSourceProvider = std::make_unique<AudioSourceProviderGStreamer>();
}
2129
// Returns the (lazily-created) Web Audio source provider; never null.
AudioSourceProvider* MediaPlayerPrivateGStreamer::audioSourceProvider()
{
    ensureAudioSourceProvider();
    return m_audioSourceProvider.get();
}
2135 #endif
2136
// Creates and wires up the playbin pipeline: bus watch, track-change and
// source signals, text (subtitle) sink, audio/video sinks, optional pitch
// filter and rotation filter.
void MediaPlayerPrivateGStreamer::createGSTPlayBin()
{
    ASSERT(!m_pipeline);

    // gst_element_factory_make() returns a floating reference so
    // we should not adopt.
    setPipeline(gst_element_factory_make("playbin", "play"));
    setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get()));

    // Let also other listeners subscribe to (application) messages in this bus.
    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    gst_bus_add_signal_watch_full(bus.get(), RunLoopSourcePriority::RunLoopDispatcher);
    g_signal_connect(bus.get(), "message", G_CALLBACK(busMessageCallback), this);

    // Start with the mute state the media element already has.
    g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);

    // Track-list and source-change notifications from playbin.
    g_signal_connect_swapped(m_pipeline.get(), "notify::source", G_CALLBACK(sourceChangedCallback), this);
    g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
    g_signal_connect_swapped(m_pipeline.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
#if ENABLE(VIDEO_TRACK)
    g_signal_connect_swapped(m_pipeline.get(), "text-changed", G_CALLBACK(textChangedCallback), this);

    GstElement* textCombiner = webkitTextCombinerNew();
    ASSERT(textCombiner);
    g_object_set(m_pipeline.get(), "text-stream-combiner", textCombiner, nullptr);

    m_textAppSink = webkitTextSinkNew();
    ASSERT(m_textAppSink);

    m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
    ASSERT(m_textAppSinkPad);

    // GStreamer 1.13 renamed the WebVTT caps from text/vtt to
    // application/x-subtitle-vtt; request whichever this runtime uses.
    GRefPtr<GstCaps> textCaps;
    if (webkitGstCheckVersion(1, 13, 0))
        textCaps = adoptGRef(gst_caps_new_empty_simple("application/x-subtitle-vtt"));
    else
        textCaps = adoptGRef(gst_caps_new_empty_simple("text/vtt"));
    g_object_set(m_textAppSink.get(), "emit-signals", TRUE, "enable-last-sample", FALSE, "caps", textCaps.get(), nullptr);
    g_signal_connect_swapped(m_textAppSink.get(), "new-sample", G_CALLBACK(newTextSampleCallback), this);

    g_object_set(m_pipeline.get(), "text-sink", m_textAppSink.get(), nullptr);
#endif

    g_object_set(m_pipeline.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);

    configurePlaySink();

    // On 1.4.2 and newer we use the audio-filter property instead.
    // See https://bugzilla.gnome.org/show_bug.cgi?id=735748 for
    // the reason for using >= 1.4.2 instead of >= 1.4.0.
    if (m_preservesPitch && webkitGstCheckVersion(1, 4, 2)) {
        GstElement* scale = gst_element_factory_make("scaletempo", nullptr);

        if (!scale)
            GST_WARNING("Failed to create scaletempo");
        else
            g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
    }

    if (!m_renderingCanBeAccelerated) {
        // If not using accelerated compositing, let GStreamer handle
        // the image-orientation tag.
        GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
        if (videoFlip) {
            // method=8 is "automatic" rotation based on the stream tags.
            g_object_set(videoFlip, "method", 8, nullptr);
            g_object_set(m_pipeline.get(), "video-filter", videoFlip, nullptr);
        } else
            GST_WARNING("The videoflip element is missing, video rotation support is now disabled. Please check your gst-plugins-good installation.");
    }

    // Track the negotiated video caps to update natural size, etc.
    GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
    if (videoSinkPad)
        g_signal_connect_swapped(videoSinkPad.get(), "notify::caps", G_CALLBACK(videoSinkCapsChangedCallback), this);
}
2211
2212 void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
2213 {
2214     GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_pipeline.get()), GST_STATE_PAUSED);
2215     gst_element_post_message(m_pipeline.get(), message);
2216 }
2217
2218 bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
2219 {
2220     if (WEBKIT_IS_WEB_SRC(m_source.get()))
2221         return webKitSrcPassedCORSAccessCheck(WEBKIT_WEB_SRC(m_source.get()));
2222     return false;
2223 }
2224
2225 bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
2226 {
2227     if (isLiveStream())
2228         return false;
2229
2230     if (m_url.isLocalFile())
2231         return true;
2232
2233     if (m_url.protocolIsInHTTPFamily())
2234         return true;
2235
2236     return false;
2237 }
2238
2239 }
2240
2241 #endif // USE(GSTREAMER)