1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015, 2016 Igalia S.L
7  * Copyright (C) 2014 Cable Television Laboratories, Inc.
8  * Copyright (C) 2015, 2016 Metrological Group B.V.
9  *
10  * This library is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Library General Public
12  * License as published by the Free Software Foundation; either
13  * version 2 of the License, or (at your option) any later version.
14  *
15  * This library is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18  * Library General Public License for more details.
19  *
20  * You should have received a copy of the GNU Library General Public License
21  * along with this library; see the file COPYING.LIB.  If not, write to
22  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
23  * Boston, MA 02110-1301, USA.
24  */
25
26 #include "config.h"
27 #include "MediaPlayerPrivateGStreamer.h"
28
29 #if ENABLE(VIDEO) && USE(GSTREAMER)
30
31 #include "GStreamerCommon.h"
32 #include "GStreamerRegistryScanner.h"
33 #include "HTTPHeaderNames.h"
34 #include "MIMETypeRegistry.h"
35 #include "MediaPlayer.h"
36 #include "MediaPlayerRequestInstallMissingPluginsCallback.h"
37 #include "NotImplemented.h"
38 #include "SecurityOrigin.h"
39 #include "TimeRanges.h"
40 #include "WebKitWebSourceGStreamer.h"
41 #include <glib.h>
42 #include <gst/gst.h>
43 #include <gst/pbutils/missing-plugins.h>
44 #include <limits>
45 #include <wtf/FileSystem.h>
46 #include <wtf/HexNumber.h>
47 #include <wtf/MediaTime.h>
48 #include <wtf/NeverDestroyed.h>
49 #include <wtf/StringPrintStream.h>
50 #include <wtf/URL.h>
51 #include <wtf/WallTime.h>
52 #include <wtf/glib/GUniquePtr.h>
53 #include <wtf/glib/RunLoopSourcePriority.h>
54 #include <wtf/text/CString.h>
55
56 #if ENABLE(MEDIA_STREAM) && GST_CHECK_VERSION(1, 10, 0)
57 #include "GStreamerMediaStreamSource.h"
58 #endif
59
60 #if ENABLE(VIDEO_TRACK)
61 #include "AudioTrackPrivateGStreamer.h"
62 #include "InbandMetadataTextTrackPrivateGStreamer.h"
63 #include "InbandTextTrackPrivateGStreamer.h"
64 #include "TextCombinerGStreamer.h"
65 #include "TextSinkGStreamer.h"
66 #include "VideoTrackPrivateGStreamer.h"
67 #endif
68
69 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
70 #define GST_USE_UNSTABLE_API
71 #include <gst/mpegts/mpegts.h>
72 #undef GST_USE_UNSTABLE_API
73 #endif
74 #include <gst/audio/streamvolume.h>
75
76 #if ENABLE(MEDIA_SOURCE)
77 #include "MediaSource.h"
78 #include "WebKitMediaSourceGStreamer.h"
79 #endif
80
81 #if ENABLE(WEB_AUDIO)
82 #include "AudioSourceProviderGStreamer.h"
83 #endif
84
85 GST_DEBUG_CATEGORY_EXTERN(webkit_media_player_debug);
86 #define GST_CAT_DEFAULT webkit_media_player_debug
87
88
89 namespace WebCore {
90 using namespace std;
91
92 static void busMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
93 {
94     player->handleMessage(message);
95 }
96
97 void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
98 {
99     player->setAudioStreamProperties(object);
100 }
101
102 void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
103 {
104     if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
105         return;
106
107     const char* role = m_player->client().mediaPlayerIsVideo() ? "video" : "music";
108     GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr);
109     g_object_set(object, "stream-properties", structure, nullptr);
110     gst_structure_free(structure);
111     GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
112     GST_DEBUG_OBJECT(pipeline(), "Set media.role as %s at %s", role, elementName.get());
113 }
114
115 void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar)
116 {
117     MediaPlayerPrivateGStreamerBase::initializeDebugCategory();
118     if (isAvailable()) {
119         registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamer>(player); },
120             getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
121     }
122 }
123
124 bool MediaPlayerPrivateGStreamer::isAvailable()
125 {
126     if (!initializeGStreamerAndRegisterWebKitElements())
127         return false;
128
129     GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
130     return factory;
131 }
132
133 MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
134     : MediaPlayerPrivateGStreamerBase(player)
135     , m_buffering(false)
136     , m_bufferingPercentage(0)
137     , m_cachedPosition(MediaTime::invalidTime())
138     , m_canFallBackToLastFinishedSeekPosition(false)
139     , m_changingRate(false)
140     , m_downloadFinished(false)
141     , m_errorOccured(false)
142     , m_isEndReached(false)
143     , m_isStreaming(false)
144     , m_durationAtEOS(MediaTime::invalidTime())
145     , m_paused(true)
146     , m_playbackRate(1)
147     , m_requestedState(GST_STATE_VOID_PENDING)
148     , m_resetPipeline(false)
149     , m_seeking(false)
150     , m_seekIsPending(false)
151     , m_seekTime(MediaTime::invalidTime())
152     , m_source(nullptr)
153     , m_volumeAndMuteInitialized(false)
154     , m_mediaLocations(nullptr)
155     , m_mediaLocationCurrentIndex(0)
156     , m_playbackRatePause(false)
157     , m_timeOfOverlappingSeek(MediaTime::invalidTime())
158     , m_lastPlaybackRate(1)
159     , m_fillTimer(*this, &MediaPlayerPrivateGStreamer::fillTimerFired)
160     , m_maxTimeLoaded(MediaTime::zeroTime())
161     , m_preload(player->preload())
162     , m_delayingLoad(false)
163     , m_maxTimeLoadedAtLastDidLoadingProgress(MediaTime::zeroTime())
164     , m_hasVideo(false)
165     , m_hasAudio(false)
166     , m_readyTimerHandler(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::readyTimerFired)
167     , m_totalBytes(0)
168     , m_preservesPitch(false)
169 {
170 #if USE(GLIB)
171     m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
172 #endif
173 }
174
175 MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
176 {
177     GST_DEBUG_OBJECT(pipeline(), "Disposing player");
178
179 #if ENABLE(VIDEO_TRACK)
180     for (auto& track : m_audioTracks.values())
181         track->disconnect();
182
183     for (auto& track : m_textTracks.values())
184         track->disconnect();
185
186     for (auto& track : m_videoTracks.values())
187         track->disconnect();
188 #endif
189     if (m_fillTimer.isActive())
190         m_fillTimer.stop();
191
192     if (m_mediaLocations) {
193         gst_structure_free(m_mediaLocations);
194         m_mediaLocations = nullptr;
195     }
196
197     if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
198         g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);
199
200     if (m_autoAudioSink) {
201         g_signal_handlers_disconnect_by_func(G_OBJECT(m_autoAudioSink.get()),
202             reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);
203     }
204
205     m_readyTimerHandler.stop();
206     for (auto& missingPluginCallback : m_missingPluginCallbacks) {
207         if (missingPluginCallback)
208             missingPluginCallback->invalidate();
209     }
210     m_missingPluginCallbacks.clear();
211
212     if (m_videoSink) {
213         GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
214         g_signal_handlers_disconnect_matched(videoSinkPad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
215     }
216
217     if (m_pipeline) {
218         GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
219         ASSERT(bus);
220         g_signal_handlers_disconnect_by_func(bus.get(), gpointer(busMessageCallback), this);
221         gst_bus_remove_signal_watch(bus.get());
222         gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
223         g_signal_handlers_disconnect_matched(m_pipeline.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
224     }
225 }
226
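// Rewrites HTTP(S) and blob URLs with a "webkit+" prefix, e.g. a hypothetical
// "https://example.com/movie.mp4" becomes "webkit+https://example.com/movie.mp4",
// presumably so that playbin resolves the URL through WebKit's own source element
// (see WebKitWebSourceGStreamer.h) instead of a stock GStreamer HTTP source.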
227 static void convertToInternalProtocol(URL& url)
228 {
229     if (url.protocolIsInHTTPFamily() || url.protocolIsBlob())
230         url.setProtocol("webkit+" + url.protocol());
231 }
232
233 void MediaPlayerPrivateGStreamer::setPlaybinURL(const URL& url)
234 {
235     // Strip everything after the file:// URL path (query and fragment).
236     String cleanURLString(url.string());
237     if (url.isLocalFile())
238         cleanURLString = cleanURLString.substring(0, url.pathEnd());
239
240     m_url = URL(URL(), cleanURLString);
241     convertToInternalProtocol(m_url);
242
243     GST_INFO_OBJECT(pipeline(), "Load %s", m_url.string().utf8().data());
244     g_object_set(m_pipeline.get(), "uri", m_url.string().utf8().data(), nullptr);
245 }
246
247 void MediaPlayerPrivateGStreamer::load(const String& urlString)
248 {
249     loadFull(urlString, nullptr, String());
250 }
251
252 static void setSyncOnClock(GstElement *element, bool sync)
253 {
254     if (!GST_IS_BIN(element)) {
255         g_object_set(element, "sync", sync, NULL);
256         return;
257     }
258
259     GstIterator* it = gst_bin_iterate_sinks(GST_BIN(element));
260     while (gst_iterator_foreach(it, (GstIteratorForeachFunction)([](const GValue* item, void* syncPtr) {
261         bool* sync = static_cast<bool*>(syncPtr);
262         setSyncOnClock(GST_ELEMENT(g_value_get_object(item)), *sync);
263     }), &sync) == GST_ITERATOR_RESYNC)
264         gst_iterator_resync(it);
265     gst_iterator_free(it);
266 }
267
268 void MediaPlayerPrivateGStreamer::syncOnClock(bool sync)
269 {
270     setSyncOnClock(videoSink(), sync);
271     setSyncOnClock(audioSink(), sync);
272 }
273
274 void MediaPlayerPrivateGStreamer::loadFull(const String& urlString, const gchar* playbinName,
275     const String& pipelineName)
276 {
277     // FIXME: This method is still called even if supportsType() returned
278     // IsNotSupported. This deserves more investigation, but meanwhile make
279     // sure we never try to play animated GIF assets.
280     if (m_player->contentMIMEType() == "image/gif") {
281         loadingFailed(MediaPlayer::FormatError);
282         return;
283     }
284
285     URL url(URL(), urlString);
286     if (url.protocolIsAbout())
287         return;
288
289     if (!m_pipeline)
290         createGSTPlayBin(isMediaSource() ? "playbin" : playbinName, pipelineName);
291     syncOnClock(true);
292     if (m_fillTimer.isActive())
293         m_fillTimer.stop();
294
295     ASSERT(m_pipeline);
296
297     setPlaybinURL(url);
298
299     GST_DEBUG_OBJECT(pipeline(), "preload: %s", convertEnumerationToString(m_preload).utf8().data());
300     if (m_preload == MediaPlayer::None) {
301         GST_INFO_OBJECT(pipeline(), "Delaying load.");
302         m_delayingLoad = true;
303     }
304
305     // Reset network and ready states. Those will be set properly once
306     // the pipeline has pre-rolled.
307     m_networkState = MediaPlayer::Loading;
308     m_player->networkStateChanged();
309     m_readyState = MediaPlayer::HaveNothing;
310     m_player->readyStateChanged();
311     m_volumeAndMuteInitialized = false;
312     m_durationAtEOS = MediaTime::invalidTime();
313     m_hasTaintedOrigin = WTF::nullopt;
314
315     if (!m_delayingLoad)
316         commitLoad();
317 }
318
319 #if ENABLE(MEDIA_SOURCE)
320 void MediaPlayerPrivateGStreamer::load(const String&, MediaSourcePrivateClient*)
321 {
322     // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
323     m_networkState = MediaPlayer::FormatError;
324     m_player->networkStateChanged();
325 }
326 #endif
327
328 #if ENABLE(MEDIA_STREAM)
329 void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate& stream)
330 {
331 #if GST_CHECK_VERSION(1, 10, 0)
332     m_streamPrivate = &stream;
333     auto pipelineName = String::format("mediastream_%s_%p",
334         (stream.hasCaptureVideoSource() || stream.hasCaptureAudioSource()) ? "Local" : "Remote", this);
335
336     loadFull(String("mediastream://") + stream.id(), "playbin3", pipelineName);
337     syncOnClock(false);
338
339 #if USE(GSTREAMER_GL)
340     ensureGLVideoSinkContext();
341 #endif
342     m_player->play();
343 #else
344     // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
345     m_networkState = MediaPlayer::FormatError;
346     m_player->networkStateChanged();
347     notImplemented();
348 #endif
349 }
350 #endif
351
352 void MediaPlayerPrivateGStreamer::commitLoad()
353 {
354     ASSERT(!m_delayingLoad);
355     GST_DEBUG_OBJECT(pipeline(), "Committing load.");
356
357     // GStreamer needs to have the pipeline set to a paused state to
358     // start providing anything useful.
359     changePipelineState(GST_STATE_PAUSED);
360
361     setDownloadBuffering();
362     updateStates();
363 }
364
365 MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
366 {
367     if (m_isEndReached && m_seeking)
368         return m_seekTime;
369
370     // This constant should remain lower than HTMLMediaElement's maxTimeupdateEventFrequency.
371     static const Seconds positionCacheThreshold = 200_ms;
372     Seconds now = WTF::WallTime::now().secondsSinceEpoch();
373     if (m_lastQueryTime && (now - m_lastQueryTime.value()) < positionCacheThreshold && m_cachedPosition.isValid())
374         return m_cachedPosition;
375
376     m_lastQueryTime = now;
377
378     // Position is only available if no async state change is going on and the state is either paused or playing.
379     gint64 position = GST_CLOCK_TIME_NONE;
380     GstQuery* query = gst_query_new_position(GST_FORMAT_TIME);
381     if (gst_element_query(m_pipeline.get(), query))
382         gst_query_parse_position(query, 0, &position);
383     gst_query_unref(query);
384
385     GST_TRACE_OBJECT(pipeline(), "Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));
386
387     MediaTime playbackPosition = MediaTime::zeroTime();
388     GstClockTime gstreamerPosition = static_cast<GstClockTime>(position);
389     if (GST_CLOCK_TIME_IS_VALID(gstreamerPosition))
390         playbackPosition = MediaTime(gstreamerPosition, GST_SECOND);
391     else if (m_canFallBackToLastFinishedSeekPosition)
392         playbackPosition = m_seekTime;
393
394     m_cachedPosition = playbackPosition;
395     return playbackPosition;
396 }
397
398 void MediaPlayerPrivateGStreamer::readyTimerFired()
399 {
400     GST_DEBUG_OBJECT(pipeline(), "In READY for too long. Releasing pipeline resources.");
401     changePipelineState(GST_STATE_NULL);
402 }
403
404 bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
405 {
406     ASSERT(m_pipeline);
407
408     GstState currentState;
409     GstState pending;
410
411     gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
412     if (currentState == newState || pending == newState) {
413         GST_DEBUG_OBJECT(pipeline(), "Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
414             gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
415         return true;
416     }
417
418     GST_DEBUG_OBJECT(pipeline(), "Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState),
419         gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
420
421 #if USE(GSTREAMER_GL)
422     if (currentState == GST_STATE_READY && newState == GST_STATE_PAUSED)
423         ensureGLVideoSinkContext();
424 #endif
425
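    // Note that GST_STATE_CHANGE_FAILURE is treated as fatal only when we are not already in
    // the PAUSED/PLAYING counterpart of the requested state; failures while toggling between
    // PAUSED and PLAYING are tolerated, presumably because they can be transient while the
    // sinks preroll.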
426     GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState);
427     GstState pausedOrPlaying = newState == GST_STATE_PLAYING ? GST_STATE_PAUSED : GST_STATE_PLAYING;
428     if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE)
429         return false;
430
431     // Create a timer when entering the READY state so that we can free resources
432     // if we stay in READY for too long.
433     // Also remove the timer if we request a state change to any state other than READY.
434     // See also https://bugs.webkit.org/show_bug.cgi?id=117354
435     if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
436         // Max interval in seconds to stay in the READY state on manual
437         // state change requests.
438         static const Seconds readyStateTimerDelay { 1_min };
439         m_readyTimerHandler.startOneShot(readyStateTimerDelay);
440     } else if (newState != GST_STATE_READY)
441         m_readyTimerHandler.stop();
442
443     return true;
444 }
445
446 void MediaPlayerPrivateGStreamer::prepareToPlay()
447 {
448     GST_DEBUG_OBJECT(pipeline(), "Prepare to play");
449     m_preload = MediaPlayer::Auto;
450     if (m_delayingLoad) {
451         m_delayingLoad = false;
452         commitLoad();
453     }
454 }
455
456 void MediaPlayerPrivateGStreamer::play()
457 {
458     if (!m_playbackRate) {
459         m_playbackRatePause = true;
460         return;
461     }
462
463     if (changePipelineState(GST_STATE_PLAYING)) {
464         m_isEndReached = false;
465         m_delayingLoad = false;
466         m_preload = MediaPlayer::Auto;
467         setDownloadBuffering();
468         GST_INFO_OBJECT(pipeline(), "Play");
469     } else
470         loadingFailed(MediaPlayer::Empty);
471 }
472
473 void MediaPlayerPrivateGStreamer::pause()
474 {
475     m_playbackRatePause = false;
476     GstState currentState, pendingState;
477     gst_element_get_state(m_pipeline.get(), &currentState, &pendingState, 0);
478     if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
479         return;
480
481     if (changePipelineState(GST_STATE_PAUSED))
482         GST_INFO_OBJECT(pipeline(), "Pause");
483     else
484         loadingFailed(MediaPlayer::Empty);
485 }
486
487 MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
488 {
489     if (!m_pipeline || m_errorOccured)
490         return MediaTime::invalidTime();
491
492     if (m_durationAtEOS.isValid())
493         return m_durationAtEOS;
494
495     // The duration query would fail on a pipeline that has not pre-rolled yet.
496     if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
497         return MediaTime::positiveInfiniteTime();
498
499     gint64 timeLength = 0;
500
501     if (!gst_element_query_duration(m_pipeline.get(), GST_FORMAT_TIME, &timeLength) || !GST_CLOCK_TIME_IS_VALID(timeLength)) {
502         GST_DEBUG_OBJECT(pipeline(), "Time duration query failed for %s", m_url.string().utf8().data());
503         return MediaTime::positiveInfiniteTime();
504     }
505
506     GST_LOG("Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(timeLength));
507
508     return MediaTime(timeLength, GST_SECOND);
509     // FIXME: handle 3.14.9.5 properly
510 }
511
512 MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
513 {
514     if (!m_pipeline || m_errorOccured)
515         return MediaTime::invalidTime();
516
517     if (m_seeking)
518         return m_seekTime;
519
520     return playbackPosition();
521 }
522
523 void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime)
524 {
525     if (!m_pipeline)
526         return;
527
528     if (m_errorOccured)
529         return;
530
531     GST_INFO_OBJECT(pipeline(), "[Seek] seek attempt to %s", toString(mediaTime).utf8().data());
532
533     // Avoid useless seeking.
534     if (mediaTime == currentMediaTime())
535         return;
536
537     MediaTime time = std::min(mediaTime, durationMediaTime());
538
539     if (isLiveStream())
540         return;
541
542     GST_INFO_OBJECT(pipeline(), "[Seek] seeking to %s", toString(time).utf8().data());
543
544     if (m_seeking) {
545         m_timeOfOverlappingSeek = time;
546         if (m_seekIsPending) {
547             m_seekTime = time;
548             return;
549         }
550     }
551
552     GstState state;
553     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
554     if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
555         GST_DEBUG_OBJECT(pipeline(), "[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
556         return;
557     }
558     if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
559         m_seekIsPending = true;
560         if (m_isEndReached) {
561             GST_DEBUG_OBJECT(pipeline(), "[Seek] reset pipeline");
562             m_resetPipeline = true;
563             if (!changePipelineState(GST_STATE_PAUSED))
564                 loadingFailed(MediaPlayer::Empty);
565         }
566     } else {
567         // We can seek now.
568         if (!doSeek(time, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
569             GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(time).utf8().data());
570             return;
571         }
572     }
573
574     m_seeking = true;
575     m_seekTime = time;
576     m_isEndReached = false;
577 }
578
579 bool MediaPlayerPrivateGStreamer::doSeek(const MediaTime& position, float rate, GstSeekFlags seekType)
580 {
581     // Default values for rate >= 0.
582     MediaTime startTime = position, endTime = MediaTime::invalidTime();
583
584     // TODO: Should do more than that, need to notify the media source
585     // and probably flush the pipeline at least.
586     if (isMediaSource())
587         return true;
588
589     if (rate < 0) {
590         startTime = MediaTime::zeroTime();
591         // If we are at beginning of media, start from the end to
592         // avoid immediate EOS.
593         if (position < MediaTime::zeroTime())
594             endTime = durationMediaTime();
595         else
596             endTime = position;
597     }
598
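    // gst_element_seek() rejects a rate of 0, so coerce it to 1.0 here; rate 0 is handled
    // separately through m_playbackRatePause (see setRate()).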
599     if (!rate)
600         rate = 1.0;
601
602     return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType,
603         GST_SEEK_TYPE_SET, toGstClockTime(startTime), GST_SEEK_TYPE_SET, toGstClockTime(endTime));
604 }
605
606 void MediaPlayerPrivateGStreamer::updatePlaybackRate()
607 {
608     if (!m_changingRate)
609         return;
610
611     GST_INFO_OBJECT(pipeline(), "Set Rate to %f", m_playbackRate);
612
613     // Mute the sound if the playback rate is negative or too extreme and audio pitch is not adjusted.
614     bool mute = m_playbackRate <= 0 || (!m_preservesPitch && (m_playbackRate < 0.8 || m_playbackRate > 2));
615
616     GST_INFO_OBJECT(pipeline(), mute ? "Need to mute audio" : "Do not need to mute audio");
617
618     if (doSeek(playbackPosition(), m_playbackRate, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH))) {
619         g_object_set(m_pipeline.get(), "mute", mute, nullptr);
620         m_lastPlaybackRate = m_playbackRate;
621     } else {
622         m_playbackRate = m_lastPlaybackRate;
623         GST_ERROR("Set rate to %f failed", m_playbackRate);
624     }
625
626     if (m_playbackRatePause) {
627         GstState state;
628         GstState pending;
629
630         gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
631         if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
632             changePipelineState(GST_STATE_PLAYING);
633         m_playbackRatePause = false;
634     }
635
636     m_changingRate = false;
637     m_player->rateChanged();
638 }
639
640 bool MediaPlayerPrivateGStreamer::paused() const
641 {
642     if (m_isEndReached) {
643         GST_DEBUG_OBJECT(pipeline(), "Ignoring pause at EOS");
644         return true;
645     }
646
647     if (m_playbackRatePause) {
648         GST_DEBUG_OBJECT(pipeline(), "Playback rate is 0, simulating PAUSED state");
649         return false;
650     }
651
652     GstState state;
653     gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
654     bool paused = state <= GST_STATE_PAUSED;
655     GST_DEBUG_OBJECT(pipeline(), "Paused: %s", toString(paused).utf8().data());
656     return paused;
657 }
658
659 bool MediaPlayerPrivateGStreamer::seeking() const
660 {
661     return m_seeking;
662 }
663
664 #if GST_CHECK_VERSION(1, 10, 0)
665 #define CLEAR_TRACKS(tracks, method) \
666     for (auto& track : tracks.values())\
667         method(*track);\
668     tracks.clear();
669
670 void MediaPlayerPrivateGStreamer::clearTracks()
671 {
672 #if ENABLE(VIDEO_TRACK)
673     CLEAR_TRACKS(m_audioTracks, m_player->removeAudioTrack);
674     CLEAR_TRACKS(m_videoTracks, m_player->removeVideoTrack);
675     CLEAR_TRACKS(m_textTracks, m_player->removeTextTrack);
676 #endif // ENABLE(VIDEO_TRACK)
677 }
678 #undef CLEAR_TRACKS
679
680 #if ENABLE(VIDEO_TRACK)
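// Expanded in updateTracks() for each GstStream of the playbin3 stream collection: records
// that the media has a track of the given type, registers a new *TrackPrivateGStreamer with
// the MediaPlayer (unless the MSE backend manages tracks itself) and remembers the stream ID
// of the stream flagged as selected.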
681 #define CREATE_TRACK(type, Type) \
682     m_has##Type = true; \
683     if (!useMediaSource) {\
684         RefPtr<Type##TrackPrivateGStreamer> track = Type##TrackPrivateGStreamer::create(makeWeakPtr(*this), i, stream); \
685         m_##type##Tracks.add(track->id(), track); \
686         m_player->add##Type##Track(*track);\
687         if (gst_stream_get_stream_flags(stream.get()) & GST_STREAM_FLAG_SELECT) {                                    \
688             m_current##Type##StreamId = String(gst_stream_get_stream_id(stream.get()));                              \
689         }                                                                                                            \
690     }
691
692 FloatSize MediaPlayerPrivateGStreamer::naturalSize() const
693 {
694 #if ENABLE(MEDIA_STREAM)
695     if (!m_isLegacyPlaybin && !m_currentVideoStreamId.isEmpty()) {
696         RefPtr<VideoTrackPrivateGStreamer> videoTrack = m_videoTracks.get(m_currentVideoStreamId);
697
698         if (videoTrack) {
699             auto tags = adoptGRef(gst_stream_get_tags(videoTrack->stream()));
700             gint width, height;
701
702             if (tags && gst_tag_list_get_int(tags.get(), WEBKIT_MEDIA_TRACK_TAG_WIDTH, &width) && gst_tag_list_get_int(tags.get(), WEBKIT_MEDIA_TRACK_TAG_HEIGHT, &height))
703                 return FloatSize(width, height);
704         }
705     }
706 #endif // ENABLE(MEDIA_STREAM)
707
708     return MediaPlayerPrivateGStreamerBase::naturalSize();
709 }
710 #else
711 #define CREATE_TRACK(type, Type) m_has##Type = true;
712 #endif // ENABLE(VIDEO_TRACK)
713
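// Rebuilds the audio/video/text track lists from the current playbin3 stream collection
// (m_streamCollection). Existing tracks are cleared first, so a new collection fully
// replaces the previous one.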
714 void MediaPlayerPrivateGStreamer::updateTracks()
715 {
716     ASSERT(!m_isLegacyPlaybin);
717
718     bool useMediaSource = isMediaSource();
719     unsigned length = gst_stream_collection_get_size(m_streamCollection.get());
720
721     bool oldHasAudio = m_hasAudio;
722     bool oldHasVideo = m_hasVideo;
723     // New stream collections override previous ones.
724     clearTracks();
725     unsigned textTrackIndex = 0;
726     for (unsigned i = 0; i < length; i++) {
727         GRefPtr<GstStream> stream = gst_stream_collection_get_stream(m_streamCollection.get(), i);
728         String streamId(gst_stream_get_stream_id(stream.get()));
729         GstStreamType type = gst_stream_get_stream_type(stream.get());
730
731         GST_DEBUG_OBJECT(pipeline(), "Inspecting %s track with ID %s", gst_stream_type_get_name(type), streamId.utf8().data());
732         if (type & GST_STREAM_TYPE_AUDIO) {
733             CREATE_TRACK(audio, Audio)
734         } else if (type & GST_STREAM_TYPE_VIDEO) {
735             CREATE_TRACK(video, Video)
736         } else if (type & GST_STREAM_TYPE_TEXT && !useMediaSource) {
737 #if ENABLE(VIDEO_TRACK)
738             auto track = InbandTextTrackPrivateGStreamer::create(textTrackIndex++, stream);
739             m_textTracks.add(streamId, track.copyRef());
740             m_player->addTextTrack(track.get());
741 #endif
742         } else
743             GST_WARNING("Unknown track type found for stream %s", streamId.utf8().data());
744     }
745
746     if ((oldHasVideo != m_hasVideo) || (oldHasAudio != m_hasAudio))
747         m_player->characteristicChanged();
748
749     if (m_hasVideo)
750         m_player->sizeChanged();
751
752     m_player->client().mediaPlayerEngineUpdated(m_player);
753 }
754 #endif // GST_CHECK_VERSION(1, 10, 0)
755
756 void MediaPlayerPrivateGStreamer::enableTrack(TrackPrivateBaseGStreamer::TrackType trackType, unsigned index)
757 {
758     // FIXME: Remove isMediaSource() test below when fixing https://bugs.webkit.org/show_bug.cgi?id=182531.
759     if (isMediaSource()) {
760         GST_FIXME_OBJECT(m_pipeline.get(), "Audio/Video/Text track switching is not yet supported by the MSE backend.");
761         return;
762     }
763
764     const char* propertyName;
765     const char* trackTypeAsString;
766     Vector<String> selectedStreams;
767     String selectedStreamId;
768
769 #if GST_CHECK_VERSION(1, 10, 0)
770     GstStream* stream = nullptr;
771
772     if (!m_isLegacyPlaybin) {
773         stream = gst_stream_collection_get_stream(m_streamCollection.get(), index);
774         if (!stream) {
775             GST_WARNING_OBJECT(pipeline(), "No stream to select at index %u", index);
776             return;
777         }
778         selectedStreamId = String::fromUTF8(gst_stream_get_stream_id(stream));
779         selectedStreams.append(selectedStreamId);
780     }
781 #endif // GST_CHECK_VERSION(1, 10, 0)
782
783     switch (trackType) {
784     case TrackPrivateBaseGStreamer::TrackType::Audio:
785         propertyName = "current-audio";
786         trackTypeAsString = "audio";
787         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentAudioStreamId) {
788             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
789             return;
790         }
791
792         if (!m_currentTextStreamId.isEmpty())
793             selectedStreams.append(m_currentTextStreamId);
794         if (!m_currentVideoStreamId.isEmpty())
795             selectedStreams.append(m_currentVideoStreamId);
796         break;
797     case TrackPrivateBaseGStreamer::TrackType::Video:
798         propertyName = "current-video";
799         trackTypeAsString = "video";
800         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentVideoStreamId) {
801             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
802             return;
803         }
804
805         if (!m_currentAudioStreamId.isEmpty())
806             selectedStreams.append(m_currentAudioStreamId);
807         if (!m_currentTextStreamId.isEmpty())
808             selectedStreams.append(m_currentTextStreamId);
809         break;
810     case TrackPrivateBaseGStreamer::TrackType::Text:
811         propertyName = "current-text";
812         trackTypeAsString = "text";
813         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentTextStreamId) {
814             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
815             return;
816         }
817
818         if (!m_currentAudioStreamId.isEmpty())
819             selectedStreams.append(m_currentAudioStreamId);
820         if (!m_currentVideoStreamId.isEmpty())
821             selectedStreams.append(m_currentVideoStreamId);
822         break;
823     case TrackPrivateBaseGStreamer::TrackType::Unknown:
824     default:
825         ASSERT_NOT_REACHED();
826     }
827
828     GST_INFO_OBJECT(pipeline(), "Enabling %s track with index: %u", trackTypeAsString, index);
829     if (m_isLegacyPlaybin)
830         g_object_set(m_pipeline.get(), propertyName, index, nullptr);
831 #if GST_CHECK_VERSION(1, 10, 0)
832     else {
833         GList* selectedStreamsList = nullptr;
834
835         for (const auto& streamId : selectedStreams)
836             selectedStreamsList = g_list_append(selectedStreamsList, g_strdup(streamId.utf8().data()));
837
838         // TODO: MSE GstStream API support: https://bugs.webkit.org/show_bug.cgi?id=182531
839         gst_element_send_event(m_pipeline.get(), gst_event_new_select_streams(selectedStreamsList));
840         g_list_free_full(selectedStreamsList, reinterpret_cast<GDestroyNotify>(g_free));
841     }
842 #endif
843 }
844
845 void MediaPlayerPrivateGStreamer::videoChangedCallback(MediaPlayerPrivateGStreamer* player)
846 {
847     player->m_notifier->notify(MainThreadNotification::VideoChanged, [player] {
848         player->notifyPlayerOfVideo();
849     });
850 }
851
852 void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
853 {
854     if (UNLIKELY(!m_pipeline || !m_source))
855         return;
856
857     ASSERT(m_isLegacyPlaybin || isMediaSource());
858
859     gint numTracks = 0;
860     bool useMediaSource = isMediaSource();
861     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
862     g_object_get(element, "n-video", &numTracks, nullptr);
863
864     GST_INFO_OBJECT(pipeline(), "Media has %d video tracks", numTracks);
865
866     bool oldHasVideo = m_hasVideo;
867     m_hasVideo = numTracks > 0;
868     if (oldHasVideo != m_hasVideo)
869         m_player->characteristicChanged();
870
871     if (m_hasVideo)
872         m_player->sizeChanged();
873
874     if (useMediaSource) {
875         GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
876         m_player->client().mediaPlayerEngineUpdated(m_player);
877         return;
878     }
879
880 #if ENABLE(VIDEO_TRACK)
881     Vector<String> validVideoStreams;
882     for (gint i = 0; i < numTracks; ++i) {
883         GRefPtr<GstPad> pad;
884         g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
885         ASSERT(pad);
886
887         String streamId = "V" + String::number(i);
888         validVideoStreams.append(streamId);
889         if (i < static_cast<gint>(m_videoTracks.size())) {
890             RefPtr<VideoTrackPrivateGStreamer> existingTrack = m_videoTracks.get(streamId);
891             if (existingTrack) {
892                 existingTrack->setIndex(i);
893                 if (existingTrack->pad() == pad)
894                     continue;
895             }
896         }
897
898         auto track = VideoTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
899         ASSERT(streamId == track->id());
900         m_videoTracks.add(streamId, track.copyRef());
901         m_player->addVideoTrack(track.get());
902     }
903
904     purgeInvalidVideoTracks(validVideoStreams);
905 #endif
906
907     m_player->client().mediaPlayerEngineUpdated(m_player);
908 }
909
910 void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
911 {
912     player->m_notifier->notify(MainThreadNotification::VideoCapsChanged, [player] {
913         player->notifyPlayerOfVideoCaps();
914     });
915 }
916
917 void MediaPlayerPrivateGStreamer::notifyPlayerOfVideoCaps()
918 {
919     m_videoSize = IntSize();
920     m_player->client().mediaPlayerEngineUpdated(m_player);
921 }
922
923 void MediaPlayerPrivateGStreamer::audioChangedCallback(MediaPlayerPrivateGStreamer* player)
924 {
925     player->m_notifier->notify(MainThreadNotification::AudioChanged, [player] {
926         player->notifyPlayerOfAudio();
927     });
928 }
929
930 void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
931 {
932     if (UNLIKELY(!m_pipeline || !m_source))
933         return;
934
935     ASSERT(m_isLegacyPlaybin || isMediaSource());
936
937     gint numTracks = 0;
938     bool useMediaSource = isMediaSource();
939     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
940     g_object_get(element, "n-audio", &numTracks, nullptr);
941
942     GST_INFO_OBJECT(pipeline(), "Media has %d audio tracks", numTracks);
943     bool oldHasAudio = m_hasAudio;
944     m_hasAudio = numTracks > 0;
945     if (oldHasAudio != m_hasAudio)
946         m_player->characteristicChanged();
947
948     if (useMediaSource) {
949         GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
950         m_player->client().mediaPlayerEngineUpdated(m_player);
951         return;
952     }
953
954 #if ENABLE(VIDEO_TRACK)
955     Vector<String> validAudioStreams;
956     for (gint i = 0; i < numTracks; ++i) {
957         GRefPtr<GstPad> pad;
958         g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
959         ASSERT(pad);
960
961         String streamId = "A" + String::number(i);
962         validAudioStreams.append(streamId);
963         if (i < static_cast<gint>(m_audioTracks.size())) {
964             RefPtr<AudioTrackPrivateGStreamer> existingTrack = m_audioTracks.get(streamId);
965             if (existingTrack) {
966                 existingTrack->setIndex(i);
967                 if (existingTrack->pad() == pad)
968                     continue;
969             }
970         }
971
972         auto track = AudioTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
973         ASSERT(streamId == track->id());
974         m_audioTracks.add(streamId, track);
975         m_player->addAudioTrack(*track);
976     }
977
978     purgeInvalidAudioTracks(validAudioStreams);
979 #endif
980
981     m_player->client().mediaPlayerEngineUpdated(m_player);
982 }
983
984 #if ENABLE(VIDEO_TRACK)
985 void MediaPlayerPrivateGStreamer::textChangedCallback(MediaPlayerPrivateGStreamer* player)
986 {
987     player->m_notifier->notify(MainThreadNotification::TextChanged, [player] {
988         player->notifyPlayerOfText();
989     });
990 }
991
992 void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
993 {
994     if (UNLIKELY(!m_pipeline || !m_source))
995         return;
996
997     ASSERT(m_isLegacyPlaybin || isMediaSource());
998
999     gint numTracks = 0;
1000     bool useMediaSource = isMediaSource();
1001     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
1002     g_object_get(element, "n-text", &numTracks, nullptr);
1003
1004     GST_INFO_OBJECT(pipeline(), "Media has %d text tracks", numTracks);
1005
1006     if (useMediaSource) {
1007         GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
1008         return;
1009     }
1010
1011     Vector<String> validTextStreams;
1012     for (gint i = 0; i < numTracks; ++i) {
1013         GRefPtr<GstPad> pad;
1014         g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
1015         ASSERT(pad);
1016
1017         // We can't assume the pad has a sticky event here like implemented in
1018         // InbandTextTrackPrivateGStreamer because it might be emitted after the
1019         // track was created. So fall back to a dummy stream ID like in the Audio
1020         // and Video tracks.
1021         String streamId = "T" + String::number(i);
1022
1023         validTextStreams.append(streamId);
1024         if (i < static_cast<gint>(m_textTracks.size())) {
1025             RefPtr<InbandTextTrackPrivateGStreamer> existingTrack = m_textTracks.get(streamId);
1026             if (existingTrack) {
1027                 existingTrack->setIndex(i);
1028                 if (existingTrack->pad() == pad)
1029                     continue;
1030             }
1031         }
1032
1033         auto track = InbandTextTrackPrivateGStreamer::create(i, pad);
1034         m_textTracks.add(streamId, track.copyRef());
1035         m_player->addTextTrack(track.get());
1036     }
1037
1038     purgeInvalidTextTracks(validTextStreams);
1039 }
1040
1041 GstFlowReturn MediaPlayerPrivateGStreamer::newTextSampleCallback(MediaPlayerPrivateGStreamer* player)
1042 {
1043     player->newTextSample();
1044     return GST_FLOW_OK;
1045 }
1046
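// Pulls a text sample from the app sink (reached through newTextSampleCallback, presumably
// connected to the sink's "new-sample" signal elsewhere) and hands it to the text track
// whose stream ID matches the pad's sticky stream-start event.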
1047 void MediaPlayerPrivateGStreamer::newTextSample()
1048 {
1049     if (!m_textAppSink)
1050         return;
1051
1052     GRefPtr<GstEvent> streamStartEvent = adoptGRef(
1053         gst_pad_get_sticky_event(m_textAppSinkPad.get(), GST_EVENT_STREAM_START, 0));
1054
1055     GRefPtr<GstSample> sample;
1056     g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr);
1057     ASSERT(sample);
1058
1059     if (streamStartEvent) {
1060         bool found = false;
1061         const gchar* id;
1062         gst_event_parse_stream_start(streamStartEvent.get(), &id);
1063         for (auto& track : m_textTracks.values()) {
1064             if (!strcmp(track->streamId().utf8().data(), id)) {
1065                 track->handleSample(sample);
1066                 found = true;
1067                 break;
1068             }
1069         }
1070         if (!found)
1071             GST_WARNING("Got sample with unknown stream ID %s.", id);
1072     } else
1073         GST_WARNING("Unable to handle sample with no stream start event.");
1074 }
1075 #endif
1076
1077 void MediaPlayerPrivateGStreamer::setRate(float rate)
1078 {
1079     // Higher rates cause a crash, so clamp to a sane range.
1080     rate = clampTo(rate, -20.0, 20.0);
1081
1082     // Avoid useless playback rate update.
1083     if (m_playbackRate == rate) {
1084         // But make sure that the upper layers are notified if the rate was already set.
1085
1086         if (!m_changingRate && m_player->rate() != m_playbackRate)
1087             m_player->rateChanged();
1088         return;
1089     }
1090
1091     if (isLiveStream()) {
1092         // Notify the upper layers that we cannot handle the requested rate.
1093         m_changingRate = false;
1094         m_player->rateChanged();
1095         return;
1096     }
1097
1098     GstState state;
1099     GstState pending;
1100
1101     m_playbackRate = rate;
1102     m_changingRate = true;
1103
1104     gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
1105
1106     if (!rate) {
1107         m_changingRate = false;
1108         m_playbackRatePause = true;
1109         if (state != GST_STATE_PAUSED && pending != GST_STATE_PAUSED)
1110             changePipelineState(GST_STATE_PAUSED);
1111         return;
1112     }
1113
1114     if ((state != GST_STATE_PLAYING && state != GST_STATE_PAUSED)
1115         || (pending == GST_STATE_PAUSED))
1116         return;
1117
1118     updatePlaybackRate();
1119 }
1120
1121 double MediaPlayerPrivateGStreamer::rate() const
1122 {
1123     return m_playbackRate;
1124 }
1125
1126 void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
1127 {
1128     m_preservesPitch = preservesPitch;
1129 }
1130
1131 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
1132 {
1133     auto timeRanges = std::make_unique<PlatformTimeRanges>();
1134     if (m_errorOccured || isLiveStream())
1135         return timeRanges;
1136
1137     MediaTime mediaDuration = durationMediaTime();
1138     if (!mediaDuration || mediaDuration.isPositiveInfinite())
1139         return timeRanges;
1140
1141     GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
1142
1143     if (!gst_element_query(m_pipeline.get(), query)) {
1144         gst_query_unref(query);
1145         return timeRanges;
1146     }
1147
1148     guint numBufferingRanges = gst_query_get_n_buffering_ranges(query);
1149     for (guint index = 0; index < numBufferingRanges; index++) {
1150         gint64 rangeStart = 0, rangeStop = 0;
1151         if (gst_query_parse_nth_buffering_range(query, index, &rangeStart, &rangeStop)) {
1152             uint64_t startTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStart, GST_FORMAT_PERCENT_MAX);
1153             uint64_t stopTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStop, GST_FORMAT_PERCENT_MAX);
1154             timeRanges->add(MediaTime(startTime, GST_SECOND), MediaTime(stopTime, GST_SECOND));
1155         }
1156     }
1157
1158     // Fall back to the more general maxTimeLoaded() if no range has
1159     // been found.
1160     if (!timeRanges->length()) {
1161         MediaTime loaded = maxTimeLoaded();
1162         if (loaded.isValid() && loaded)
1163             timeRanges->add(MediaTime::zeroTime(), loaded);
1164     }
1165
1166     gst_query_unref(query);
1167
1168     return timeRanges;
1169 }
1170
1171 void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
1172 {
1173     GUniqueOutPtr<GError> err;
1174     GUniqueOutPtr<gchar> debug;
1175     MediaPlayer::NetworkState error;
1176     bool issueError = true;
1177     bool attemptNextLocation = false;
1178     const GstStructure* structure = gst_message_get_structure(message);
1179     GstState requestedState, currentState;
1180
1181     m_canFallBackToLastFinishedSeekPosition = false;
1182
1183     if (structure) {
1184         const gchar* messageTypeName = gst_structure_get_name(structure);
1185
1186         // Redirect messages are sent from elements, like qtdemux, to
1187         // notify of the new location(s) of the media.
1188         if (!g_strcmp0(messageTypeName, "redirect")) {
1189             mediaLocationChanged(message);
1190             return;
1191         }
1192     }
1193
1194     // We ignore state changes from internal elements. They are forwarded to playbin2 anyway.
1195     bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get());
1196
1197     GST_LOG("Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
1198     switch (GST_MESSAGE_TYPE(message)) {
1199     case GST_MESSAGE_ERROR:
1200         if (m_resetPipeline || !m_missingPluginCallbacks.isEmpty() || m_errorOccured)
1201             break;
1202         gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
1203         GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());
1204
1205         GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");
1206
1207         error = MediaPlayer::Empty;
1208         if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_CODEC_NOT_FOUND)
1209             || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE)
1210             || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED)
1211             || g_error_matches(err.get(), GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)
1212             || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
1213             error = MediaPlayer::FormatError;
1214         else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
1215             // Let the mediaPlayerClient handle the stream error, in
1216             // this case the HTMLMediaElement will emit a stalled
1217             // event.
1218             GST_ERROR("Decode error, let the Media element emit a stalled event.");
1219             m_loadingStalled = true;
1220             break;
1221         } else if (err->domain == GST_STREAM_ERROR) {
1222             error = MediaPlayer::DecodeError;
1223             attemptNextLocation = true;
1224         } else if (err->domain == GST_RESOURCE_ERROR)
1225             error = MediaPlayer::NetworkError;
1226
1227         if (attemptNextLocation)
1228             issueError = !loadNextLocation();
1229         if (issueError) {
1230             m_errorOccured = true;
1231             if (m_networkState != error) {
1232                 m_networkState = error;
1233                 m_player->networkStateChanged();
1234             }
1235         }
1236         break;
1237     case GST_MESSAGE_EOS:
1238         didEnd();
1239         break;
1240     case GST_MESSAGE_ASYNC_DONE:
1241         if (!messageSourceIsPlaybin || m_delayingLoad)
1242             break;
1243         asyncStateChangeDone();
1244         break;
1245     case GST_MESSAGE_STATE_CHANGED: {
1246         if (!messageSourceIsPlaybin || m_delayingLoad)
1247             break;
1248         updateStates();
1249
1250         // Construct a filename for the graphviz dot file output.
1251         GstState newState;
1252         gst_message_parse_state_changed(message, &currentState, &newState, nullptr);
1253         CString dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), '.',
1254             gst_element_state_get_name(currentState), '_', gst_element_state_get_name(newState)).utf8();
1255         GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());
1256
1257         break;
1258     }
1259     case GST_MESSAGE_BUFFERING:
1260         processBufferingStats(message);
1261         break;
1262     case GST_MESSAGE_DURATION_CHANGED:
1263         // Duration in MSE is managed by MediaSource, SourceBuffer and AppendPipeline.
1264         if (messageSourceIsPlaybin && !isMediaSource())
1265             durationChanged();
1266         break;
1267     case GST_MESSAGE_REQUEST_STATE:
1268         gst_message_parse_request_state(message, &requestedState);
1269         gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND);
1270         if (requestedState < currentState) {
1271             GST_INFO_OBJECT(pipeline(), "Element %s requested state change to %s", GST_MESSAGE_SRC_NAME(message),
1272                 gst_element_state_get_name(requestedState));
1273             m_requestedState = requestedState;
1274             if (!changePipelineState(requestedState))
1275                 loadingFailed(MediaPlayer::Empty);
1276         }
1277         break;
1278     case GST_MESSAGE_CLOCK_LOST:
1279         // This can only happen in PLAYING state and we should just
1280         // get a new clock by moving back to PAUSED and then to
1281         // PLAYING again.
1282         // This can happen if the stream that ends in a sink that
1283         // provides the current clock disappears, for example if
1284         // the audio sink provides the clock and the audio stream
1285         // is disabled. It also happens relatively often with
1286         // HTTP adaptive streams when switching between different
1287         // variants of a stream.
1288         gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
1289         gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
1290         break;
1291     case GST_MESSAGE_LATENCY:
1292         // Recalculate the latency, we don't need any special handling
1293         // here other than the GStreamer default.
1294         // This can happen if the latency of live elements changes, or
1295         // for one reason or another a new live element is added or
1296         // removed from the pipeline.
1297         gst_bin_recalculate_latency(GST_BIN(m_pipeline.get()));
1298         break;
1299     case GST_MESSAGE_ELEMENT:
1300         if (gst_is_missing_plugin_message(message)) {
1301             if (gst_install_plugins_supported()) {
1302                 auto missingPluginCallback = MediaPlayerRequestInstallMissingPluginsCallback::create([weakThis = makeWeakPtr(*this)](uint32_t result, MediaPlayerRequestInstallMissingPluginsCallback& missingPluginCallback) {
1303                     if (!weakThis) {
1304                         GST_INFO("got missing pluging installation callback in destroyed player with result %u", result);
1305                         return;
1306                     }
1307
1308                     GST_DEBUG("got missing plugin installation callback with result %u", result);
1309                     RefPtr<MediaPlayerRequestInstallMissingPluginsCallback> protectedMissingPluginCallback = &missingPluginCallback;
1310                     weakThis->m_missingPluginCallbacks.removeFirst(protectedMissingPluginCallback);
1311                     if (result != GST_INSTALL_PLUGINS_SUCCESS)
1312                         return;
1313
1314                     weakThis->changePipelineState(GST_STATE_READY);
1315                     weakThis->changePipelineState(GST_STATE_PAUSED);
1316                 });
1317                 m_missingPluginCallbacks.append(missingPluginCallback.copyRef());
1318                 GUniquePtr<char> detail(gst_missing_plugin_message_get_installer_detail(message));
1319                 GUniquePtr<char> description(gst_missing_plugin_message_get_description(message));
1320                 m_player->client().requestInstallMissingPlugins(String::fromUTF8(detail.get()), String::fromUTF8(description.get()), missingPluginCallback.get());
1321             }
1322         }
1323 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
1324         else if (GstMpegtsSection* section = gst_message_parse_mpegts_section(message)) {
1325             processMpegTsSection(section);
1326             gst_mpegts_section_unref(section);
1327         }
1328 #endif
1329 #if ENABLE(ENCRYPTED_MEDIA)
1330         else if (gst_structure_has_name(structure, "drm-waiting-for-key")) {
1331             GST_DEBUG_OBJECT(pipeline(), "drm-waiting-for-key message from %s", GST_MESSAGE_SRC_NAME(message));
1332             setWaitingForKey(true);
1333             // FIXME: The decryptors should be able to attempt to decrypt after being created and linked in a pipeline, but currently they cannot, and the current
1334             // architecture does not make this easy. Fortunately, the architecture will change soon and it does not pay off to fix this now with something that could be
1335             // more convoluted. In the meantime, force an attempt to decrypt when they get blocked.
1336             attemptToDecryptWithLocalInstance();
1337         } else if (gst_structure_has_name(structure, "drm-key-received")) {
1338             GST_DEBUG_OBJECT(pipeline(), "drm-key-received message from %s", GST_MESSAGE_SRC_NAME(message));
1339             setWaitingForKey(false);
1340         }
1341 #endif
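        // "http-headers" element messages are posted by the source element; a missing
        // Content-Length response header is taken as a hint that this is a live stream,
        // so on-disk download buffering gets disabled.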
1342         else if (gst_structure_has_name(structure, "http-headers")) {
1343             GstStructure* responseHeaders;
1344             if (gst_structure_get(structure, "response-headers", GST_TYPE_STRUCTURE, &responseHeaders, nullptr)) {
1345                 if (!gst_structure_has_field(responseHeaders, httpHeaderNameString(HTTPHeaderName::ContentLength).utf8().data())) {
1346                     GST_INFO_OBJECT(pipeline(), "Live stream detected. Disabling on-disk buffering");
1347                     m_isStreaming = true;
1348                     setDownloadBuffering();
1349                 }
1350                 gst_structure_free(responseHeaders);
1351             }
1352         } else if (gst_structure_has_name(structure, "adaptive-streaming-statistics")) {
1353             if (WEBKIT_IS_WEB_SRC(m_source.get()))
1354                 if (const char* uri = gst_structure_get_string(structure, "uri"))
1355                     m_hasTaintedOrigin = webKitSrcWouldTaintOrigin(WEBKIT_WEB_SRC(m_source.get()), SecurityOrigin::create(URL(URL(), uri)));
1356         } else
1357             GST_DEBUG_OBJECT(pipeline(), "Unhandled element message: %" GST_PTR_FORMAT, structure);
1358         break;
1359 #if ENABLE(VIDEO_TRACK)
1360     case GST_MESSAGE_TOC:
1361         processTableOfContents(message);
1362         break;
1363 #endif
1364     case GST_MESSAGE_TAG: {
1365         GstTagList* tags = nullptr;
1366         GUniqueOutPtr<gchar> tag;
1367         gst_message_parse_tag(message, &tags);
1368         if (gst_tag_list_get_string(tags, GST_TAG_IMAGE_ORIENTATION, &tag.outPtr())) {
1369             if (!g_strcmp0(tag.get(), "rotate-90"))
1370                 setVideoSourceOrientation(ImageOrientation(OriginRightTop));
1371             else if (!g_strcmp0(tag.get(), "rotate-180"))
1372                 setVideoSourceOrientation(ImageOrientation(OriginBottomRight));
1373             else if (!g_strcmp0(tag.get(), "rotate-270"))
1374                 setVideoSourceOrientation(ImageOrientation(OriginLeftBottom));
1375         }
1376         gst_tag_list_unref(tags);
1377         break;
1378     }
1379 #if GST_CHECK_VERSION(1, 10, 0)
1380     case GST_MESSAGE_STREAMS_SELECTED: {
1381         GRefPtr<GstStreamCollection> collection;
1382         gst_message_parse_streams_selected(message, &collection.outPtr());
1383
1384         if (!collection)
1385             break;
1386
1387         m_streamCollection.swap(collection);
1388         m_currentAudioStreamId = "";
1389         m_currentVideoStreamId = "";
1390         m_currentTextStreamId = "";
1391
1392         unsigned length = gst_message_streams_selected_get_size(message);
1393         for (unsigned i = 0; i < length; i++) {
1394             GRefPtr<GstStream> stream = gst_message_streams_selected_get_stream(message, i);
1395             if (!stream)
1396                 continue;
1397
1398             GstStreamType type = gst_stream_get_stream_type(stream.get());
1399             String streamId(gst_stream_get_stream_id(stream.get()));
1400
1401             GST_DEBUG_OBJECT(pipeline(), "Selecting %s track with ID: %s", gst_stream_type_get_name(type), streamId.utf8().data());
1402             // Playbin3 can send more than one selected stream of the same type
1403             // but there's no priority or ordering system in place, so we assume
1404             // the selected stream is the last one as reported by playbin3.
1405             if (type & GST_STREAM_TYPE_AUDIO) {
1406                 m_currentAudioStreamId = streamId;
1407                 auto track = m_audioTracks.get(m_currentAudioStreamId);
1408                 ASSERT(track);
1409                 track->markAsActive();
1410             } else if (type & GST_STREAM_TYPE_VIDEO) {
1411                 m_currentVideoStreamId = streamId;
1412                 auto track = m_videoTracks.get(m_currentVideoStreamId);
1413                 ASSERT(track);
1414                 track->markAsActive();
1415             } else if (type & GST_STREAM_TYPE_TEXT)
1416                 m_currentTextStreamId = streamId;
1417             else
1418                 GST_WARNING("Unknown stream type with stream-id %s", streamId.utf8().data());
1419         }
1420         break;
1421     }
1422 #endif
1423     default:
1424         GST_DEBUG_OBJECT(pipeline(), "Unhandled GStreamer message type: %s", GST_MESSAGE_TYPE_NAME(message));
1425         break;
1426     }
1427 }
1428
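// Handles GST_MESSAGE_BUFFERING: caches the reported percentage and refreshes the player states once buffering reaches 100%.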
1429 void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message)
1430 {
1431     m_buffering = true;
1432     gst_message_parse_buffering(message, &m_bufferingPercentage);
1433
1434     GST_DEBUG_OBJECT(pipeline(), "[Buffering] Buffering: %d%%.", m_bufferingPercentage);
1435
1436     if (m_bufferingPercentage == 100)
1437         updateStates();
1438 }
1439
1440 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
1441 void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section)
1442 {
1443     ASSERT(section);
1444
1445     if (section->section_type == GST_MPEGTS_SECTION_PMT) {
1446         const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
1447         m_metadataTracks.clear();
1448         for (guint i = 0; i < pmt->streams->len; ++i) {
1449             const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
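            // Stream type 0x05 (ISO/IEC 13818-1 private sections) and the user-private range (>= 0x80) may carry timed metadata.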
1450             if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
1451                 AtomicString pid = String::number(stream->pid);
1452                 auto track = InbandMetadataTextTrackPrivateGStreamer::create(
1453                     InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data, pid);
1454
1455                 // 4.7.10.12.2 Sourcing in-band text tracks
1456                 // If the new text track's kind is metadata, then set the text track in-band metadata track dispatch
1457                 // type as follows, based on the type of the media resource:
1458                 // Let stream type be the value of the "stream_type" field describing the text track's type in the
1459                 // file's program map section, interpreted as an 8-bit unsigned integer. Let length be the value of
1460                 // the "ES_info_length" field for the track in the same part of the program map section, interpreted
1461                 // as an integer as defined by the MPEG-2 specification. Let descriptor bytes be the length bytes
1462                 // following the "ES_info_length" field. The text track in-band metadata track dispatch type must be
1463                 // set to the concatenation of the stream type byte and the zero or more descriptor bytes bytes,
1464                 // expressed in hexadecimal using uppercase ASCII hex digits.
1465                 String inbandMetadataTrackDispatchType;
1466                 appendUnsignedAsHexFixedSize(stream->stream_type, inbandMetadataTrackDispatchType, 2);
1467                 for (guint j = 0; j < stream->descriptors->len; ++j) {
1468                     const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
1469                     for (guint k = 0; k < descriptor->length; ++k)
1470                         appendByteAsHex(descriptor->data[k], inbandMetadataTrackDispatchType);
1471                 }
1472                 track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType);
1473
1474                 m_metadataTracks.add(pid, track);
1475                 m_player->addTextTrack(*track);
1476             }
1477         }
1478     } else {
1479         AtomicString pid = String::number(section->pid);
1480         RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = m_metadataTracks.get(pid);
1481         if (!track)
1482             return;
1483
1484         GRefPtr<GBytes> data = gst_mpegts_section_get_data(section);
1485         gsize size;
1486         const void* bytes = g_bytes_get_data(data.get(), &size);
1487
1488         track->addDataCue(currentMediaTime(), currentMediaTime(), bytes, size);
1489     }
1490 }
1491 #endif
1492
1493 #if ENABLE(VIDEO_TRACK)
1494 void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
1495 {
1496     if (m_chaptersTrack)
1497         m_player->removeTextTrack(*m_chaptersTrack);
1498
1499     m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Chapters, InbandTextTrackPrivate::Generic);
1500     m_player->addTextTrack(*m_chaptersTrack);
1501
1502     GRefPtr<GstToc> toc;
1503     gboolean updated;
1504     gst_message_parse_toc(message, &toc.outPtr(), &updated);
1505     ASSERT(toc);
1506
1507     for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next)
1508         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1509 }
1510
1511 void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry)
1512 {
1513     ASSERT(entry);
1514
1515     auto cue = GenericCueData::create();
1516
1517     gint64 start = -1, stop = -1;
1518     gst_toc_entry_get_start_stop_times(entry, &start, &stop);
1519     if (start != -1)
1520         cue->setStartTime(MediaTime(start, GST_SECOND));
1521     if (stop != -1)
1522         cue->setEndTime(MediaTime(stop, GST_SECOND));
1523
1524     GstTagList* tags = gst_toc_entry_get_tags(entry);
1525     if (tags) {
1526         gchar* title = nullptr;
1527         gst_tag_list_get_string(tags, GST_TAG_TITLE, &title);
1528         if (title) {
1529             cue->setContent(title);
1530             g_free(title);
1531         }
1532     }
1533
1534     m_chaptersTrack->addGenericCue(cue);
1535
1536     for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next)
1537         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1538 }
1539
1540 void MediaPlayerPrivateGStreamer::purgeInvalidAudioTracks(Vector<String> validTrackIds)
1541 {
1542     m_audioTracks.removeIf([validTrackIds](auto& keyAndValue) {
1543         return !validTrackIds.contains(keyAndValue.key);
1544     });
1545 }
1546
1547 void MediaPlayerPrivateGStreamer::purgeInvalidVideoTracks(Vector<String> validTrackIds)
1548 {
1549     m_videoTracks.removeIf([validTrackIds](auto& keyAndValue) {
1550         return !validTrackIds.contains(keyAndValue.key);
1551     });
1552 }
1553
1554 void MediaPlayerPrivateGStreamer::purgeInvalidTextTracks(Vector<String> validTrackIds)
1555 {
1556     m_textTracks.removeIf([validTrackIds](auto& keyAndValue) {
1557         return !validTrackIds.contains(keyAndValue.key);
1558     });
1559 }
1560 #endif
1561
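// GstIterator match function: returns 0 (found) for a queue element whose grandparent bin is an hlsdemux, 1 otherwise.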
1562 static gint findHLSQueue(gconstpointer a, gconstpointer)
1563 {
1564     GValue* item = static_cast<GValue*>(const_cast<gpointer>(a));
1565     GstElement* element = GST_ELEMENT(g_value_get_object(item));
1566     if (g_str_has_prefix(GST_ELEMENT_NAME(element), "queue")) {
1567         GstElement* parent = GST_ELEMENT(GST_ELEMENT_PARENT(element));
1568         if (!GST_IS_OBJECT(parent))
1569             return 1;
1570
1571         if (g_str_has_prefix(GST_ELEMENT_NAME(GST_ELEMENT_PARENT(parent)), "hlsdemux"))
1572             return 0;
1573     }
1574
1575     return 1;
1576 }
1577
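// Runs the buffering query against the queue living inside hlsdemux, if one is found, as a proxy for download progress when the pipeline-level query fails.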
1578 static bool isHLSProgressing(GstElement* playbin, GstQuery* query)
1579 {
1580     GValue item = { };
1581     GstIterator* binIterator = gst_bin_iterate_recurse(GST_BIN(playbin));
1582     bool foundHLSQueue = gst_iterator_find_custom(binIterator, reinterpret_cast<GCompareFunc>(findHLSQueue), &item, nullptr);
1583     gst_iterator_free(binIterator);
1584
1585     if (!foundHLSQueue)
1586         return false;
1587
1588     GstElement* queueElement = GST_ELEMENT(g_value_get_object(&item));
1589     bool queryResult = gst_element_query(queueElement, query);
1590     g_value_unset(&item);
1591
1592     return queryResult;
1593 }
1594
1595 void MediaPlayerPrivateGStreamer::fillTimerFired()
1596 {
1597     GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
1598
1599     if (G_UNLIKELY(!gst_element_query(m_pipeline.get(), query))) {
1600         // This query always fails for live pipelines. In the case of HLS, try to find
1601         // the queue inside the HLS element to get a proxy measure of progress. Note
1602         // that the percentage value is rather meaningless as used below.
1603         // This is a hack, see https://bugs.webkit.org/show_bug.cgi?id=141469.
1604         if (!isHLSProgressing(m_pipeline.get(), query)) {
1605             gst_query_unref(query);
1606             return;
1607         }
1608     }
1609
1610     gint64 start, stop;
1611     gdouble fillStatus = 100.0;
1612
1613     gst_query_parse_buffering_range(query, nullptr, &start, &stop, nullptr);
1614     gst_query_unref(query);
1615
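    // The buffering range is expressed in GST_FORMAT_PERCENT units, where GST_FORMAT_PERCENT_MAX corresponds to 100%.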
1616     if (stop != -1)
1617         fillStatus = 100.0 * stop / GST_FORMAT_PERCENT_MAX;
1618
1619     GST_DEBUG_OBJECT(pipeline(), "[Buffering] Download buffer filled up to %f%%", fillStatus);
1620
1621     MediaTime mediaDuration = durationMediaTime();
1622
1623     // Update maxTimeLoaded only if the media duration is
1624     // available. Otherwise we can't compute it.
1625     if (mediaDuration) {
1626         if (fillStatus == 100.0)
1627             m_maxTimeLoaded = mediaDuration;
1628         else
1629             m_maxTimeLoaded = MediaTime(fillStatus * static_cast<double>(toGstUnsigned64Time(mediaDuration)) / 100, GST_SECOND);
1630         GST_DEBUG_OBJECT(pipeline(), "[Buffering] Updated maxTimeLoaded: %s", toString(m_maxTimeLoaded).utf8().data());
1631     }
1632
1633     m_downloadFinished = fillStatus == 100.0;
1634     if (!m_downloadFinished) {
1635         updateStates();
1636         return;
1637     }
1638
1639     // The media is now fully loaded. It will keep playing even if the network
1640     // connection is cut. Buffering is done, so remove the fill timer
1641     // from the main loop.
1642     m_fillTimer.stop();
1643     updateStates();
1644 }
1645
1646 MediaTime MediaPlayerPrivateGStreamer::maxMediaTimeSeekable() const
1647 {
1648     if (m_errorOccured)
1649         return MediaTime::zeroTime();
1650
1651     MediaTime duration = durationMediaTime();
1652     GST_DEBUG_OBJECT(pipeline(), "maxMediaTimeSeekable, duration: %s", toString(duration).utf8().data());
1653     // An infinite duration means this is a live stream.
1654     if (duration.isPositiveInfinite())
1655         return MediaTime::zeroTime();
1656
1657     return duration;
1658 }
1659
1660 MediaTime MediaPlayerPrivateGStreamer::maxTimeLoaded() const
1661 {
1662     if (m_errorOccured)
1663         return MediaTime::zeroTime();
1664
1665     MediaTime loaded = m_maxTimeLoaded;
1666     if (m_isEndReached)
1667         loaded = durationMediaTime();
1668     GST_LOG("maxTimeLoaded: %s", toString(loaded).utf8().data());
1669     return loaded;
1670 }
1671
1672 bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
1673 {
1674     if (m_errorOccured || m_loadingStalled)
1675         return false;
1676
1677     if (isLiveStream())
1678         return true;
1679
1680     if (UNLIKELY(!m_pipeline || !durationMediaTime() || (!isMediaSource() && !totalBytes())))
1681         return false;
1682
1683     MediaTime currentMaxTimeLoaded = maxTimeLoaded();
1684     bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
1685     m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
1686     GST_LOG("didLoadingProgress: %s", toString(didLoadingProgress).utf8().data());
1687     return didLoadingProgress;
1688 }
1689
1690 unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
1691 {
1692     if (m_errorOccured)
1693         return 0;
1694
1695     if (m_totalBytes)
1696         return m_totalBytes;
1697
1698     if (!m_source)
1699         return 0;
1700
1701     if (isLiveStream())
1702         return 0;
1703
1704     GstFormat fmt = GST_FORMAT_BYTES;
1705     gint64 length = 0;
1706     if (gst_element_query_duration(m_source.get(), fmt, &length)) {
1707         GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
1708         m_totalBytes = static_cast<unsigned long long>(length);
1709         m_isStreaming = !length;
1710         return m_totalBytes;
1711     }
1712
1713     // Fall back to querying the source pads manually.
1714     // See also https://bugzilla.gnome.org/show_bug.cgi?id=638749
1715     GstIterator* iter = gst_element_iterate_src_pads(m_source.get());
1716     bool done = false;
1717     while (!done) {
1718         GValue item = G_VALUE_INIT;
1719         switch (gst_iterator_next(iter, &item)) {
1720         case GST_ITERATOR_OK: {
1721             GstPad* pad = static_cast<GstPad*>(g_value_get_object(&item));
1722             gint64 padLength = 0;
1723             if (gst_pad_query_duration(pad, fmt, &padLength) && padLength > length)
1724                 length = padLength;
1725             break;
1726         }
1727         case GST_ITERATOR_RESYNC:
1728             gst_iterator_resync(iter);
1729             break;
1730         case GST_ITERATOR_ERROR:
1731             FALLTHROUGH;
1732         case GST_ITERATOR_DONE:
1733             done = true;
1734             break;
1735         }
1736
1737         g_value_unset(&item);
1738     }
1739
1740     gst_iterator_free(iter);
1741
1742     GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
1743     m_totalBytes = static_cast<unsigned long long>(length);
1744     m_isStreaming = !length;
1745     return m_totalBytes;
1746 }
1747
1748 void MediaPlayerPrivateGStreamer::sourceSetupCallback(MediaPlayerPrivateGStreamer* player, GstElement* sourceElement)
1749 {
1750     player->sourceSetup(sourceElement);
1751 }
1752
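// Called for every element added to uridecodebin; only GstDownloadBuffer is of interest, so its temporary file can be relocated to /var/tmp and stale downloads purged.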
1753 void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GstElement* element, MediaPlayerPrivateGStreamer* player)
1754 {
1755     if (g_strcmp0(G_OBJECT_TYPE_NAME(element), "GstDownloadBuffer"))
1756         return;
1757
1758     player->m_downloadBuffer = element;
1759     g_signal_handlers_disconnect_by_func(bin, reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), player);
1760     g_signal_connect_swapped(element, "notify::temp-location", G_CALLBACK(downloadBufferFileCreatedCallback), player);
1761
1762     GUniqueOutPtr<char> oldDownloadTemplate;
1763     g_object_get(element, "temp-template", &oldDownloadTemplate.outPtr(), nullptr);
1764
1765     GUniquePtr<char> newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, "var", "tmp", "WebKit-Media-XXXXXX", nullptr));
1766     g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr);
1767     GST_DEBUG_OBJECT(player->pipeline(), "Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get());
1768
1769     player->purgeOldDownloadFiles(oldDownloadTemplate.get());
1770 }
1771
1772 void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer* player)
1773 {
1774     ASSERT(player->m_downloadBuffer);
1775
1776     g_signal_handlers_disconnect_by_func(player->m_downloadBuffer.get(), reinterpret_cast<gpointer>(downloadBufferFileCreatedCallback), player);
1777
1778     GUniqueOutPtr<char> downloadFile;
1779     g_object_get(player->m_downloadBuffer.get(), "temp-location", &downloadFile.outPtr(), nullptr);
1780     player->m_downloadBuffer = nullptr;
1781
1782     if (UNLIKELY(!FileSystem::deleteFile(downloadFile.get()))) {
1783         GST_WARNING("Couldn't unlink media temporary file %s after creation", downloadFile.get());
1784         return;
1785     }
1786
1787     GST_DEBUG_OBJECT(player->pipeline(), "Unlinked media temporary file %s after creation", downloadFile.get());
1788 }
1789
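// Deletes leftover files from previous sessions that match the download buffer's temp-template (each 'X' placeholder is treated as a wildcard).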
1790 void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate)
1791 {
1792     if (!downloadFileTemplate)
1793         return;
1794
1795     GUniquePtr<char> templatePath(g_path_get_dirname(downloadFileTemplate));
1796     GUniquePtr<char> templateFile(g_path_get_basename(downloadFileTemplate));
1797     String templatePattern = String(templateFile.get()).replace("X", "?");
1798
1799     for (auto& filePath : FileSystem::listDirectory(templatePath.get(), templatePattern)) {
1800         if (UNLIKELY(!FileSystem::deleteFile(filePath))) {
1801             GST_WARNING("Couldn't unlink legacy media temporary file: %s", filePath.utf8().data());
1802             continue;
1803         }
1804
1805         GST_TRACE("Unlinked legacy media temporary file: %s", filePath.utf8().data());
1806     }
1807 }
1808
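// Invoked through playbin's source-setup signal: keeps a reference to the new source element and wires it to the player (webkitwebsrc) or to the MediaStream source.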
1809 void MediaPlayerPrivateGStreamer::sourceSetup(GstElement* sourceElement)
1810 {
1811     GST_DEBUG_OBJECT(pipeline(), "Source element set-up for %s", GST_ELEMENT_NAME(sourceElement));
1812
1813     if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
1814         g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);
1815
1816     m_source = sourceElement;
1817
1818     if (WEBKIT_IS_WEB_SRC(m_source.get())) {
1819         webKitWebSrcSetMediaPlayer(WEBKIT_WEB_SRC(m_source.get()), m_player);
1820         g_signal_connect(GST_ELEMENT_PARENT(m_source.get()), "element-added", G_CALLBACK(uriDecodeBinElementAddedCallback), this);
1821 #if ENABLE(MEDIA_STREAM) && GST_CHECK_VERSION(1, 10, 0)
1822     } else if (WEBKIT_IS_MEDIA_STREAM_SRC(sourceElement)) {
1823         auto stream = m_streamPrivate.get();
1824         ASSERT(stream);
1825         webkitMediaStreamSrcSetStream(WEBKIT_MEDIA_STREAM_SRC(sourceElement), stream);
1826 #endif
1827     }
1828 }
1829
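// For webkitwebsrc, compare the requested and resolved (post-redirect) locations: a differing scheme, host or port means more than one security origin was involved.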
1830 bool MediaPlayerPrivateGStreamer::hasSingleSecurityOrigin() const
1831 {
1832     if (!m_source)
1833         return false;
1834
1835     if (!WEBKIT_IS_WEB_SRC(m_source.get()))
1836         return true;
1837
1838     GUniqueOutPtr<char> originalURI, resolvedURI;
1839     g_object_get(m_source.get(), "location", &originalURI.outPtr(), "resolved-location", &resolvedURI.outPtr(), nullptr);
1840     if (!originalURI || !resolvedURI)
1841         return false;
1842     if (!g_strcmp0(originalURI.get(), resolvedURI.get()))
1843         return true;
1844
1845     Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::createFromString(String::fromUTF8(resolvedURI.get())));
1846     Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(String::fromUTF8(originalURI.get())));
1847     return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
1848 }
1849
1850 void MediaPlayerPrivateGStreamer::cancelLoad()
1851 {
1852     if (m_networkState < MediaPlayer::Loading || m_networkState == MediaPlayer::Loaded)
1853         return;
1854
1855     if (m_pipeline)
1856         changePipelineState(GST_STATE_READY);
1857 }
1858
1859 void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
1860 {
1861     if (!m_pipeline || m_errorOccured)
1862         return;
1863
1864     if (m_seeking) {
1865         if (m_seekIsPending)
1866             updateStates();
1867         else {
1868             GST_DEBUG_OBJECT(pipeline(), "[Seek] seeked to %s", toString(m_seekTime).utf8().data());
1869             m_seeking = false;
1870             m_cachedPosition = MediaTime::invalidTime();
1871             if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek.isValid()) {
1872                 seek(m_timeOfOverlappingSeek);
1873                 m_timeOfOverlappingSeek = MediaTime::invalidTime();
1874                 return;
1875             }
1876             m_timeOfOverlappingSeek = MediaTime::invalidTime();
1877
1878             // The pipeline can still have a pending state. In this case a position query will fail.
1879             // Right now we can use m_seekTime as a fallback.
1880             m_canFallBackToLastFinishedSeekPosition = true;
1881             timeChanged();
1882         }
1883     } else
1884         updateStates();
1885 }
1886
1887 void MediaPlayerPrivateGStreamer::updateStates()
1888 {
1889     if (!m_pipeline)
1890         return;
1891
1892     if (m_errorOccured)
1893         return;
1894
1895     MediaPlayer::NetworkState oldNetworkState = m_networkState;
1896     MediaPlayer::ReadyState oldReadyState = m_readyState;
1897     GstState pending;
1898     GstState state;
1899     bool stateReallyChanged = false;
1900
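    // Poll the pipeline state with a near-zero timeout (250 ns) so this call does not block.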
1901     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
1902     if (state != m_currentState) {
1903         m_oldState = m_currentState;
1904         m_currentState = state;
1905         stateReallyChanged = true;
1906     }
1907
1908     bool shouldUpdatePlaybackState = false;
1909     switch (getStateResult) {
1910     case GST_STATE_CHANGE_SUCCESS: {
1911         GST_DEBUG_OBJECT(pipeline(), "State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
1912
1913         // Do nothing if we reached EOS and the state changed to READY, to avoid recreating the player
1914         // in HTMLMediaElement and to properly generate the video 'ended' event.
1915         if (m_isEndReached && m_currentState == GST_STATE_READY)
1916             break;
1917
1918         m_resetPipeline = m_currentState <= GST_STATE_READY;
1919
1920         bool didBuffering = m_buffering;
1921
1922         // Update ready and network states.
1923         switch (m_currentState) {
1924         case GST_STATE_NULL:
1925             m_readyState = MediaPlayer::HaveNothing;
1926             m_networkState = MediaPlayer::Empty;
1927             break;
1928         case GST_STATE_READY:
1929             m_readyState = MediaPlayer::HaveMetadata;
1930             m_networkState = MediaPlayer::Empty;
1931             break;
1932         case GST_STATE_PAUSED:
1933         case GST_STATE_PLAYING:
1934             if (m_buffering) {
1935                 if (m_bufferingPercentage == 100) {
1936                     GST_DEBUG_OBJECT(pipeline(), "[Buffering] Complete.");
1937                     m_buffering = false;
1938                     m_readyState = MediaPlayer::HaveEnoughData;
1939                     m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
1940                 } else {
1941                     m_readyState = MediaPlayer::HaveCurrentData;
1942                     m_networkState = MediaPlayer::Loading;
1943                 }
1944             } else if (m_downloadFinished) {
1945                 m_readyState = MediaPlayer::HaveEnoughData;
1946                 m_networkState = MediaPlayer::Loaded;
1947             } else {
1948                 m_readyState = MediaPlayer::HaveFutureData;
1949                 m_networkState = MediaPlayer::Loading;
1950             }
1951
1952             break;
1953         default:
1954             ASSERT_NOT_REACHED();
1955             break;
1956         }
1957
1958         // Sync states where needed.
1959         if (m_currentState == GST_STATE_PAUSED) {
1960             if (!m_volumeAndMuteInitialized) {
1961                 notifyPlayerOfVolumeChange();
1962                 notifyPlayerOfMute();
1963                 m_volumeAndMuteInitialized = true;
1964             }
1965
1966             if (didBuffering && !m_buffering && !m_paused && m_playbackRate) {
1967                 GST_DEBUG_OBJECT(pipeline(), "[Buffering] Restarting playback.");
1968                 changePipelineState(GST_STATE_PLAYING);
1969             }
1970         } else if (m_currentState == GST_STATE_PLAYING) {
1971             m_paused = false;
1972
1973             if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
1974                 GST_DEBUG_OBJECT(pipeline(), "[Buffering] Pausing stream for buffering.");
1975                 changePipelineState(GST_STATE_PAUSED);
1976             }
1977         } else
1978             m_paused = true;
1979
1980         GST_DEBUG_OBJECT(pipeline(), "Old state: %s, new state: %s (requested: %s)", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState), gst_element_state_get_name(m_requestedState));
1981         if (m_requestedState == GST_STATE_PAUSED && m_currentState == GST_STATE_PAUSED) {
1982             shouldUpdatePlaybackState = true;
1983             GST_INFO_OBJECT(pipeline(), "Requested state change to %s was completed", gst_element_state_get_name(m_currentState));
1984         }
1985
1986         // Emit play state change notification only when going to PLAYING so that
1987         // the media element gets a chance to enable its page sleep disabler.
1988         // Emitting this notification in more cases triggers unwanted code paths
1989         // and test timeouts.
1990         if (stateReallyChanged && (m_oldState != m_currentState) && (m_oldState == GST_STATE_PAUSED && m_currentState == GST_STATE_PLAYING)) {
1991             GST_INFO_OBJECT(pipeline(), "Playback state changed from %s to %s. Notifying the media player client", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState));
1992             shouldUpdatePlaybackState = true;
1993         }
1994
1995         break;
1996     }
1997     case GST_STATE_CHANGE_ASYNC:
1998         GST_DEBUG_OBJECT(pipeline(), "Async: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
1999         // Change in progress.
2000         break;
2001     case GST_STATE_CHANGE_FAILURE:
2002         GST_DEBUG_OBJECT(pipeline(), "Failure: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
2003         // Change failed
2004         return;
2005     case GST_STATE_CHANGE_NO_PREROLL:
2006         GST_DEBUG_OBJECT(pipeline(), "No preroll: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
2007
2008         // Live pipelines go into PAUSED without prerolling.
2009         m_isStreaming = true;
2010         setDownloadBuffering();
2011
2012         if (m_currentState == GST_STATE_READY)
2013             m_readyState = MediaPlayer::HaveNothing;
2014         else if (m_currentState == GST_STATE_PAUSED) {
2015             m_readyState = MediaPlayer::HaveEnoughData;
2016             m_paused = true;
2017         } else if (m_currentState == GST_STATE_PLAYING)
2018             m_paused = false;
2019
2020         if (!m_paused && m_playbackRate)
2021             changePipelineState(GST_STATE_PLAYING);
2022
2023         m_networkState = MediaPlayer::Loading;
2024         break;
2025     default:
2026         GST_DEBUG_OBJECT(pipeline(), "Else : %d", getStateResult);
2027         break;
2028     }
2029
2030     m_requestedState = GST_STATE_VOID_PENDING;
2031
2032     if (shouldUpdatePlaybackState)
2033         m_player->playbackStateChanged();
2034
2035     if (m_networkState != oldNetworkState) {
2036         GST_DEBUG_OBJECT(pipeline(), "Network State Changed from %s to %s", convertEnumerationToString(oldNetworkState).utf8().data(), convertEnumerationToString(m_networkState).utf8().data());
2037         m_player->networkStateChanged();
2038     }
2039     if (m_readyState != oldReadyState) {
2040         GST_DEBUG_OBJECT(pipeline(), "Ready State Changed from %s to %s", convertEnumerationToString(oldReadyState).utf8().data(), convertEnumerationToString(m_readyState).utf8().data());
2041         m_player->readyStateChanged();
2042     }
2043
2044     if (getStateResult == GST_STATE_CHANGE_SUCCESS && m_currentState >= GST_STATE_PAUSED) {
2045         updatePlaybackRate();
2046         if (m_seekIsPending) {
2047             GST_DEBUG_OBJECT(pipeline(), "[Seek] committing pending seek to %s", toString(m_seekTime).utf8().data());
2048             m_seekIsPending = false;
2049             m_seeking = doSeek(m_seekTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
2050             if (!m_seeking) {
2051                 m_cachedPosition = MediaTime::invalidTime();
2052                 GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(m_seekTime).utf8().data());
2053             }
2054         }
2055     }
2056 }
2057
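// With playbin3 the stream collection advertised on the bus is captured here and the actual track update is deferred to the main thread through the notifier.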
2058 bool MediaPlayerPrivateGStreamer::handleSyncMessage(GstMessage* message)
2059 {
2060 #if GST_CHECK_VERSION(1, 10, 0)
2061     if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_STREAM_COLLECTION && !m_isLegacyPlaybin) {
2062         GRefPtr<GstStreamCollection> collection;
2063         gst_message_parse_stream_collection(message, &collection.outPtr());
2064
2065         if (collection) {
2066             m_streamCollection.swap(collection);
2067             m_notifier->notify(MainThreadNotification::StreamCollectionChanged, [this] {
2068                 this->updateTracks();
2069             });
2070         }
2071     }
2072 #endif
2073
2074     return MediaPlayerPrivateGStreamerBase::handleSyncMessage(message);
2075 }
2076
2077 void MediaPlayerPrivateGStreamer::mediaLocationChanged(GstMessage* message)
2078 {
2079     if (m_mediaLocations)
2080         gst_structure_free(m_mediaLocations);
2081
2082     const GstStructure* structure = gst_message_get_structure(message);
2083     if (structure) {
2084         // This structure can contain:
2085         // - both a new-location string and an embedded locations structure, or
2086         // - only a new-location string.
2087         m_mediaLocations = gst_structure_copy(structure);
2088         const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2089
2090         if (locations)
2091             m_mediaLocationCurrentIndex = static_cast<int>(gst_value_list_get_size(locations)) - 1;
2092
2093         loadNextLocation();
2094     }
2095 }
2096
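// Attempts to load the next redirection candidate from the last new-location message. Returns true only if a new URL was accepted and the pipeline restarted.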
2097 bool MediaPlayerPrivateGStreamer::loadNextLocation()
2098 {
2099     if (!m_mediaLocations)
2100         return false;
2101
2102     const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2103     const gchar* newLocation = nullptr;
2104
2105     if (!locations) {
2106         // Fallback on new-location string.
2107         newLocation = gst_structure_get_string(m_mediaLocations, "new-location");
2108         if (!newLocation)
2109             return false;
2110     }
2111
2112     if (!newLocation) {
2113         if (m_mediaLocationCurrentIndex < 0) {
2114             m_mediaLocations = nullptr;
2115             return false;
2116         }
2117
2118         const GValue* location = gst_value_list_get_value(locations, m_mediaLocationCurrentIndex);
2119         const GstStructure* structure = gst_value_get_structure(location);
2120
2121         if (!structure) {
2122             m_mediaLocationCurrentIndex--;
2123             return false;
2124         }
2125
2126         newLocation = gst_structure_get_string(structure, "new-location");
2127     }
2128
2129         // Found a candidate. new-location is not always an absolute URL,
2130         // though, so we take the base of the current URL and
2131         // append the value of new-location to it.
2132         // append the value of new-location to it.
2133         URL baseUrl = gst_uri_is_valid(newLocation) ? URL() : m_url;
2134         URL newUrl = URL(baseUrl, newLocation);
2135         convertToInternalProtocol(newUrl);
2136
2137         auto securityOrigin = SecurityOrigin::create(m_url);
2138         if (securityOrigin->canRequest(newUrl)) {
2139             GST_INFO_OBJECT(pipeline(), "New media url: %s", newUrl.string().utf8().data());
2140
2141             // Reset player states.
2142             m_networkState = MediaPlayer::Loading;
2143             m_player->networkStateChanged();
2144             m_readyState = MediaPlayer::HaveNothing;
2145             m_player->readyStateChanged();
2146
2147             // Reset pipeline state.
2148             m_resetPipeline = true;
2149             changePipelineState(GST_STATE_READY);
2150
2151             GstState state;
2152             gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
2153             if (state <= GST_STATE_READY) {
2154                 // Set the new uri and start playing.
2155                 setPlaybinURL(newUrl);
2156                 changePipelineState(GST_STATE_PLAYING);
2157                 return true;
2158             }
2159         } else
2160             GST_INFO_OBJECT(pipeline(), "Not allowed to load new media location: %s", newUrl.string().utf8().data());
2161     }
2162     m_mediaLocationCurrentIndex--;
2163     return false;
2164 }
2165
2166 void MediaPlayerPrivateGStreamer::loadStateChanged()
2167 {
2168     updateStates();
2169 }
2170
2171 void MediaPlayerPrivateGStreamer::timeChanged()
2172 {
2173     updateStates();
2174     m_player->timeChanged();
2175 }
2176
2177 void MediaPlayerPrivateGStreamer::didEnd()
2178 {
2179     GST_INFO_OBJECT(pipeline(), "Playback ended");
2180
2181     // Synchronize position and duration values to avoid confusing the
2182     // HTMLMediaElement. In some cases, such as reverse playback, the
2183     // position is not always reported as 0.
2184     m_cachedPosition = MediaTime::invalidTime();
2185     MediaTime now = currentMediaTime();
2186     if (now > MediaTime { } && now <= durationMediaTime())
2187         m_player->durationChanged();
2188
2189     m_isEndReached = true;
2190     timeChanged();
2191
2192     if (!m_player->client().mediaPlayerIsLooping()) {
2193         m_paused = true;
2194         m_durationAtEOS = durationMediaTime();
2195         changePipelineState(GST_STATE_READY);
2196         m_downloadFinished = false;
2197     }
2198 }
2199
2200 void MediaPlayerPrivateGStreamer::durationChanged()
2201 {
2202     MediaTime previousDuration = durationMediaTime();
2203
2204     // FIXME: Check if this method is still useful, because it's not doing its work at all
2205     // since bug #159458 removed a cacheDuration() call here.
2206
2207     // Avoid emitting durationchanged in the case where the previous
2208     // duration was 0 because that case is already handled by the
2209     // HTMLMediaElement.
2210     if (previousDuration && durationMediaTime() != previousDuration)
2211         m_player->durationChanged();
2212 }
2213
2214 void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState error)
2215 {
2216     GST_WARNING("Loading failed, error: %d", error);
2217
2218     m_errorOccured = true;
2219     if (m_networkState != error) {
2220         m_networkState = error;
2221         m_player->networkStateChanged();
2222     }
2223     if (m_readyState != MediaPlayer::HaveNothing) {
2224         m_readyState = MediaPlayer::HaveNothing;
2225         m_player->readyStateChanged();
2226     }
2227
2228     // Loading failed, remove ready timer.
2229     m_readyTimerHandler.stop();
2230 }
2231
2232 void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
2233 {
2234     auto& gstRegistryScanner = GStreamerRegistryScanner::singleton();
2235     types = gstRegistryScanner.mimeTypeSet();
2236 }
2237
2238 MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::supportsType(const MediaEngineSupportParameters& parameters)
2239 {
2240     MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
2241 #if ENABLE(MEDIA_SOURCE)
2242     // MediaPlayerPrivateGStreamerMSE is in charge of mediasource playback, not us.
2243     if (parameters.isMediaSource)
2244         return result;
2245 #endif
2246
2247 #if !ENABLE(MEDIA_STREAM) || !GST_CHECK_VERSION(1, 10, 0)
2248     if (parameters.isMediaStream)
2249         return result;
2250 #endif
2251
2252     if (parameters.type.isEmpty())
2253         return result;
2254
2255     GST_DEBUG("Checking mime-type \"%s\"", parameters.type.raw().utf8().data());
2256     auto containerType = parameters.type.containerType();
2257     auto& gstRegistryScanner = GStreamerRegistryScanner::singleton();
2258     if (gstRegistryScanner.isContainerTypeSupported(containerType)) {
2259         // Spec says we should not return "probably" if the codecs string is empty.
2260         Vector<String> codecs = parameters.type.codecs();
2261         result = codecs.isEmpty() ? MediaPlayer::MayBeSupported : (gstRegistryScanner.areAllCodecsSupported(codecs) ? MediaPlayer::IsSupported : MediaPlayer::IsNotSupported);
2262     }
2263
2264     auto finalResult = extendedSupportsType(parameters, result);
2265     GST_DEBUG("Supported: %s", convertEnumerationToString(finalResult).utf8().data());
2266     return finalResult;
2267 }
2268
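// Toggles playbin's 'download' flag (progressive on-disk buffering) based on the preload setting and the liveness of the stream, starting or stopping the fill timer accordingly.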
2269 void MediaPlayerPrivateGStreamer::setDownloadBuffering()
2270 {
2271     if (!m_pipeline)
2272         return;
2273
2274     unsigned flags;
2275     g_object_get(m_pipeline.get(), "flags", &flags, nullptr);
2276
2277     unsigned flagDownload = getGstPlayFlag("download");
2278
2279     // We don't want to stop downloading if we already started it.
2280     if (flags & flagDownload && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline) {
2281         GST_DEBUG_OBJECT(pipeline(), "Download already started, not starting again");
2282         return;
2283     }
2284
2285     bool shouldDownload = !isLiveStream() && m_preload == MediaPlayer::Auto;
2286     if (shouldDownload) {
2287         GST_INFO_OBJECT(pipeline(), "Enabling on-disk buffering");
2288         g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
2289         m_fillTimer.startRepeating(200_ms);
2290     } else {
2291         GST_INFO_OBJECT(pipeline(), "Disabling on-disk buffering");
2292         g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr);
2293         m_fillTimer.stop();
2294     }
2295 }
2296
2297 void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
2298 {
2299     GST_DEBUG_OBJECT(pipeline(), "Setting preload to %s", convertEnumerationToString(preload).utf8().data());
2300     if (preload == MediaPlayer::Auto && isLiveStream())
2301         return;
2302
2303     m_preload = preload;
2304     setDownloadBuffering();
2305
2306     if (m_delayingLoad && m_preload != MediaPlayer::None) {
2307         m_delayingLoad = false;
2308         commitLoad();
2309     }
2310 }
2311
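// Builds the audio sink handed to playbin: an autoaudiosink, optionally wrapped in a bin with scaletempo (pitch-preserving rate changes) and/or the Web Audio source provider.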
2312 GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
2313 {
2314     m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr);
2315     if (!m_autoAudioSink) {
2316         GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation");
2317         return nullptr;
2318     }
2319
2320     g_signal_connect_swapped(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);
2321
2322     GstElement* audioSinkBin;
2323
2324     if (webkitGstCheckVersion(1, 4, 2)) {
2325 #if ENABLE(WEB_AUDIO)
2326         audioSinkBin = gst_bin_new("audio-sink");
2327         ensureAudioSourceProvider();
2328         m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
2329         return audioSinkBin;
2330 #else
2331         return m_autoAudioSink.get();
2332 #endif
2333     }
2334
2335     // Construct the audio sink only if pitch preserving is enabled.
2336     // With GStreamer >= 1.4.2 the audio-filter playbin property is used instead.
2337     if (m_preservesPitch) {
2338         GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
2339         if (!scale) {
2340             GST_WARNING("Failed to create scaletempo");
2341             return m_autoAudioSink.get();
2342         }
2343
2344         audioSinkBin = gst_bin_new("audio-sink");
2345         gst_bin_add(GST_BIN(audioSinkBin), scale);
2346         GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(scale, "sink"));
2347         gst_element_add_pad(audioSinkBin, gst_ghost_pad_new("sink", pad.get()));
2348
2349 #if ENABLE(WEB_AUDIO)
2350         ensureAudioSourceProvider();
2351         m_audioSourceProvider->configureAudioBin(audioSinkBin, scale);
2352 #else
2353         GstElement* convert = gst_element_factory_make("audioconvert", nullptr);
2354         GstElement* resample = gst_element_factory_make("audioresample", nullptr);
2355
2356         gst_bin_add_many(GST_BIN(audioSinkBin), convert, resample, m_autoAudioSink.get(), nullptr);
2357
2358         if (!gst_element_link_many(scale, convert, resample, m_autoAudioSink.get(), nullptr)) {
2359             GST_WARNING("Failed to link audio sink elements");
2360             gst_object_unref(audioSinkBin);
2361             return m_autoAudioSink.get();
2362         }
2363 #endif
2364         return audioSinkBin;
2365     }
2366
2367 #if ENABLE(WEB_AUDIO)
2368     audioSinkBin = gst_bin_new("audio-sink");
2369     ensureAudioSourceProvider();
2370     m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
2371     return audioSinkBin;
2372 #endif
2373     ASSERT_NOT_REACHED();
2374     return nullptr;
2375 }
2376
2377 GstElement* MediaPlayerPrivateGStreamer::audioSink() const
2378 {
2379     GstElement* sink;
2380     g_object_get(m_pipeline.get(), "audio-sink", &sink, nullptr);
2381     return sink;
2382 }
2383
2384 #if ENABLE(WEB_AUDIO)
2385 void MediaPlayerPrivateGStreamer::ensureAudioSourceProvider()
2386 {
2387     if (!m_audioSourceProvider)
2388         m_audioSourceProvider = std::make_unique<AudioSourceProviderGStreamer>();
2389 }
2390
2391 AudioSourceProvider* MediaPlayerPrivateGStreamer::audioSourceProvider()
2392 {
2393     ensureAudioSourceProvider();
2394     return m_audioSourceProvider.get();
2395 }
2396 #endif
2397
2398 void MediaPlayerPrivateGStreamer::createGSTPlayBin(const gchar* playbinName, const String& pipelineName)
2399 {
2400     if (m_pipeline) {
2401         if (!playbinName) {
2402             GST_INFO_OBJECT(pipeline(), "Keeping same playbin as nothing forced");
2403             return;
2404         }
2405
2406         if (!g_strcmp0(GST_OBJECT_NAME(gst_element_get_factory(m_pipeline.get())), playbinName)) {
2407             GST_INFO_OBJECT(pipeline(), "Already using %s", playbinName);
2408             return;
2409         }
2410
2411         GST_INFO_OBJECT(pipeline(), "Tearing down as we need to use %s now.",
2412             playbinName);
2413         changePipelineState(GST_STATE_NULL);
2414         m_pipeline = nullptr;
2415     }
2416
2417     ASSERT(!m_pipeline);
2418
2419 #if GST_CHECK_VERSION(1, 10, 0)
2420     if (g_getenv("USE_PLAYBIN3"))
2421         playbinName = "playbin3";
2422 #else
2423     playbinName = "playbin";
2424 #endif
2425
2426     if (!playbinName)
2427         playbinName = "playbin";
2428
2429     m_isLegacyPlaybin = !g_strcmp0(playbinName, "playbin");
2430
2431     // gst_element_factory_make() returns a floating reference so
2432     // we should not adopt.
2433     setPipeline(gst_element_factory_make(playbinName,
2434         pipelineName.isEmpty() ? String::format("play_%p", this).utf8().data() : pipelineName.utf8().data()));
2435     setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get()));
2436
2437     GST_INFO_OBJECT(pipeline(), "Using legacy playbin element: %s", boolForPrinting(m_isLegacyPlaybin));
2438
2439     // Also let other listeners subscribe to (application) messages on this bus.
2440     GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
2441     gst_bus_add_signal_watch_full(bus.get(), RunLoopSourcePriority::RunLoopDispatcher);
2442     g_signal_connect(bus.get(), "message", G_CALLBACK(busMessageCallback), this);
2443
2444     g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);
2445
2446     g_signal_connect_swapped(m_pipeline.get(), "source-setup", G_CALLBACK(sourceSetupCallback), this);
2447     if (m_isLegacyPlaybin) {
2448         g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
2449         g_signal_connect_swapped(m_pipeline.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
2450     }
2451
2452 #if ENABLE(VIDEO_TRACK)
2453     if (m_isLegacyPlaybin)
2454         g_signal_connect_swapped(m_pipeline.get(), "text-changed", G_CALLBACK(textChangedCallback), this);
2455
2456     GstElement* textCombiner = webkitTextCombinerNew();
2457     ASSERT(textCombiner);
2458     g_object_set(m_pipeline.get(), "text-stream-combiner", textCombiner, nullptr);
2459
2460     m_textAppSink = webkitTextSinkNew();
2461     ASSERT(m_textAppSink);
2462
2463     m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
2464     ASSERT(m_textAppSinkPad);
2465
2466     GRefPtr<GstCaps> textCaps;
2467     if (webkitGstCheckVersion(1, 13, 0))
2468         textCaps = adoptGRef(gst_caps_new_empty_simple("application/x-subtitle-vtt"));
2469     else
2470         textCaps = adoptGRef(gst_caps_new_empty_simple("text/vtt"));
2471     g_object_set(m_textAppSink.get(), "emit-signals", TRUE, "enable-last-sample", FALSE, "caps", textCaps.get(), nullptr);
2472     g_signal_connect_swapped(m_textAppSink.get(), "new-sample", G_CALLBACK(newTextSampleCallback), this);
2473
2474     g_object_set(m_pipeline.get(), "text-sink", m_textAppSink.get(), nullptr);
2475 #endif
2476
2477     g_object_set(m_pipeline.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);
2478
2479     configurePlaySink();
2480
2481     // On 1.4.2 and newer we use the audio-filter property instead.
2482     // See https://bugzilla.gnome.org/show_bug.cgi?id=735748 for
2483     // the reason for using >= 1.4.2 instead of >= 1.4.0.
2484     if (m_preservesPitch && webkitGstCheckVersion(1, 4, 2)) {
2485         GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
2486
2487         if (!scale)
2488             GST_WARNING("Failed to create scaletempo");
2489         else
2490             g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
2491     }
2492
2493     if (!m_renderingCanBeAccelerated) {
2494         // If not using accelerated compositing, let GStreamer handle
2495         // the image-orientation tag.
2496         GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
2497         if (videoFlip) {
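            // A method value of 8 selects videoflip's 'automatic' mode, which rotates frames according to the image-orientation tag.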
2498             g_object_set(videoFlip, "method", 8, nullptr);
2499             g_object_set(m_pipeline.get(), "video-filter", videoFlip, nullptr);
2500         } else
2501             GST_WARNING("The videoflip element is missing, video rotation support is now disabled. Please check your gst-plugins-good installation.");
2502     }
2503
2504     GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
2505     if (videoSinkPad)
2506         g_signal_connect_swapped(videoSinkPad.get(), "notify::caps", G_CALLBACK(videoSinkCapsChangedCallback), this);
2507 }
2508
2509 void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
2510 {
2511     GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_pipeline.get()), GST_STATE_PAUSED);
2512     gst_element_post_message(m_pipeline.get(), message);
2513 }
2514
2515 bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
2516 {
2517     if (WEBKIT_IS_WEB_SRC(m_source.get()))
2518         return webKitSrcPassedCORSAccessCheck(WEBKIT_WEB_SRC(m_source.get()));
2519     return false;
2520 }
2521
2522 bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
2523 {
2524     if (isLiveStream())
2525         return false;
2526
2527     if (m_url.isLocalFile())
2528         return true;
2529
2530     if (m_url.protocolIsInHTTPFamily())
2531         return true;
2532
2533     return false;
2534 }
2535
2536 Optional<bool> MediaPlayerPrivateGStreamer::wouldTaintOrigin(const SecurityOrigin&) const
2537 {
2538     // Ideally the given origin should always be verified with
2539     // webKitSrcWouldTaintOrigin() instead of only checking it for
2540     // adaptive-streaming-statistics. We can't do this yet because HLS fragments
2541     // are currently downloaded independently from WebKit.
2542     // See also https://bugs.webkit.org/show_bug.cgi?id=189967.
2543     return m_hasTaintedOrigin;
2544 }
2545
2546
2547 }
2548
2549 #endif // ENABLE(VIDEO) && USE(GSTREAMER)