[WTF] Add environment variable helpers
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamer.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015, 2016 Igalia S.L
7  * Copyright (C) 2014 Cable Television Laboratories, Inc.
8  * Copyright (C) 2015, 2016 Metrological Group B.V.
9  *
10  * This library is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Library General Public
12  * License as published by the Free Software Foundation; either
13  * version 2 of the License, or (at your option) any later version.
14  *
15  * This library is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18  * Library General Public License for more details.
19  *
20  * You should have received a copy of the GNU Library General Public License
21  * along with this library; see the file COPYING.LIB.  If not, write to
22  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
23  * Boston, MA 02110-1301, USA.
24  */
25
26 #include "config.h"
27 #include "MediaPlayerPrivateGStreamer.h"
28
29 #if ENABLE(VIDEO) && USE(GSTREAMER)
30
31 #include "GStreamerCommon.h"
32 #include "GStreamerRegistryScanner.h"
33 #include "HTTPHeaderNames.h"
34 #include "MIMETypeRegistry.h"
35 #include "MediaPlayer.h"
36 #include "MediaPlayerRequestInstallMissingPluginsCallback.h"
37 #include "NotImplemented.h"
38 #include "SecurityOrigin.h"
39 #include "TimeRanges.h"
40 #include "WebKitWebSourceGStreamer.h"
41 #include <glib.h>
42 #include <gst/gst.h>
43 #include <gst/pbutils/missing-plugins.h>
44 #include <limits>
45 #include <wtf/Environment.h>
46 #include <wtf/FileSystem.h>
47 #include <wtf/HexNumber.h>
48 #include <wtf/MediaTime.h>
49 #include <wtf/NeverDestroyed.h>
50 #include <wtf/StringPrintStream.h>
51 #include <wtf/URL.h>
52 #include <wtf/WallTime.h>
53 #include <wtf/glib/GUniquePtr.h>
54 #include <wtf/glib/RunLoopSourcePriority.h>
55 #include <wtf/text/CString.h>
56
57 #if ENABLE(MEDIA_STREAM) && GST_CHECK_VERSION(1, 10, 0)
58 #include "GStreamerMediaStreamSource.h"
59 #endif
60
61 #if ENABLE(VIDEO_TRACK)
62 #include "AudioTrackPrivateGStreamer.h"
63 #include "InbandMetadataTextTrackPrivateGStreamer.h"
64 #include "InbandTextTrackPrivateGStreamer.h"
65 #include "TextCombinerGStreamer.h"
66 #include "TextSinkGStreamer.h"
67 #include "VideoTrackPrivateGStreamer.h"
68 #endif
69
70 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
71 #define GST_USE_UNSTABLE_API
72 #include <gst/mpegts/mpegts.h>
73 #undef GST_USE_UNSTABLE_API
74 #endif
75 #include <gst/audio/streamvolume.h>
76
77 #if ENABLE(MEDIA_SOURCE)
78 #include "MediaSource.h"
79 #include "WebKitMediaSourceGStreamer.h"
80 #endif
81
82 #if ENABLE(WEB_AUDIO)
83 #include "AudioSourceProviderGStreamer.h"
84 #endif
85
86 GST_DEBUG_CATEGORY_EXTERN(webkit_media_player_debug);
87 #define GST_CAT_DEFAULT webkit_media_player_debug
88
89
90 namespace WebCore {
91 using namespace std;
92
93 static void busMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
94 {
95     player->handleMessage(message);
96 }
97
98 void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
99 {
100     player->setAudioStreamProperties(object);
101 }
102
103 void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
104 {
105     if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
106         return;
107
108     const char* role = m_player->client().mediaPlayerIsVideo() ? "video" : "music";
109     GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr);
110     g_object_set(object, "stream-properties", structure, nullptr);
111     gst_structure_free(structure);
112     GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
113     GST_DEBUG_OBJECT(pipeline(), "Set media.role as %s at %s", role, elementName.get());
114 }
115
116 void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar)
117 {
118     MediaPlayerPrivateGStreamerBase::initializeDebugCategory();
119     if (isAvailable()) {
120         registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamer>(player); },
121             getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
122     }
123 }
124
125 bool MediaPlayerPrivateGStreamer::isAvailable()
126 {
127     if (!initializeGStreamerAndRegisterWebKitElements())
128         return false;
129
130     GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
131     return factory;
132 }
133
134 MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
135     : MediaPlayerPrivateGStreamerBase(player)
136     , m_buffering(false)
137     , m_bufferingPercentage(0)
138     , m_cachedPosition(MediaTime::invalidTime())
139     , m_canFallBackToLastFinishedSeekPosition(false)
140     , m_changingRate(false)
141     , m_downloadFinished(false)
142     , m_errorOccured(false)
143     , m_isEndReached(false)
144     , m_isStreaming(false)
145     , m_durationAtEOS(MediaTime::invalidTime())
146     , m_paused(true)
147     , m_playbackRate(1)
148     , m_requestedState(GST_STATE_VOID_PENDING)
149     , m_resetPipeline(false)
150     , m_seeking(false)
151     , m_seekIsPending(false)
152     , m_seekTime(MediaTime::invalidTime())
153     , m_source(nullptr)
154     , m_volumeAndMuteInitialized(false)
155     , m_mediaLocations(nullptr)
156     , m_mediaLocationCurrentIndex(0)
157     , m_playbackRatePause(false)
158     , m_timeOfOverlappingSeek(MediaTime::invalidTime())
159     , m_lastPlaybackRate(1)
160     , m_fillTimer(*this, &MediaPlayerPrivateGStreamer::fillTimerFired)
161     , m_maxTimeLoaded(MediaTime::zeroTime())
162     , m_preload(player->preload())
163     , m_delayingLoad(false)
164     , m_maxTimeLoadedAtLastDidLoadingProgress(MediaTime::zeroTime())
165     , m_hasVideo(false)
166     , m_hasAudio(false)
167     , m_readyTimerHandler(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::readyTimerFired)
168     , m_totalBytes(0)
169     , m_preservesPitch(false)
170 {
171 #if USE(GLIB)
172     m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
173 #endif
174 }
175
176 MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
177 {
178     GST_DEBUG_OBJECT(pipeline(), "Disposing player");
179
180 #if ENABLE(VIDEO_TRACK)
181     for (auto& track : m_audioTracks.values())
182         track->disconnect();
183
184     for (auto& track : m_textTracks.values())
185         track->disconnect();
186
187     for (auto& track : m_videoTracks.values())
188         track->disconnect();
189 #endif
190     if (m_fillTimer.isActive())
191         m_fillTimer.stop();
192
193     if (m_mediaLocations) {
194         gst_structure_free(m_mediaLocations);
195         m_mediaLocations = nullptr;
196     }
197
198     if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
199         g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);
200
201     if (m_autoAudioSink) {
202         g_signal_handlers_disconnect_by_func(G_OBJECT(m_autoAudioSink.get()),
203             reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);
204     }
205
206     m_readyTimerHandler.stop();
207     for (auto& missingPluginCallback : m_missingPluginCallbacks) {
208         if (missingPluginCallback)
209             missingPluginCallback->invalidate();
210     }
211     m_missingPluginCallbacks.clear();
212
213     if (m_videoSink) {
214         GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
215         g_signal_handlers_disconnect_matched(videoSinkPad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
216     }
217
218     if (m_pipeline) {
219         GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
220         ASSERT(bus);
221         g_signal_handlers_disconnect_by_func(bus.get(), gpointer(busMessageCallback), this);
222         gst_bus_remove_signal_watch(bus.get());
223         gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
224         g_signal_handlers_disconnect_matched(m_pipeline.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
225     }
226 }
227
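// HTTP(S) and blob URLs are rewritten to their "webkit+" variants so that the
// WebKit-provided source elements (for example WebKitWebSrc) handle them instead
// of the stock GStreamer source elements.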
228 static void convertToInternalProtocol(URL& url)
229 {
230     if (url.protocolIsInHTTPFamily() || url.protocolIsBlob())
231         url.setProtocol("webkit+" + url.protocol());
232 }
233
234 void MediaPlayerPrivateGStreamer::setPlaybinURL(const URL& url)
235 {
236     // Clean out everything after the file:// URL path.
237     String cleanURLString(url.string());
238     if (url.isLocalFile())
239         cleanURLString = cleanURLString.substring(0, url.pathEnd());
240
241     m_url = URL(URL(), cleanURLString);
242     convertToInternalProtocol(m_url);
243
244     GST_INFO_OBJECT(pipeline(), "Load %s", m_url.string().utf8().data());
245     g_object_set(m_pipeline.get(), "uri", m_url.string().utf8().data(), nullptr);
246 }
247
248 void MediaPlayerPrivateGStreamer::load(const String& urlString)
249 {
250     loadFull(urlString, nullptr, String());
251 }
252
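// Recursively set the "sync" property on every sink found inside |element|, so a
// whole bin (for example an autoaudiosink) can be toggled between clock-synchronized
// and free-running rendering.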
253 static void setSyncOnClock(GstElement *element, bool sync)
254 {
255     if (!GST_IS_BIN(element)) {
256         g_object_set(element, "sync", sync, nullptr);
257         return;
258     }
259
260     GstIterator* it = gst_bin_iterate_sinks(GST_BIN(element));
261     while (gst_iterator_foreach(it, (GstIteratorForeachFunction)([](const GValue* item, void* syncPtr) {
262         bool* sync = static_cast<bool*>(syncPtr);
263         setSyncOnClock(GST_ELEMENT(g_value_get_object(item)), *sync);
264     }), &sync) == GST_ITERATOR_RESYNC)
265         gst_iterator_resync(it);
266     gst_iterator_free(it);
267 }
268
269 void MediaPlayerPrivateGStreamer::syncOnClock(bool sync)
270 {
271     setSyncOnClock(videoSink(), sync);
272     setSyncOnClock(audioSink(), sync);
273 }
274
275 void MediaPlayerPrivateGStreamer::loadFull(const String& urlString, const gchar* playbinName,
276     const String& pipelineName)
277 {
278     // FIXME: This method is still called even if supportsType() returned
279     // IsNotSupported. This deserves more investigation, but meanwhile make
280     // sure we never try to play animated GIF assets.
281     if (m_player->contentMIMEType() == "image/gif") {
282         loadingFailed(MediaPlayer::FormatError);
283         return;
284     }
285
286     URL url(URL(), urlString);
287     if (url.protocolIsAbout())
288         return;
289
290     if (!m_pipeline)
291         createGSTPlayBin(isMediaSource() ? "playbin" : playbinName, pipelineName);
292     syncOnClock(true);
293     if (m_fillTimer.isActive())
294         m_fillTimer.stop();
295
296     ASSERT(m_pipeline);
297
298     setPlaybinURL(url);
299
300     GST_DEBUG_OBJECT(pipeline(), "preload: %s", convertEnumerationToString(m_preload).utf8().data());
301     if (m_preload == MediaPlayer::None) {
302         GST_INFO_OBJECT(pipeline(), "Delaying load.");
303         m_delayingLoad = true;
304     }
305
306     // Reset the network and ready states. They will be set properly once
307     // the pipeline has pre-rolled.
308     m_networkState = MediaPlayer::Loading;
309     m_player->networkStateChanged();
310     m_readyState = MediaPlayer::HaveNothing;
311     m_player->readyStateChanged();
312     m_volumeAndMuteInitialized = false;
313     m_durationAtEOS = MediaTime::invalidTime();
314     m_hasTaintedOrigin = WTF::nullopt;
315
316     if (!m_delayingLoad)
317         commitLoad();
318 }
319
320 #if ENABLE(MEDIA_SOURCE)
321 void MediaPlayerPrivateGStreamer::load(const String&, MediaSourcePrivateClient*)
322 {
323     // Properly fail so the global MediaPlayer tries to fall back to the next MediaPlayerPrivate.
324     m_networkState = MediaPlayer::FormatError;
325     m_player->networkStateChanged();
326 }
327 #endif
328
329 #if ENABLE(MEDIA_STREAM)
330 void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate& stream)
331 {
332 #if GST_CHECK_VERSION(1, 10, 0)
333     m_streamPrivate = &stream;
334     auto pipelineName = String::format("mediastream_%s_%p",
335         (stream.hasCaptureVideoSource() || stream.hasCaptureAudioSource()) ? "Local" : "Remote", this);
336
337     loadFull(String("mediastream://") + stream.id(), "playbin3", pipelineName);
338     syncOnClock(false);
339
340 #if USE(GSTREAMER_GL)
341     ensureGLVideoSinkContext();
342 #endif
343     m_player->play();
344 #else
345     // Properly fail so the global MediaPlayer tries to fall back to the next MediaPlayerPrivate.
346     m_networkState = MediaPlayer::FormatError;
347     m_player->networkStateChanged();
348     notImplemented();
349 #endif
350 }
351 #endif
352
353 void MediaPlayerPrivateGStreamer::commitLoad()
354 {
355     ASSERT(!m_delayingLoad);
356     GST_DEBUG_OBJECT(pipeline(), "Committing load.");
357
358     // GStreamer needs to have the pipeline set to a paused state to
359     // start providing anything useful.
360     changePipelineState(GST_STATE_PAUSED);
361
362     setDownloadBuffering();
363     updateStates();
364 }
365
366 MediaTime MediaPlayerPrivateGStreamer::playbackPosition() const
367 {
368     if (m_isEndReached && m_seeking)
369         return m_seekTime;
370
371     // This constant should remain lower than HTMLMediaElement's maxTimeupdateEventFrequency.
372     static const Seconds positionCacheThreshold = 200_ms;
373     Seconds now = WTF::WallTime::now().secondsSinceEpoch();
374     if (m_lastQueryTime && (now - m_lastQueryTime.value()) < positionCacheThreshold && m_cachedPosition.isValid())
375         return m_cachedPosition;
376
377     m_lastQueryTime = now;
378
379     // Position is only available if no async state change is going on and the state is either paused or playing.
380     gint64 position = GST_CLOCK_TIME_NONE;
381     GstQuery* query = gst_query_new_position(GST_FORMAT_TIME);
382     if (gst_element_query(m_pipeline.get(), query))
383         gst_query_parse_position(query, 0, &position);
384     gst_query_unref(query);
385
386     GST_TRACE_OBJECT(pipeline(), "Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));
387
388     MediaTime playbackPosition = MediaTime::zeroTime();
389     GstClockTime gstreamerPosition = static_cast<GstClockTime>(position);
390     if (GST_CLOCK_TIME_IS_VALID(gstreamerPosition))
391         playbackPosition = MediaTime(gstreamerPosition, GST_SECOND);
392     else if (m_canFallBackToLastFinishedSeekPosition)
393         playbackPosition = m_seekTime;
394
395     m_cachedPosition = playbackPosition;
396     return playbackPosition;
397 }
398
399 void MediaPlayerPrivateGStreamer::readyTimerFired()
400 {
401     GST_DEBUG_OBJECT(pipeline(), "In READY for too long. Releasing pipeline resources.");
402     changePipelineState(GST_STATE_NULL);
403 }
404
405 bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
406 {
407     ASSERT(m_pipeline);
408
409     GstState currentState;
410     GstState pending;
411
412     gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
413     if (currentState == newState || pending == newState) {
414         GST_DEBUG_OBJECT(pipeline(), "Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
415             gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
416         return true;
417     }
418
419     GST_DEBUG_OBJECT(pipeline(), "Changing state to %s from %s with %s pending", gst_element_state_get_name(newState),
420         gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
421
422 #if USE(GSTREAMER_GL)
423     if (currentState == GST_STATE_READY && newState == GST_STATE_PAUSED)
424         ensureGLVideoSinkContext();
425 #endif
426
427     GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState);
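    // A state change failure is only treated as fatal if we were not already in the
    // neighbouring PAUSED/PLAYING state; failed transitions between PAUSED and
    // PLAYING are tolerated here.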
428     GstState pausedOrPlaying = newState == GST_STATE_PLAYING ? GST_STATE_PAUSED : GST_STATE_PLAYING;
429     if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE)
430         return false;
431
432     // Create a timer when entering the READY state so that we can free resources
433     // if we stay in READY for too long.
434     // Also remove the timer if we request a state change to any state other than READY.
435     // See also https://bugs.webkit.org/show_bug.cgi?id=117354
436     if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
437         // Max interval in seconds to stay in the READY state on manual
438         // state change requests.
439         static const Seconds readyStateTimerDelay { 1_min };
440         m_readyTimerHandler.startOneShot(readyStateTimerDelay);
441     } else if (newState != GST_STATE_READY)
442         m_readyTimerHandler.stop();
443
444     return true;
445 }
446
447 void MediaPlayerPrivateGStreamer::prepareToPlay()
448 {
449     GST_DEBUG_OBJECT(pipeline(), "Prepare to play");
450     m_preload = MediaPlayer::Auto;
451     if (m_delayingLoad) {
452         m_delayingLoad = false;
453         commitLoad();
454     }
455 }
456
457 void MediaPlayerPrivateGStreamer::play()
458 {
459     if (!m_playbackRate) {
460         m_playbackRatePause = true;
461         return;
462     }
463
464     if (changePipelineState(GST_STATE_PLAYING)) {
465         m_isEndReached = false;
466         m_delayingLoad = false;
467         m_preload = MediaPlayer::Auto;
468         setDownloadBuffering();
469         GST_INFO_OBJECT(pipeline(), "Play");
470     } else
471         loadingFailed(MediaPlayer::Empty);
472 }
473
474 void MediaPlayerPrivateGStreamer::pause()
475 {
476     m_playbackRatePause = false;
477     GstState currentState, pendingState;
478     gst_element_get_state(m_pipeline.get(), &currentState, &pendingState, 0);
479     if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
480         return;
481
482     if (changePipelineState(GST_STATE_PAUSED))
483         GST_INFO_OBJECT(pipeline(), "Pause");
484     else
485         loadingFailed(MediaPlayer::Empty);
486 }
487
488 MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
489 {
490     if (!m_pipeline || m_errorOccured)
491         return MediaTime::invalidTime();
492
493     if (m_durationAtEOS.isValid())
494         return m_durationAtEOS;
495
496     // The duration query would fail on a pipeline that has not pre-rolled yet.
497     if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
498         return MediaTime::positiveInfiniteTime();
499
500     gint64 timeLength = 0;
501
502     if (!gst_element_query_duration(m_pipeline.get(), GST_FORMAT_TIME, &timeLength) || !GST_CLOCK_TIME_IS_VALID(timeLength)) {
503         GST_DEBUG_OBJECT(pipeline(), "Time duration query failed for %s", m_url.string().utf8().data());
504         return MediaTime::positiveInfiniteTime();
505     }
506
507     GST_LOG("Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(timeLength));
508
509     return MediaTime(timeLength, GST_SECOND);
510     // FIXME: handle 3.14.9.5 properly
511 }
512
513 MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
514 {
515     if (!m_pipeline || m_errorOccured)
516         return MediaTime::invalidTime();
517
518     if (m_seeking)
519         return m_seekTime;
520
521     return playbackPosition();
522 }
523
524 void MediaPlayerPrivateGStreamer::seek(const MediaTime& mediaTime)
525 {
526     if (!m_pipeline)
527         return;
528
529     if (m_errorOccured)
530         return;
531
532     GST_INFO_OBJECT(pipeline(), "[Seek] seek attempt to %s", toString(mediaTime).utf8().data());
533
534     // Avoid useless seeking.
535     if (mediaTime == currentMediaTime())
536         return;
537
538     MediaTime time = std::min(mediaTime, durationMediaTime());
539
540     if (isLiveStream())
541         return;
542
543     GST_INFO_OBJECT(pipeline(), "[Seek] seeking to %s", toString(time).utf8().data());
544
545     if (m_seeking) {
546         m_timeOfOverlappingSeek = time;
547         if (m_seekIsPending) {
548             m_seekTime = time;
549             return;
550         }
551     }
552
553     GstState state;
554     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
555     if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
556         GST_DEBUG_OBJECT(pipeline(), "[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
557         return;
558     }
559     if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
560         m_seekIsPending = true;
561         if (m_isEndReached) {
562             GST_DEBUG_OBJECT(pipeline(), "[Seek] reset pipeline");
563             m_resetPipeline = true;
564             if (!changePipelineState(GST_STATE_PAUSED))
565                 loadingFailed(MediaPlayer::Empty);
566         }
567     } else {
568         // We can seek now.
569         if (!doSeek(time, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
570             GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(time).utf8().data());
571             return;
572         }
573     }
574
575     m_seeking = true;
576     m_seekTime = time;
577     m_isEndReached = false;
578 }
579
580 bool MediaPlayerPrivateGStreamer::doSeek(const MediaTime& position, float rate, GstSeekFlags seekType)
581 {
582     // Default values for rate >= 0.
583     MediaTime startTime = position, endTime = MediaTime::invalidTime();
584
585     // TODO: This should do more than that: we need to notify the media source
586     // and probably flush the pipeline at least.
587     if (isMediaSource())
588         return true;
589
590     if (rate < 0) {
591         startTime = MediaTime::zeroTime();
592         // If we are at the beginning of the media, start from the end to
593         // avoid an immediate EOS.
594         if (position < MediaTime::zeroTime())
595             endTime = durationMediaTime();
596         else
597             endTime = position;
598     }
599
600     if (!rate)
601         rate = 1.0;
602
603     return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType,
604         GST_SEEK_TYPE_SET, toGstClockTime(startTime), GST_SEEK_TYPE_SET, toGstClockTime(endTime));
605 }
606
607 void MediaPlayerPrivateGStreamer::updatePlaybackRate()
608 {
609     if (!m_changingRate)
610         return;
611
612     GST_INFO_OBJECT(pipeline(), "Set Rate to %f", m_playbackRate);
613
614     // Mute the sound if the playback rate is negative or too extreme and audio pitch is not adjusted.
615     bool mute = m_playbackRate <= 0 || (!m_preservesPitch && (m_playbackRate < 0.8 || m_playbackRate > 2));
616
617     GST_INFO_OBJECT(pipeline(), mute ? "Need to mute audio" : "Do not need to mute audio");
618
619     if (doSeek(playbackPosition(), m_playbackRate, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH))) {
620         g_object_set(m_pipeline.get(), "mute", mute, nullptr);
621         m_lastPlaybackRate = m_playbackRate;
622     } else {
623         m_playbackRate = m_lastPlaybackRate;
624         GST_ERROR("Set rate to %f failed", m_playbackRate);
625     }
626
627     if (m_playbackRatePause) {
628         GstState state;
629         GstState pending;
630
631         gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
632         if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
633             changePipelineState(GST_STATE_PLAYING);
634         m_playbackRatePause = false;
635     }
636
637     m_changingRate = false;
638     m_player->rateChanged();
639 }
640
641 bool MediaPlayerPrivateGStreamer::paused() const
642 {
643     if (m_isEndReached) {
644         GST_DEBUG_OBJECT(pipeline(), "Ignoring pause at EOS");
645         return true;
646     }
647
648     if (m_playbackRatePause) {
649         GST_DEBUG_OBJECT(pipeline(), "Playback rate is 0, simulating PAUSED state");
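        // The pipeline is internally paused while the rate is 0, but "not paused" is
        // reported to the upper layers so that playback resumes automatically once a
        // non-zero rate is set again (see updatePlaybackRate()).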
650         return false;
651     }
652
653     GstState state;
654     gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
655     bool paused = state <= GST_STATE_PAUSED;
656     GST_DEBUG_OBJECT(pipeline(), "Paused: %s", toString(paused).utf8().data());
657     return paused;
658 }
659
660 bool MediaPlayerPrivateGStreamer::seeking() const
661 {
662     return m_seeking;
663 }
664
665 #if GST_CHECK_VERSION(1, 10, 0)
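// CLEAR_TRACKS(m_audioTracks, m_player->removeAudioTrack) expands to a loop that
// calls m_player->removeAudioTrack(*track) for every track in the map and then
// clears the map.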
666 #define CLEAR_TRACKS(tracks, method) \
667     for (auto& track : tracks.values())\
668         method(*track);\
669     tracks.clear();
670
671 void MediaPlayerPrivateGStreamer::clearTracks()
672 {
673 #if ENABLE(VIDEO_TRACK)
674     CLEAR_TRACKS(m_audioTracks, m_player->removeAudioTrack);
675     CLEAR_TRACKS(m_videoTracks, m_player->removeVideoTrack);
676     CLEAR_TRACKS(m_textTracks, m_player->removeTextTrack);
677 #endif // ENABLE(VIDEO_TRACK)
678 }
679 #undef CLEAR_TRACKS
680
681 #if ENABLE(VIDEO_TRACK)
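// CREATE_TRACK(audio, Audio) marks m_hasAudio and, unless the tracks are managed by
// a MediaSource, creates an AudioTrackPrivateGStreamer for stream |i|, registers it
// with the MediaPlayer and records its stream ID if playbin3 flagged the stream as
// selected.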
682 #define CREATE_TRACK(type, Type) \
683     m_has##Type = true; \
684     if (!useMediaSource) {\
685         RefPtr<Type##TrackPrivateGStreamer> track = Type##TrackPrivateGStreamer::create(makeWeakPtr(*this), i, stream); \
686         m_##type##Tracks.add(track->id(), track); \
687         m_player->add##Type##Track(*track);\
688         if (gst_stream_get_stream_flags(stream.get()) & GST_STREAM_FLAG_SELECT) {                                    \
689             m_current##Type##StreamId = String(gst_stream_get_stream_id(stream.get()));                              \
690         }                                                                                                            \
691     }
692
693 FloatSize MediaPlayerPrivateGStreamer::naturalSize() const
694 {
695 #if ENABLE(MEDIA_STREAM)
696     if (!m_isLegacyPlaybin && !m_currentVideoStreamId.isEmpty()) {
697         RefPtr<VideoTrackPrivateGStreamer> videoTrack = m_videoTracks.get(m_currentVideoStreamId);
698
699         if (videoTrack) {
700             auto tags = adoptGRef(gst_stream_get_tags(videoTrack->stream()));
701             gint width, height;
702
703             if (tags && gst_tag_list_get_int(tags.get(), WEBKIT_MEDIA_TRACK_TAG_WIDTH, &width) && gst_tag_list_get_int(tags.get(), WEBKIT_MEDIA_TRACK_TAG_HEIGHT, &height))
704                 return FloatSize(width, height);
705         }
706     }
707 #endif // ENABLE(MEDIA_STREAM)
708
709     return MediaPlayerPrivateGStreamerBase::naturalSize();
710 }
711 #else
712 #define CREATE_TRACK(type, Type) m_has##Type = true;
713 #endif // ENABLE(VIDEO_TRACK)
714
715 void MediaPlayerPrivateGStreamer::updateTracks()
716 {
717     ASSERT(!m_isLegacyPlaybin);
718
719     bool useMediaSource = isMediaSource();
720     unsigned length = gst_stream_collection_get_size(m_streamCollection.get());
721
722     bool oldHasAudio = m_hasAudio;
723     bool oldHasVideo = m_hasVideo;
724     // New stream collections override previous ones.
725     clearTracks();
726     unsigned textTrackIndex = 0;
727     for (unsigned i = 0; i < length; i++) {
728         GRefPtr<GstStream> stream = gst_stream_collection_get_stream(m_streamCollection.get(), i);
729         String streamId(gst_stream_get_stream_id(stream.get()));
730         GstStreamType type = gst_stream_get_stream_type(stream.get());
731
732         GST_DEBUG_OBJECT(pipeline(), "Inspecting %s track with ID %s", gst_stream_type_get_name(type), streamId.utf8().data());
733         if (type & GST_STREAM_TYPE_AUDIO) {
734             CREATE_TRACK(audio, Audio)
735         } else if (type & GST_STREAM_TYPE_VIDEO) {
736             CREATE_TRACK(video, Video)
737         } else if (type & GST_STREAM_TYPE_TEXT && !useMediaSource) {
738 #if ENABLE(VIDEO_TRACK)
739             auto track = InbandTextTrackPrivateGStreamer::create(textTrackIndex++, stream);
740             m_textTracks.add(streamId, track.copyRef());
741             m_player->addTextTrack(track.get());
742 #endif
743         } else
744             GST_WARNING("Unknown track type found for stream %s", streamId.utf8().data());
745     }
746
747     if ((oldHasVideo != m_hasVideo) || (oldHasAudio != m_hasAudio))
748         m_player->characteristicChanged();
749
750     if (m_hasVideo)
751         m_player->sizeChanged();
752
753     m_player->client().mediaPlayerEngineUpdated(m_player);
754 }
755 #endif // GST_CHECK_VERSION(1, 10, 0)
756
757 void MediaPlayerPrivateGStreamer::enableTrack(TrackPrivateBaseGStreamer::TrackType trackType, unsigned index)
758 {
759     // FIXME: Remove isMediaSource() test below when fixing https://bugs.webkit.org/show_bug.cgi?id=182531.
760     if (isMediaSource()) {
761         GST_FIXME_OBJECT(m_pipeline.get(), "Audio/Video/Text track switching is not yet supported by the MSE backend.");
762         return;
763     }
764
765     const char* propertyName;
766     const char* trackTypeAsString;
767     Vector<String> selectedStreams;
768     String selectedStreamId;
769
770 #if GST_CHECK_VERSION(1, 10, 0)
771     GstStream* stream = nullptr;
772
773     if (!m_isLegacyPlaybin) {
774         stream = gst_stream_collection_get_stream(m_streamCollection.get(), index);
775         if (!stream) {
776             GST_WARNING_OBJECT(pipeline(), "No stream to select at index %u", index);
777             return;
778         }
779         selectedStreamId = String::fromUTF8(gst_stream_get_stream_id(stream));
780         selectedStreams.append(selectedStreamId);
781     }
782 #endif // GST_CHECK_VERSION(1, 10, 0)
783
784     switch (trackType) {
785     case TrackPrivateBaseGStreamer::TrackType::Audio:
786         propertyName = "current-audio";
787         trackTypeAsString = "audio";
788         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentAudioStreamId) {
789             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
790             return;
791         }
792
793         if (!m_currentTextStreamId.isEmpty())
794             selectedStreams.append(m_currentTextStreamId);
795         if (!m_currentVideoStreamId.isEmpty())
796             selectedStreams.append(m_currentVideoStreamId);
797         break;
798     case TrackPrivateBaseGStreamer::TrackType::Video:
799         propertyName = "current-video";
800         trackTypeAsString = "video";
801         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentVideoStreamId) {
802             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
803             return;
804         }
805
806         if (!m_currentAudioStreamId.isEmpty())
807             selectedStreams.append(m_currentAudioStreamId);
808         if (!m_currentTextStreamId.isEmpty())
809             selectedStreams.append(m_currentTextStreamId);
810         break;
811     case TrackPrivateBaseGStreamer::TrackType::Text:
812         propertyName = "current-text";
813         trackTypeAsString = "text";
814         if (!selectedStreamId.isEmpty() && selectedStreamId == m_currentTextStreamId) {
815             GST_INFO_OBJECT(pipeline(), "%s stream: %s already selected, not doing anything.", trackTypeAsString, selectedStreamId.utf8().data());
816             return;
817         }
818
819         if (!m_currentAudioStreamId.isEmpty())
820             selectedStreams.append(m_currentAudioStreamId);
821         if (!m_currentVideoStreamId.isEmpty())
822             selectedStreams.append(m_currentVideoStreamId);
823         break;
824     case TrackPrivateBaseGStreamer::TrackType::Unknown:
825     default:
826         ASSERT_NOT_REACHED();
827     }
828
829     GST_INFO_OBJECT(pipeline(), "Enabling %s track with index: %u", trackTypeAsString, index);
830     if (m_isLegacyPlaybin)
831         g_object_set(m_pipeline.get(), propertyName, index, nullptr);
832 #if GST_CHECK_VERSION(1, 10, 0)
833     else {
834         GList* selectedStreamsList = nullptr;
835
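        // playbin3 expects the full set of selected stream IDs in a single
        // select-streams event, which is why the still-selected streams of the other
        // track types were appended above.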
836         for (const auto& streamId : selectedStreams)
837             selectedStreamsList = g_list_append(selectedStreamsList, g_strdup(streamId.utf8().data()));
838
839         // TODO: MSE GstStream API support: https://bugs.webkit.org/show_bug.cgi?id=182531
840         gst_element_send_event(m_pipeline.get(), gst_event_new_select_streams(selectedStreamsList));
841         g_list_free_full(selectedStreamsList, reinterpret_cast<GDestroyNotify>(g_free));
842     }
843 #endif
844 }
845
846 void MediaPlayerPrivateGStreamer::videoChangedCallback(MediaPlayerPrivateGStreamer* player)
847 {
848     player->m_notifier->notify(MainThreadNotification::VideoChanged, [player] {
849         player->notifyPlayerOfVideo();
850     });
851 }
852
853 void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
854 {
855     if (UNLIKELY(!m_pipeline || !m_source))
856         return;
857
858     ASSERT(m_isLegacyPlaybin || isMediaSource());
859
860     gint numTracks = 0;
861     bool useMediaSource = isMediaSource();
862     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
863     g_object_get(element, "n-video", &numTracks, nullptr);
864
865     GST_INFO_OBJECT(pipeline(), "Media has %d video tracks", numTracks);
866
867     bool oldHasVideo = m_hasVideo;
868     m_hasVideo = numTracks > 0;
869     if (oldHasVideo != m_hasVideo)
870         m_player->characteristicChanged();
871
872     if (m_hasVideo)
873         m_player->sizeChanged();
874
875     if (useMediaSource) {
876         GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
877         m_player->client().mediaPlayerEngineUpdated(m_player);
878         return;
879     }
880
881 #if ENABLE(VIDEO_TRACK)
882     Vector<String> validVideoStreams;
883     for (gint i = 0; i < numTracks; ++i) {
884         GRefPtr<GstPad> pad;
885         g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
886         ASSERT(pad);
887
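        // Use a synthetic stream ID derived from the track index; see the matching
        // comment in notifyPlayerOfText() for why the pad's own stream ID is not used.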
888         String streamId = "V" + String::number(i);
889         validVideoStreams.append(streamId);
890         if (i < static_cast<gint>(m_videoTracks.size())) {
891             RefPtr<VideoTrackPrivateGStreamer> existingTrack = m_videoTracks.get(streamId);
892             if (existingTrack) {
893                 existingTrack->setIndex(i);
894                 if (existingTrack->pad() == pad)
895                     continue;
896             }
897         }
898
899         auto track = VideoTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
900         ASSERT(streamId == track->id());
901         m_videoTracks.add(streamId, track.copyRef());
902         m_player->addVideoTrack(track.get());
903     }
904
905     purgeInvalidVideoTracks(validVideoStreams);
906 #endif
907
908     m_player->client().mediaPlayerEngineUpdated(m_player);
909 }
910
911 void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
912 {
913     player->m_notifier->notify(MainThreadNotification::VideoCapsChanged, [player] {
914         player->notifyPlayerOfVideoCaps();
915     });
916 }
917
918 void MediaPlayerPrivateGStreamer::notifyPlayerOfVideoCaps()
919 {
920     m_videoSize = IntSize();
921     m_player->client().mediaPlayerEngineUpdated(m_player);
922 }
923
924 void MediaPlayerPrivateGStreamer::audioChangedCallback(MediaPlayerPrivateGStreamer* player)
925 {
926     player->m_notifier->notify(MainThreadNotification::AudioChanged, [player] {
927         player->notifyPlayerOfAudio();
928     });
929 }
930
931 void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
932 {
933     if (UNLIKELY(!m_pipeline || !m_source))
934         return;
935
936     ASSERT(m_isLegacyPlaybin || isMediaSource());
937
938     gint numTracks = 0;
939     bool useMediaSource = isMediaSource();
940     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
941     g_object_get(element, "n-audio", &numTracks, nullptr);
942
943     GST_INFO_OBJECT(pipeline(), "Media has %d audio tracks", numTracks);
944     bool oldHasAudio = m_hasAudio;
945     m_hasAudio = numTracks > 0;
946     if (oldHasAudio != m_hasAudio)
947         m_player->characteristicChanged();
948
949     if (useMediaSource) {
950         GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
951         m_player->client().mediaPlayerEngineUpdated(m_player);
952         return;
953     }
954
955 #if ENABLE(VIDEO_TRACK)
956     Vector<String> validAudioStreams;
957     for (gint i = 0; i < numTracks; ++i) {
958         GRefPtr<GstPad> pad;
959         g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
960         ASSERT(pad);
961
962         String streamId = "A" + String::number(i);
963         validAudioStreams.append(streamId);
964         if (i < static_cast<gint>(m_audioTracks.size())) {
965             RefPtr<AudioTrackPrivateGStreamer> existingTrack = m_audioTracks.get(streamId);
966             if (existingTrack) {
967                 existingTrack->setIndex(i);
968                 if (existingTrack->pad() == pad)
969                     continue;
970             }
971         }
972
973         auto track = AudioTrackPrivateGStreamer::create(makeWeakPtr(*this), i, pad);
974         ASSERT(streamId == track->id());
975         m_audioTracks.add(streamId, track);
976         m_player->addAudioTrack(*track);
977     }
978
979     purgeInvalidAudioTracks(validAudioStreams);
980 #endif
981
982     m_player->client().mediaPlayerEngineUpdated(m_player);
983 }
984
985 #if ENABLE(VIDEO_TRACK)
986 void MediaPlayerPrivateGStreamer::textChangedCallback(MediaPlayerPrivateGStreamer* player)
987 {
988     player->m_notifier->notify(MainThreadNotification::TextChanged, [player] {
989         player->notifyPlayerOfText();
990     });
991 }
992
993 void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
994 {
995     if (UNLIKELY(!m_pipeline || !m_source))
996         return;
997
998     ASSERT(m_isLegacyPlaybin || isMediaSource());
999
1000     gint numTracks = 0;
1001     bool useMediaSource = isMediaSource();
1002     GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
1003     g_object_get(element, "n-text", &numTracks, nullptr);
1004
1005     GST_INFO_OBJECT(pipeline(), "Media has %d text tracks", numTracks);
1006
1007     if (useMediaSource) {
1008         GST_DEBUG_OBJECT(pipeline(), "Tracks managed by source element. Bailing out now.");
1009         return;
1010     }
1011
1012     Vector<String> validTextStreams;
1013     for (gint i = 0; i < numTracks; ++i) {
1014         GRefPtr<GstPad> pad;
1015         g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
1016         ASSERT(pad);
1017
1018         // We can't assume the pad has a sticky event here, as is done in
1019         // InbandTextTrackPrivateGStreamer, because it might be emitted after the
1020         // track was created. So fall back to a dummy stream ID, as in the audio
1021         // and video tracks.
1022         String streamId = "T" + String::number(i);
1023
1024         validTextStreams.append(streamId);
1025         if (i < static_cast<gint>(m_textTracks.size())) {
1026             RefPtr<InbandTextTrackPrivateGStreamer> existingTrack = m_textTracks.get(streamId);
1027             if (existingTrack) {
1028                 existingTrack->setIndex(i);
1029                 if (existingTrack->pad() == pad)
1030                     continue;
1031             }
1032         }
1033
1034         auto track = InbandTextTrackPrivateGStreamer::create(i, pad);
1035         m_textTracks.add(streamId, track.copyRef());
1036         m_player->addTextTrack(track.get());
1037     }
1038
1039     purgeInvalidTextTracks(validTextStreams);
1040 }
1041
1042 GstFlowReturn MediaPlayerPrivateGStreamer::newTextSampleCallback(MediaPlayerPrivateGStreamer* player)
1043 {
1044     player->newTextSample();
1045     return GST_FLOW_OK;
1046 }
1047
1048 void MediaPlayerPrivateGStreamer::newTextSample()
1049 {
1050     if (!m_textAppSink)
1051         return;
1052
1053     GRefPtr<GstEvent> streamStartEvent = adoptGRef(
1054         gst_pad_get_sticky_event(m_textAppSinkPad.get(), GST_EVENT_STREAM_START, 0));
1055
1056     GRefPtr<GstSample> sample;
1057     g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr);
1058     ASSERT(sample);
1059
1060     if (streamStartEvent) {
1061         bool found = false;
1062         const gchar* id;
1063         gst_event_parse_stream_start(streamStartEvent.get(), &id);
1064         for (auto& track : m_textTracks.values()) {
1065             if (!strcmp(track->streamId().utf8().data(), id)) {
1066                 track->handleSample(sample);
1067                 found = true;
1068                 break;
1069             }
1070         }
1071         if (!found)
1072             GST_WARNING("Got sample with unknown stream ID %s.", id);
1073     } else
1074         GST_WARNING("Unable to handle sample with no stream start event.");
1075 }
1076 #endif
1077
1078 void MediaPlayerPrivateGStreamer::setRate(float rate)
1079 {
1080     // Higher rates cause a crash.
1081     rate = clampTo(rate, -20.0, 20.0);
1082
1083     // Avoid useless playback rate update.
1084     if (m_playbackRate == rate) {
1085         // But still make sure the upper layers are notified if they are out of sync with the rate that was already set.
1086
1087         if (!m_changingRate && m_player->rate() != m_playbackRate)
1088             m_player->rateChanged();
1089         return;
1090     }
1091
1092     if (isLiveStream()) {
1093         // Notify the upper layers that we cannot handle the passed rate.
1094         m_changingRate = false;
1095         m_player->rateChanged();
1096         return;
1097     }
1098
1099     GstState state;
1100     GstState pending;
1101
1102     m_playbackRate = rate;
1103     m_changingRate = true;
1104
1105     gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
1106
1107     if (!rate) {
1108         m_changingRate = false;
1109         m_playbackRatePause = true;
1110         if (state != GST_STATE_PAUSED && pending != GST_STATE_PAUSED)
1111             changePipelineState(GST_STATE_PAUSED);
1112         return;
1113     }
1114
1115     if ((state != GST_STATE_PLAYING && state != GST_STATE_PAUSED)
1116         || (pending == GST_STATE_PAUSED))
1117         return;
1118
1119     updatePlaybackRate();
1120 }
1121
1122 double MediaPlayerPrivateGStreamer::rate() const
1123 {
1124     return m_playbackRate;
1125 }
1126
1127 void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
1128 {
1129     m_preservesPitch = preservesPitch;
1130 }
1131
1132 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
1133 {
1134     auto timeRanges = std::make_unique<PlatformTimeRanges>();
1135     if (m_errorOccured || isLiveStream())
1136         return timeRanges;
1137
1138     MediaTime mediaDuration = durationMediaTime();
1139     if (!mediaDuration || mediaDuration.isPositiveInfinite())
1140         return timeRanges;
1141
1142     GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
1143
1144     if (!gst_element_query(m_pipeline.get(), query)) {
1145         gst_query_unref(query);
1146         return timeRanges;
1147     }
1148
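    // The buffering query reports ranges as percentages of the whole media, so scale
    // them against the duration to build actual time ranges.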
1149     guint numBufferingRanges = gst_query_get_n_buffering_ranges(query);
1150     for (guint index = 0; index < numBufferingRanges; index++) {
1151         gint64 rangeStart = 0, rangeStop = 0;
1152         if (gst_query_parse_nth_buffering_range(query, index, &rangeStart, &rangeStop)) {
1153             uint64_t startTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStart, GST_FORMAT_PERCENT_MAX);
1154             uint64_t stopTime = gst_util_uint64_scale_int_round(toGstUnsigned64Time(mediaDuration), rangeStop, GST_FORMAT_PERCENT_MAX);
1155             timeRanges->add(MediaTime(startTime, GST_SECOND), MediaTime(stopTime, GST_SECOND));
1156         }
1157     }
1158
1159     // Fall back to the more general maxTimeLoaded() if no range has
1160     // been found.
1161     if (!timeRanges->length()) {
1162         MediaTime loaded = maxTimeLoaded();
1163         if (loaded.isValid() && loaded)
1164             timeRanges->add(MediaTime::zeroTime(), loaded);
1165     }
1166
1167     gst_query_unref(query);
1168
1169     return timeRanges;
1170 }
1171
1172 void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
1173 {
1174     GUniqueOutPtr<GError> err;
1175     GUniqueOutPtr<gchar> debug;
1176     MediaPlayer::NetworkState error;
1177     bool issueError = true;
1178     bool attemptNextLocation = false;
1179     const GstStructure* structure = gst_message_get_structure(message);
1180     GstState requestedState, currentState;
1181
1182     m_canFallBackToLastFinishedSeekPosition = false;
1183
1184     if (structure) {
1185         const gchar* messageTypeName = gst_structure_get_name(structure);
1186
1187         // Redirect messages are sent from elements, like qtdemux, to
1188         // notify of the new location(s) of the media.
1189         if (!g_strcmp0(messageTypeName, "redirect")) {
1190             mediaLocationChanged(message);
1191             return;
1192         }
1193     }
1194
1195     // We ignore state changes from internal elements. They are forwarded to playbin2 anyway.
1196     bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get());
1197
1198     GST_LOG("Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
1199     switch (GST_MESSAGE_TYPE(message)) {
1200     case GST_MESSAGE_ERROR:
1201         if (m_resetPipeline || !m_missingPluginCallbacks.isEmpty() || m_errorOccured)
1202             break;
1203         gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
1204         GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());
1205
1206         GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");
1207
1208         error = MediaPlayer::Empty;
1209         if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_CODEC_NOT_FOUND)
1210             || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE)
1211             || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED)
1212             || g_error_matches(err.get(), GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)
1213             || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
1214             error = MediaPlayer::FormatError;
1215         else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
1216             // Let the mediaPlayerClient handle the stream error; in
1217             // this case the HTMLMediaElement will emit a stalled
1218             // event.
1219             GST_ERROR("Decode error, let the Media element emit a stalled event.");
1220             m_loadingStalled = true;
1221             break;
1222         } else if (err->domain == GST_STREAM_ERROR) {
1223             error = MediaPlayer::DecodeError;
1224             attemptNextLocation = true;
1225         } else if (err->domain == GST_RESOURCE_ERROR)
1226             error = MediaPlayer::NetworkError;
1227
1228         if (attemptNextLocation)
1229             issueError = !loadNextLocation();
1230         if (issueError) {
1231             m_errorOccured = true;
1232             if (m_networkState != error) {
1233                 m_networkState = error;
1234                 m_player->networkStateChanged();
1235             }
1236         }
1237         break;
1238     case GST_MESSAGE_EOS:
1239         didEnd();
1240         break;
1241     case GST_MESSAGE_ASYNC_DONE:
1242         if (!messageSourceIsPlaybin || m_delayingLoad)
1243             break;
1244         asyncStateChangeDone();
1245         break;
1246     case GST_MESSAGE_STATE_CHANGED: {
1247         if (!messageSourceIsPlaybin || m_delayingLoad)
1248             break;
1249         updateStates();
1250
1251         // Construct a filename for the graphviz dot file output.
1252         GstState newState;
1253         gst_message_parse_state_changed(message, &currentState, &newState, nullptr);
1254         CString dotFileName = makeString(GST_OBJECT_NAME(m_pipeline.get()), '.',
1255             gst_element_state_get_name(currentState), '_', gst_element_state_get_name(newState)).utf8();
1256         GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());
1257
1258         break;
1259     }
1260     case GST_MESSAGE_BUFFERING:
1261         processBufferingStats(message);
1262         break;
1263     case GST_MESSAGE_DURATION_CHANGED:
1264         // Duration in MSE is managed by MediaSource, SourceBuffer and AppendPipeline.
1265         if (messageSourceIsPlaybin && !isMediaSource())
1266             durationChanged();
1267         break;
1268     case GST_MESSAGE_REQUEST_STATE:
1269         gst_message_parse_request_state(message, &requestedState);
1270         gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND);
1271         if (requestedState < currentState) {
1272             GST_INFO_OBJECT(pipeline(), "Element %s requested state change to %s", GST_MESSAGE_SRC_NAME(message),
1273                 gst_element_state_get_name(requestedState));
1274             m_requestedState = requestedState;
1275             if (!changePipelineState(requestedState))
1276                 loadingFailed(MediaPlayer::Empty);
1277         }
1278         break;
1279     case GST_MESSAGE_CLOCK_LOST:
1280         // This can only happen in PLAYING state and we should just
1281         // get a new clock by moving back to PAUSED and then to
1282         // PLAYING again.
1283         // This can happen if the stream that ends in a sink that
1284         // provides the current clock disappears, for example if
1285         // the audio sink provides the clock and the audio stream
1286         // is disabled. It also happens relatively often with
1287         // HTTP adaptive streams when switching between different
1288         // variants of a stream.
1289         gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
1290         gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
1291         break;
1292     case GST_MESSAGE_LATENCY:
1293         // Recalculate the latency, we don't need any special handling
1294         // here other than the GStreamer default.
1295         // This can happen if the latency of live elements changes, or
1296         // for one reason or another a new live element is added or
1297         // removed from the pipeline.
1298         gst_bin_recalculate_latency(GST_BIN(m_pipeline.get()));
1299         break;
1300     case GST_MESSAGE_ELEMENT:
1301         if (gst_is_missing_plugin_message(message)) {
1302             if (gst_install_plugins_supported()) {
1303                 auto missingPluginCallback = MediaPlayerRequestInstallMissingPluginsCallback::create([weakThis = makeWeakPtr(*this)](uint32_t result, MediaPlayerRequestInstallMissingPluginsCallback& missingPluginCallback) {
1304                     if (!weakThis) {
1305                         GST_INFO("got missing plugin installation callback in destroyed player with result %u", result);
1306                         return;
1307                     }
1308
1309                     GST_DEBUG("got missing plugin installation callback with result %u", result);
1310                     RefPtr<MediaPlayerRequestInstallMissingPluginsCallback> protectedMissingPluginCallback = &missingPluginCallback;
1311                     weakThis->m_missingPluginCallbacks.removeFirst(protectedMissingPluginCallback);
1312                     if (result != GST_INSTALL_PLUGINS_SUCCESS)
1313                         return;
1314
1315                     weakThis->changePipelineState(GST_STATE_READY);
1316                     weakThis->changePipelineState(GST_STATE_PAUSED);
1317                 });
1318                 m_missingPluginCallbacks.append(missingPluginCallback.copyRef());
1319                 GUniquePtr<char> detail(gst_missing_plugin_message_get_installer_detail(message));
1320                 GUniquePtr<char> description(gst_missing_plugin_message_get_description(message));
1321                 m_player->client().requestInstallMissingPlugins(String::fromUTF8(detail.get()), String::fromUTF8(description.get()), missingPluginCallback.get());
1322             }
1323         }
1324 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
1325         else if (GstMpegtsSection* section = gst_message_parse_mpegts_section(message)) {
1326             processMpegTsSection(section);
1327             gst_mpegts_section_unref(section);
1328         }
1329 #endif
1330 #if ENABLE(ENCRYPTED_MEDIA)
1331         else if (gst_structure_has_name(structure, "drm-waiting-for-key")) {
1332             GST_DEBUG_OBJECT(pipeline(), "drm-waiting-for-key message from %s", GST_MESSAGE_SRC_NAME(message));
1333             setWaitingForKey(true);
1334             // FIXME: The decryptors should be able to attempt to decrypt after being created and linked in a pipeline, but currently they cannot, and the current
1335             // architecture does not make this very easy. Fortunately, the architecture will change soon, so it does not pay off to fix this now with something that could be
1336             // more convoluted. In the meantime, force an attempt to decrypt when they get blocked.
1337             attemptToDecryptWithLocalInstance();
1338         } else if (gst_structure_has_name(structure, "drm-key-received")) {
1339             GST_DEBUG_OBJECT(pipeline(), "drm-key-received message from %s", GST_MESSAGE_SRC_NAME(message));
1340             setWaitingForKey(false);
1341         }
1342 #endif
1343         else if (gst_structure_has_name(structure, "http-headers")) {
1344             GstStructure* responseHeaders;
1345             if (gst_structure_get(structure, "response-headers", GST_TYPE_STRUCTURE, &responseHeaders, nullptr)) {
1346                 if (!gst_structure_has_field(responseHeaders, httpHeaderNameString(HTTPHeaderName::ContentLength).utf8().data())) {
1347                     GST_INFO_OBJECT(pipeline(), "Live stream detected. Disabling on-disk buffering");
1348                     m_isStreaming = true;
1349                     setDownloadBuffering();
1350                 }
1351                 gst_structure_free(responseHeaders);
1352             }
1353         } else if (gst_structure_has_name(structure, "adaptive-streaming-statistics")) {
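            // Update the origin-tainting state based on the URI of the last
            // adaptive-streaming fragment reported by the source element.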
1354             if (WEBKIT_IS_WEB_SRC(m_source.get()))
1355                 if (const char* uri = gst_structure_get_string(structure, "uri"))
1356                     m_hasTaintedOrigin = webKitSrcWouldTaintOrigin(WEBKIT_WEB_SRC(m_source.get()), SecurityOrigin::create(URL(URL(), uri)));
1357         } else
1358             GST_DEBUG_OBJECT(pipeline(), "Unhandled element message: %" GST_PTR_FORMAT, structure);
1359         break;
1360 #if ENABLE(VIDEO_TRACK)
1361     case GST_MESSAGE_TOC:
1362         processTableOfContents(message);
1363         break;
1364 #endif
1365     case GST_MESSAGE_TAG: {
1366         GstTagList* tags = nullptr;
1367         GUniqueOutPtr<gchar> tag;
1368         gst_message_parse_tag(message, &tags);
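        // Honor the image-orientation tag (for example rotation metadata demuxed from
        // MP4 files) by rotating the rendered video accordingly.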
1369         if (gst_tag_list_get_string(tags, GST_TAG_IMAGE_ORIENTATION, &tag.outPtr())) {
1370             if (!g_strcmp0(tag.get(), "rotate-90"))
1371                 setVideoSourceOrientation(ImageOrientation(OriginRightTop));
1372             else if (!g_strcmp0(tag.get(), "rotate-180"))
1373                 setVideoSourceOrientation(ImageOrientation(OriginBottomRight));
1374             else if (!g_strcmp0(tag.get(), "rotate-270"))
1375                 setVideoSourceOrientation(ImageOrientation(OriginLeftBottom));
1376         }
1377         gst_tag_list_unref(tags);
1378         break;
1379     }
1380 #if GST_CHECK_VERSION(1, 10, 0)
1381     case GST_MESSAGE_STREAMS_SELECTED: {
1382         GRefPtr<GstStreamCollection> collection;
1383         gst_message_parse_streams_selected(message, &collection.outPtr());
1384
1385         if (!collection)
1386             break;
1387
1388         m_streamCollection.swap(collection);
1389         m_currentAudioStreamId = "";
1390         m_currentVideoStreamId = "";
1391         m_currentTextStreamId = "";
1392
1393         unsigned length = gst_message_streams_selected_get_size(message);
1394         for (unsigned i = 0; i < length; i++) {
1395             GRefPtr<GstStream> stream = gst_message_streams_selected_get_stream(message, i);
1396             if (!stream)
1397                 continue;
1398
1399             GstStreamType type = gst_stream_get_stream_type(stream.get());
1400             String streamId(gst_stream_get_stream_id(stream.get()));
1401
1402             GST_DEBUG_OBJECT(pipeline(), "Selecting %s track with ID: %s", gst_stream_type_get_name(type), streamId.utf8().data());
1403             // Playbin3 can send more than one selected stream of the same type
1404             // but there's no priority or ordering system in place, so we assume
1405             // the selected stream is the last one as reported by playbin3.
1406             if (type & GST_STREAM_TYPE_AUDIO) {
1407                 m_currentAudioStreamId = streamId;
1408                 auto track = m_audioTracks.get(m_currentAudioStreamId);
1409                 ASSERT(track);
1410                 track->markAsActive();
1411             } else if (type & GST_STREAM_TYPE_VIDEO) {
1412                 m_currentVideoStreamId = streamId;
1413                 auto track = m_videoTracks.get(m_currentVideoStreamId);
1414                 ASSERT(track);
1415                 track->markAsActive();
1416             } else if (type & GST_STREAM_TYPE_TEXT)
1417                 m_currentTextStreamId = streamId;
1418             else
1419                 GST_WARNING("Unknown stream type with stream-id %s", streamId.utf8().data());
1420         }
1421         break;
1422     }
1423 #endif
1424     default:
1425         GST_DEBUG_OBJECT(pipeline(), "Unhandled GStreamer message type: %s", GST_MESSAGE_TYPE_NAME(message));
1426         break;
1427     }
1428 }
1429
1430 void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message)
1431 {
1432     m_buffering = true;
1433     gst_message_parse_buffering(message, &m_bufferingPercentage);
1434
1435     GST_DEBUG_OBJECT(pipeline(), "[Buffering] Buffering: %d%%.", m_bufferingPercentage);
1436
1437     if (m_bufferingPercentage == 100)
1438         updateStates();
1439 }
1440
1441 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
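// Handles an MPEG-TS section pushed by the pipeline. PMT sections rebuild the set of in-band
// metadata text tracks (one per private-data PID); any other section is forwarded as a data cue
// on the track matching its PID. As a rough illustration of the dispatch-type rule quoted below
// (example values only), a PMT entry with stream_type 0x05 and descriptor bytes { 0x26, 0x0D }
// would produce the dispatch type "05260D", using the same helpers as the loop below:
//
//     String dispatchType;
//     appendUnsignedAsHexFixedSize(0x05, dispatchType, 2); // "05"
//     appendByteAsHex(0x26, dispatchType);                 // "0526"
//     appendByteAsHex(0x0D, dispatchType);                 // "05260D"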
1442 void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section)
1443 {
1444     ASSERT(section);
1445
1446     if (section->section_type == GST_MPEGTS_SECTION_PMT) {
1447         const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
1448         m_metadataTracks.clear();
1449         for (guint i = 0; i < pmt->streams->len; ++i) {
1450             const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
1451             if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
1452                 AtomicString pid = String::number(stream->pid);
1453                 auto track = InbandMetadataTextTrackPrivateGStreamer::create(
1454                     InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data, pid);
1455
1456                 // 4.7.10.12.2 Sourcing in-band text tracks
1457                 // If the new text track's kind is metadata, then set the text track in-band metadata track dispatch
1458                 // type as follows, based on the type of the media resource:
1459                 // Let stream type be the value of the "stream_type" field describing the text track's type in the
1460                 // file's program map section, interpreted as an 8-bit unsigned integer. Let length be the value of
1461                 // the "ES_info_length" field for the track in the same part of the program map section, interpreted
1462                 // as an integer as defined by the MPEG-2 specification. Let descriptor bytes be the length bytes
1463                 // following the "ES_info_length" field. The text track in-band metadata track dispatch type must be
1464                 // set to the concatenation of the stream type byte and the zero or more descriptor bytes bytes,
1465                 // expressed in hexadecimal using uppercase ASCII hex digits.
1466                 String inbandMetadataTrackDispatchType;
1467                 appendUnsignedAsHexFixedSize(stream->stream_type, inbandMetadataTrackDispatchType, 2);
1468                 for (guint j = 0; j < stream->descriptors->len; ++j) {
1469                     const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
1470                     for (guint k = 0; k < descriptor->length; ++k)
1471                         appendByteAsHex(descriptor->data[k], inbandMetadataTrackDispatchType);
1472                 }
1473                 track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType);
1474
1475                 m_metadataTracks.add(pid, track);
1476                 m_player->addTextTrack(*track);
1477             }
1478         }
1479     } else {
1480         AtomicString pid = String::number(section->pid);
1481         RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = m_metadataTracks.get(pid);
1482         if (!track)
1483             return;
1484
1485         GRefPtr<GBytes> data = gst_mpegts_section_get_data(section);
1486         gsize size;
1487         const void* bytes = g_bytes_get_data(data.get(), &size);
1488
1489         track->addDataCue(currentMediaTime(), currentMediaTime(), bytes, size);
1490     }
1491 }
1492 #endif
1493
1494 #if ENABLE(VIDEO_TRACK)
1495 void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
1496 {
1497     if (m_chaptersTrack)
1498         m_player->removeTextTrack(*m_chaptersTrack);
1499
1500     m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Chapters, InbandTextTrackPrivate::Generic);
1501     m_player->addTextTrack(*m_chaptersTrack);
1502
1503     GRefPtr<GstToc> toc;
1504     gboolean updated;
1505     gst_message_parse_toc(message, &toc.outPtr(), &updated);
1506     ASSERT(toc);
1507
1508     for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next)
1509         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1510 }
1511
1512 void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry)
1513 {
1514     ASSERT(entry);
1515
1516     auto cue = GenericCueData::create();
1517
1518     gint64 start = -1, stop = -1;
1519     gst_toc_entry_get_start_stop_times(entry, &start, &stop);
1520     if (start != -1)
1521         cue->setStartTime(MediaTime(start, GST_SECOND));
1522     if (stop != -1)
1523         cue->setEndTime(MediaTime(stop, GST_SECOND));
1524
1525     GstTagList* tags = gst_toc_entry_get_tags(entry);
1526     if (tags) {
1527         gchar* title = nullptr;
1528         gst_tag_list_get_string(tags, GST_TAG_TITLE, &title);
1529         if (title) {
1530             cue->setContent(title);
1531             g_free(title);
1532         }
1533     }
1534
1535     m_chaptersTrack->addGenericCue(cue);
1536
1537     for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next)
1538         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1539 }
1540
1541 void MediaPlayerPrivateGStreamer::purgeInvalidAudioTracks(Vector<String> validTrackIds)
1542 {
1543     m_audioTracks.removeIf([validTrackIds](auto& keyAndValue) {
1544         return !validTrackIds.contains(keyAndValue.key);
1545     });
1546 }
1547
1548 void MediaPlayerPrivateGStreamer::purgeInvalidVideoTracks(Vector<String> validTrackIds)
1549 {
1550     m_videoTracks.removeIf([validTrackIds](auto& keyAndValue) {
1551         return !validTrackIds.contains(keyAndValue.key);
1552     });
1553 }
1554
1555 void MediaPlayerPrivateGStreamer::purgeInvalidTextTracks(Vector<String> validTrackIds)
1556 {
1557     m_textTracks.removeIf([validTrackIds](auto& keyAndValue) {
1558         return !validTrackIds.contains(keyAndValue.key);
1559     });
1560 }
1561 #endif
1562
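// Predicate for gst_iterator_find_custom(): following GCompareFunc semantics it returns 0 on a
// match, that is when the visited element is a queue whose grandparent element name starts with
// "hlsdemux" (the queue hlsdemux fills with downloaded fragments), and a non-zero value otherwise.
// This is part of the buffering workaround for HLS pipelines used by fillTimerFired() below.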
1563 static gint findHLSQueue(gconstpointer a, gconstpointer)
1564 {
1565     GValue* item = static_cast<GValue*>(const_cast<gpointer>(a));
1566     GstElement* element = GST_ELEMENT(g_value_get_object(item));
1567     if (g_str_has_prefix(GST_ELEMENT_NAME(element), "queue")) {
1568         GstElement* parent = GST_ELEMENT(GST_ELEMENT_PARENT(element));
1569         if (!GST_IS_OBJECT(parent))
1570             return 1;
1571
1572         if (g_str_has_prefix(GST_ELEMENT_NAME(GST_ELEMENT_PARENT(parent)), "hlsdemux"))
1573             return 0;
1574     }
1575
1576     return 1;
1577 }
1578
1579 static bool isHLSProgressing(GstElement* playbin, GstQuery* query)
1580 {
1581     GValue item = { };
1582     GstIterator* binIterator = gst_bin_iterate_recurse(GST_BIN(playbin));
1583     bool foundHLSQueue = gst_iterator_find_custom(binIterator, reinterpret_cast<GCompareFunc>(findHLSQueue), &item, nullptr);
1584     gst_iterator_free(binIterator);
1585
1586     if (!foundHLSQueue)
1587         return false;
1588
1589     GstElement* queueElement = GST_ELEMENT(g_value_get_object(&item));
1590     bool queryResult = gst_element_query(queueElement, query);
1591     g_value_unset(&item);
1592
1593     return queryResult;
1594 }
1595
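// Periodic (200ms, see setDownloadBuffering()) poll of the on-disk buffering level while the
// playbin "download" flag is enabled. The buffering query runs in GST_FORMAT_PERCENT, whose stop
// value is expressed against GST_FORMAT_PERCENT_MAX (1000000). A rough worked example of the
// conversion performed below, assuming the query reported a stop value of 250000:
//
//     gdouble fillStatus = 100.0 * 250000 / GST_FORMAT_PERCENT_MAX; // 25%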
1596 void MediaPlayerPrivateGStreamer::fillTimerFired()
1597 {
1598     GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
1599
1600     if (G_UNLIKELY(!gst_element_query(m_pipeline.get(), query))) {
1601         // This query always fails for live pipelines. In the case of HLS, try to find
1602         // the queue inside the HLS element and use it as a proxy measure of progress. Note
1603         // that the percentage value is rather meaningless as used below.
1604         // This is a hack, see https://bugs.webkit.org/show_bug.cgi?id=141469.
1605         if (!isHLSProgressing(m_pipeline.get(), query)) {
1606             gst_query_unref(query);
1607             return;
1608         }
1609     }
1610
1611     gint64 start, stop;
1612     gdouble fillStatus = 100.0;
1613
1614     gst_query_parse_buffering_range(query, nullptr, &start, &stop, nullptr);
1615     gst_query_unref(query);
1616
1617     if (stop != -1)
1618         fillStatus = 100.0 * stop / GST_FORMAT_PERCENT_MAX;
1619
1620     GST_DEBUG_OBJECT(pipeline(), "[Buffering] Download buffer filled up to %f%%", fillStatus);
1621
1622     MediaTime mediaDuration = durationMediaTime();
1623
1624     // Update maxTimeLoaded only if the media duration is
1625     // available. Otherwise we can't compute it.
1626     if (mediaDuration) {
1627         if (fillStatus == 100.0)
1628             m_maxTimeLoaded = mediaDuration;
1629         else
1630             m_maxTimeLoaded = MediaTime(fillStatus * static_cast<double>(toGstUnsigned64Time(mediaDuration)) / 100, GST_SECOND);
1631         GST_DEBUG_OBJECT(pipeline(), "[Buffering] Updated maxTimeLoaded: %s", toString(m_maxTimeLoaded).utf8().data());
1632     }
1633
1634     m_downloadFinished = fillStatus == 100.0;
1635     if (!m_downloadFinished) {
1636         updateStates();
1637         return;
1638     }
1639
1640     // The media is now fully loaded: it will keep playing even if the
1641     // network connection is cut. Buffering is done, so remove the fill
1642     // timer from the main loop.
1643     m_fillTimer.stop();
1644     updateStates();
1645 }
1646
1647 MediaTime MediaPlayerPrivateGStreamer::maxMediaTimeSeekable() const
1648 {
1649     if (m_errorOccured)
1650         return MediaTime::zeroTime();
1651
1652     MediaTime duration = durationMediaTime();
1653     GST_DEBUG_OBJECT(pipeline(), "maxMediaTimeSeekable, duration: %s", toString(duration).utf8().data());
1654     // An infinite duration means this is a live stream.
1655     if (duration.isPositiveInfinite())
1656         return MediaTime::zeroTime();
1657
1658     return duration;
1659 }
1660
1661 MediaTime MediaPlayerPrivateGStreamer::maxTimeLoaded() const
1662 {
1663     if (m_errorOccured)
1664         return MediaTime::zeroTime();
1665
1666     MediaTime loaded = m_maxTimeLoaded;
1667     if (m_isEndReached)
1668         loaded = durationMediaTime();
1669     GST_LOG("maxTimeLoaded: %s", toString(loaded).utf8().data());
1670     return loaded;
1671 }
1672
1673 bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
1674 {
1675     if (m_errorOccured || m_loadingStalled)
1676         return false;
1677
1678     if (isLiveStream())
1679         return true;
1680
1681     if (UNLIKELY(!m_pipeline || !durationMediaTime() || (!isMediaSource() && !totalBytes())))
1682         return false;
1683
1684     MediaTime currentMaxTimeLoaded = maxTimeLoaded();
1685     bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
1686     m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
1687     GST_LOG("didLoadingProgress: %s", toString(didLoadingProgress).utf8().data());
1688     return didLoadingProgress;
1689 }
1690
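// Returns the total size of the media in bytes, caching the result in m_totalBytes. A duration
// query in GST_FORMAT_BYTES on the source element is tried first; if it fails, the source pads
// are iterated and the largest per-pad byte duration wins. A zero length flags the source as
// streaming (m_isStreaming).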
1691 unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
1692 {
1693     if (m_errorOccured)
1694         return 0;
1695
1696     if (m_totalBytes)
1697         return m_totalBytes;
1698
1699     if (!m_source)
1700         return 0;
1701
1702     if (isLiveStream())
1703         return 0;
1704
1705     GstFormat fmt = GST_FORMAT_BYTES;
1706     gint64 length = 0;
1707     if (gst_element_query_duration(m_source.get(), fmt, &length)) {
1708         GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
1709         m_totalBytes = static_cast<unsigned long long>(length);
1710         m_isStreaming = !length;
1711         return m_totalBytes;
1712     }
1713
1714     // Fall back to querying the source pads manually.
1715     // See also https://bugzilla.gnome.org/show_bug.cgi?id=638749
1716     GstIterator* iter = gst_element_iterate_src_pads(m_source.get());
1717     bool done = false;
1718     while (!done) {
1719         GValue item = G_VALUE_INIT;
1720         switch (gst_iterator_next(iter, &item)) {
1721         case GST_ITERATOR_OK: {
1722             GstPad* pad = static_cast<GstPad*>(g_value_get_object(&item));
1723             gint64 padLength = 0;
1724             if (gst_pad_query_duration(pad, fmt, &padLength) && padLength > length)
1725                 length = padLength;
1726             break;
1727         }
1728         case GST_ITERATOR_RESYNC:
1729             gst_iterator_resync(iter);
1730             break;
1731         case GST_ITERATOR_ERROR:
1732             FALLTHROUGH;
1733         case GST_ITERATOR_DONE:
1734             done = true;
1735             break;
1736         }
1737
1738         g_value_unset(&item);
1739     }
1740
1741     gst_iterator_free(iter);
1742
1743     GST_INFO_OBJECT(pipeline(), "totalBytes %" G_GINT64_FORMAT, length);
1744     m_totalBytes = static_cast<unsigned long long>(length);
1745     m_isStreaming = !length;
1746     return m_totalBytes;
1747 }
1748
1749 void MediaPlayerPrivateGStreamer::sourceSetupCallback(MediaPlayerPrivateGStreamer* player, GstElement* sourceElement)
1750 {
1751     player->sourceSetup(sourceElement);
1752 }
1753
1754 void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GstElement* element, MediaPlayerPrivateGStreamer* player)
1755 {
1756     if (g_strcmp0(G_OBJECT_TYPE_NAME(element), "GstDownloadBuffer"))
1757         return;
1758
1759     player->m_downloadBuffer = element;
1760     g_signal_handlers_disconnect_by_func(bin, reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), player);
1761     g_signal_connect_swapped(element, "notify::temp-location", G_CALLBACK(downloadBufferFileCreatedCallback), player);
1762
1763     GUniqueOutPtr<char> oldDownloadTemplate;
1764     g_object_get(element, "temp-template", &oldDownloadTemplate.outPtr(), nullptr);
1765
1766     GUniquePtr<char> newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, "var", "tmp", "WebKit-Media-XXXXXX", nullptr));
1767     g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr);
1768     GST_DEBUG_OBJECT(player->pipeline(), "Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get());
1769
1770     player->purgeOldDownloadFiles(oldDownloadTemplate.get());
1771 }
1772
1773 void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer* player)
1774 {
1775     ASSERT(player->m_downloadBuffer);
1776
1777     g_signal_handlers_disconnect_by_func(player->m_downloadBuffer.get(), reinterpret_cast<gpointer>(downloadBufferFileCreatedCallback), player);
1778
1779     GUniqueOutPtr<char> downloadFile;
1780     g_object_get(player->m_downloadBuffer.get(), "temp-location", &downloadFile.outPtr(), nullptr);
1781     player->m_downloadBuffer = nullptr;
1782
1783     if (UNLIKELY(!FileSystem::deleteFile(downloadFile.get()))) {
1784         GST_WARNING("Couldn't unlink media temporary file %s after creation", downloadFile.get());
1785         return;
1786     }
1787
1788     GST_DEBUG_OBJECT(player->pipeline(), "Unlinked media temporary file %s after creation", downloadFile.get());
1789 }
1790
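// Deletes on-disk buffering leftovers that match the given temp-template (the caller passes the
// template that was in effect before it was reconfigured). The "XXXXXX" placeholder is what the
// downloadbuffer element randomizes, mkstemp-style, so it is turned into a glob pattern before
// listing the directory. For illustration, with a template of "/var/tmp/WebKit-Media-XXXXXX":
//
//     directory: "/var/tmp"    pattern: "WebKit-Media-??????"
//
// which matches leftovers such as "/var/tmp/WebKit-Media-a1B2c3" (hypothetical file name).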
1791 void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate)
1792 {
1793     if (!downloadFileTemplate)
1794         return;
1795
1796     GUniquePtr<char> templatePath(g_path_get_dirname(downloadFileTemplate));
1797     GUniquePtr<char> templateFile(g_path_get_basename(downloadFileTemplate));
1798     String templatePattern = String(templateFile.get()).replace("X", "?");
1799
1800     for (auto& filePath : FileSystem::listDirectory(templatePath.get(), templatePattern)) {
1801         if (UNLIKELY(!FileSystem::deleteFile(filePath))) {
1802             GST_WARNING("Couldn't unlink legacy media temporary file: %s", filePath.utf8().data());
1803             continue;
1804         }
1805
1806         GST_TRACE("Unlinked legacy media temporary file: %s", filePath.utf8().data());
1807     }
1808 }
1809
1810 void MediaPlayerPrivateGStreamer::sourceSetup(GstElement* sourceElement)
1811 {
1812     GST_DEBUG_OBJECT(pipeline(), "Source element set-up for %s", GST_ELEMENT_NAME(sourceElement));
1813
1814     if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
1815         g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);
1816
1817     m_source = sourceElement;
1818
1819     if (WEBKIT_IS_WEB_SRC(m_source.get())) {
1820         webKitWebSrcSetMediaPlayer(WEBKIT_WEB_SRC(m_source.get()), m_player);
1821         g_signal_connect(GST_ELEMENT_PARENT(m_source.get()), "element-added", G_CALLBACK(uriDecodeBinElementAddedCallback), this);
1822 #if ENABLE(MEDIA_STREAM) && GST_CHECK_VERSION(1, 10, 0)
1823     } else if (WEBKIT_IS_MEDIA_STREAM_SRC(sourceElement)) {
1824         auto stream = m_streamPrivate.get();
1825         ASSERT(stream);
1826         webkitMediaStreamSrcSetStream(WEBKIT_MEDIA_STREAM_SRC(sourceElement), stream);
1827 #endif
1828     }
1829 }
1830
1831 bool MediaPlayerPrivateGStreamer::hasSingleSecurityOrigin() const
1832 {
1833     if (!m_source)
1834         return false;
1835
1836     if (!WEBKIT_IS_WEB_SRC(m_source.get()))
1837         return true;
1838
1839     GUniqueOutPtr<char> originalURI, resolvedURI;
1840     g_object_get(m_source.get(), "location", &originalURI.outPtr(), "resolved-location", &resolvedURI.outPtr(), nullptr);
1841     if (!originalURI || !resolvedURI)
1842         return false;
1843     if (!g_strcmp0(originalURI.get(), resolvedURI.get()))
1844         return true;
1845
1846     Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::createFromString(String::fromUTF8(resolvedURI.get())));
1847     Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(String::fromUTF8(originalURI.get())));
1848     return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
1849 }
1850
1851 void MediaPlayerPrivateGStreamer::cancelLoad()
1852 {
1853     if (m_networkState < MediaPlayer::Loading || m_networkState == MediaPlayer::Loaded)
1854         return;
1855
1856     if (m_pipeline)
1857         changePipelineState(GST_STATE_READY);
1858 }
1859
1860 void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
1861 {
1862     if (!m_pipeline || m_errorOccured)
1863         return;
1864
1865     if (m_seeking) {
1866         if (m_seekIsPending)
1867             updateStates();
1868         else {
1869             GST_DEBUG_OBJECT(pipeline(), "[Seek] seeked to %s", toString(m_seekTime).utf8().data());
1870             m_seeking = false;
1871             m_cachedPosition = MediaTime::invalidTime();
1872             if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek.isValid()) {
1873                 seek(m_timeOfOverlappingSeek);
1874                 m_timeOfOverlappingSeek = MediaTime::invalidTime();
1875                 return;
1876             }
1877             m_timeOfOverlappingSeek = MediaTime::invalidTime();
1878
1879             // The pipeline can still have a pending state. In this case a position query will fail.
1880             // Right now we can use m_seekTime as a fallback.
1881             m_canFallBackToLastFinishedSeekPosition = true;
1882             timeChanged();
1883         }
1884     } else
1885         updateStates();
1886 }
1887
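// Re-evaluates the ready/network state machine after a pipeline state change. For a successful
// state query the mapping is roughly:
//
//     GST_STATE_NULL               -> HaveNothing     / Empty
//     GST_STATE_READY              -> HaveMetadata    / Empty
//     PAUSED/PLAYING, buffering    -> HaveCurrentData / Loading (HaveEnoughData once at 100%)
//     PAUSED/PLAYING, downloaded   -> HaveEnoughData  / Loaded
//     PAUSED/PLAYING, otherwise    -> HaveFutureData  / Loading
//
// It also pauses or resumes the pipeline around buffering, notifies the client of any state
// change, and commits a pending seek once the pipeline has prerolled.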
1888 void MediaPlayerPrivateGStreamer::updateStates()
1889 {
1890     if (!m_pipeline)
1891         return;
1892
1893     if (m_errorOccured)
1894         return;
1895
1896     MediaPlayer::NetworkState oldNetworkState = m_networkState;
1897     MediaPlayer::ReadyState oldReadyState = m_readyState;
1898     GstState pending;
1899     GstState state;
1900     bool stateReallyChanged = false;
1901
1902     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
1903     if (state != m_currentState) {
1904         m_oldState = m_currentState;
1905         m_currentState = state;
1906         stateReallyChanged = true;
1907     }
1908
1909     bool shouldUpdatePlaybackState = false;
1910     switch (getStateResult) {
1911     case GST_STATE_CHANGE_SUCCESS: {
1912         GST_DEBUG_OBJECT(pipeline(), "State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
1913
1914         // Do nothing if we reached EOS and the state changed to READY, to avoid recreating the player
1915         // in HTMLMediaElement and to let the video 'ended' event be generated properly.
1916         if (m_isEndReached && m_currentState == GST_STATE_READY)
1917             break;
1918
1919         m_resetPipeline = m_currentState <= GST_STATE_READY;
1920
1921         bool didBuffering = m_buffering;
1922
1923         // Update ready and network states.
1924         switch (m_currentState) {
1925         case GST_STATE_NULL:
1926             m_readyState = MediaPlayer::HaveNothing;
1927             m_networkState = MediaPlayer::Empty;
1928             break;
1929         case GST_STATE_READY:
1930             m_readyState = MediaPlayer::HaveMetadata;
1931             m_networkState = MediaPlayer::Empty;
1932             break;
1933         case GST_STATE_PAUSED:
1934         case GST_STATE_PLAYING:
1935             if (m_buffering) {
1936                 if (m_bufferingPercentage == 100) {
1937                     GST_DEBUG_OBJECT(pipeline(), "[Buffering] Complete.");
1938                     m_buffering = false;
1939                     m_readyState = MediaPlayer::HaveEnoughData;
1940                     m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
1941                 } else {
1942                     m_readyState = MediaPlayer::HaveCurrentData;
1943                     m_networkState = MediaPlayer::Loading;
1944                 }
1945             } else if (m_downloadFinished) {
1946                 m_readyState = MediaPlayer::HaveEnoughData;
1947                 m_networkState = MediaPlayer::Loaded;
1948             } else {
1949                 m_readyState = MediaPlayer::HaveFutureData;
1950                 m_networkState = MediaPlayer::Loading;
1951             }
1952
1953             break;
1954         default:
1955             ASSERT_NOT_REACHED();
1956             break;
1957         }
1958
1959         // Sync states where needed.
1960         if (m_currentState == GST_STATE_PAUSED) {
1961             if (!m_volumeAndMuteInitialized) {
1962                 notifyPlayerOfVolumeChange();
1963                 notifyPlayerOfMute();
1964                 m_volumeAndMuteInitialized = true;
1965             }
1966
1967             if (didBuffering && !m_buffering && !m_paused && m_playbackRate) {
1968                 GST_DEBUG_OBJECT(pipeline(), "[Buffering] Restarting playback.");
1969                 changePipelineState(GST_STATE_PLAYING);
1970             }
1971         } else if (m_currentState == GST_STATE_PLAYING) {
1972             m_paused = false;
1973
1974             if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
1975                 GST_DEBUG_OBJECT(pipeline(), "[Buffering] Pausing stream for buffering.");
1976                 changePipelineState(GST_STATE_PAUSED);
1977             }
1978         } else
1979             m_paused = true;
1980
1981         GST_DEBUG_OBJECT(pipeline(), "Old state: %s, new state: %s (requested: %s)", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState), gst_element_state_get_name(m_requestedState));
1982         if (m_requestedState == GST_STATE_PAUSED && m_currentState == GST_STATE_PAUSED) {
1983             shouldUpdatePlaybackState = true;
1984             GST_INFO_OBJECT(pipeline(), "Requested state change to %s was completed", gst_element_state_get_name(m_currentState));
1985         }
1986
1987         // Emit play state change notification only when going to PLAYING so that
1988         // the media element gets a chance to enable its page sleep disabler.
1989         // Emitting this notification in more cases triggers unwanted code paths
1990         // and test timeouts.
1991         if (stateReallyChanged && (m_oldState != m_currentState) && (m_oldState == GST_STATE_PAUSED && m_currentState == GST_STATE_PLAYING)) {
1992             GST_INFO_OBJECT(pipeline(), "Playback state changed from %s to %s. Notifying the media player client", gst_element_state_get_name(m_oldState), gst_element_state_get_name(m_currentState));
1993             shouldUpdatePlaybackState = true;
1994         }
1995
1996         break;
1997     }
1998     case GST_STATE_CHANGE_ASYNC:
1999         GST_DEBUG_OBJECT(pipeline(), "Async: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
2000         // Change in progress.
2001         break;
2002     case GST_STATE_CHANGE_FAILURE:
2003         GST_DEBUG_OBJECT(pipeline(), "Failure: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
2004         // Change failed
2005         return;
2006     case GST_STATE_CHANGE_NO_PREROLL:
2007         GST_DEBUG_OBJECT(pipeline(), "No preroll: State: %s, pending: %s", gst_element_state_get_name(m_currentState), gst_element_state_get_name(pending));
2008
2009         // Live pipelines go to PAUSED without prerolling.
2010         m_isStreaming = true;
2011         setDownloadBuffering();
2012
2013         if (m_currentState == GST_STATE_READY)
2014             m_readyState = MediaPlayer::HaveNothing;
2015         else if (m_currentState == GST_STATE_PAUSED) {
2016             m_readyState = MediaPlayer::HaveEnoughData;
2017             m_paused = true;
2018         } else if (m_currentState == GST_STATE_PLAYING)
2019             m_paused = false;
2020
2021         if (!m_paused && m_playbackRate)
2022             changePipelineState(GST_STATE_PLAYING);
2023
2024         m_networkState = MediaPlayer::Loading;
2025         break;
2026     default:
2027         GST_DEBUG_OBJECT(pipeline(), "Unhandled state change return value: %d", getStateResult);
2028         break;
2029     }
2030
2031     m_requestedState = GST_STATE_VOID_PENDING;
2032
2033     if (shouldUpdatePlaybackState)
2034         m_player->playbackStateChanged();
2035
2036     if (m_networkState != oldNetworkState) {
2037         GST_DEBUG_OBJECT(pipeline(), "Network State Changed from %s to %s", convertEnumerationToString(oldNetworkState).utf8().data(), convertEnumerationToString(m_networkState).utf8().data());
2038         m_player->networkStateChanged();
2039     }
2040     if (m_readyState != oldReadyState) {
2041         GST_DEBUG_OBJECT(pipeline(), "Ready State Changed from %s to %s", convertEnumerationToString(oldReadyState).utf8().data(), convertEnumerationToString(m_readyState).utf8().data());
2042         m_player->readyStateChanged();
2043     }
2044
2045     if (getStateResult == GST_STATE_CHANGE_SUCCESS && m_currentState >= GST_STATE_PAUSED) {
2046         updatePlaybackRate();
2047         if (m_seekIsPending) {
2048             GST_DEBUG_OBJECT(pipeline(), "[Seek] committing pending seek to %s", toString(m_seekTime).utf8().data());
2049             m_seekIsPending = false;
2050             m_seeking = doSeek(m_seekTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
2051             if (!m_seeking) {
2052                 m_cachedPosition = MediaTime::invalidTime();
2053                 GST_DEBUG_OBJECT(pipeline(), "[Seek] seeking to %s failed", toString(m_seekTime).utf8().data());
2054             }
2055         }
2056     }
2057 }
2058
2059 bool MediaPlayerPrivateGStreamer::handleSyncMessage(GstMessage* message)
2060 {
2061 #if GST_CHECK_VERSION(1, 10, 0)
2062     if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_STREAM_COLLECTION && !m_isLegacyPlaybin) {
2063         GRefPtr<GstStreamCollection> collection;
2064         gst_message_parse_stream_collection(message, &collection.outPtr());
2065
2066         if (collection) {
2067             m_streamCollection.swap(collection);
2068             m_notifier->notify(MainThreadNotification::StreamCollectionChanged, [this] {
2069                 this->updateTracks();
2070             });
2071         }
2072     }
2073 #endif
2074
2075     return MediaPlayerPrivateGStreamerBase::handleSyncMessage(message);
2076 }
2077
2078 void MediaPlayerPrivateGStreamer::mediaLocationChanged(GstMessage* message)
2079 {
2080     if (m_mediaLocations)
2081         gst_structure_free(m_mediaLocations);
2082
2083     const GstStructure* structure = gst_message_get_structure(message);
2084     if (structure) {
2085         // This structure can contain either:
2086         // - both a new-location string and an embedded locations structure, or
2087         // - only a new-location string.
2088         m_mediaLocations = gst_structure_copy(structure);
2089         const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2090
2091         if (locations)
2092             m_mediaLocationCurrentIndex = static_cast<int>(gst_value_list_get_size(locations)) - 1;
2093
2094         loadNextLocation();
2095     }
2096 }
2097
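// Walks the redirection data stored by mediaLocationChanged(): either the "locations" list
// (visited from the last entry down) or a single "new-location" string. A relative location is
// resolved against the current URL before the same-origin check. For illustration (hypothetical
// URLs only): with m_url being "http://movies.example/part1.mov" and a new-location of
// "part2.mov", the candidate URL becomes "http://movies.example/part2.mov".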
2098 bool MediaPlayerPrivateGStreamer::loadNextLocation()
2099 {
2100     if (!m_mediaLocations)
2101         return false;
2102
2103     const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
2104     const gchar* newLocation = nullptr;
2105
2106     if (!locations) {
2107         // Fallback on new-location string.
2108         newLocation = gst_structure_get_string(m_mediaLocations, "new-location");
2109         if (!newLocation)
2110             return false;
2111     }
2112
2113     if (!newLocation) {
2114         if (m_mediaLocationCurrentIndex < 0) {
2115             m_mediaLocations = nullptr;
2116             return false;
2117         }
2118
2119         const GValue* location = gst_value_list_get_value(locations, m_mediaLocationCurrentIndex);
2120         const GstStructure* structure = gst_value_get_structure(location);
2121
2122         if (!structure) {
2123             m_mediaLocationCurrentIndex--;
2124             return false;
2125         }
2126
2127         newLocation = gst_structure_get_string(structure, "new-location");
2128     }
2129
2130     if (newLocation) {
2131         // Found a candidate. new-location is not always an absolute URL
2132         // though, so when it is relative we resolve it against the base
2133         // of the current URL.
2134         URL baseUrl = gst_uri_is_valid(newLocation) ? URL() : m_url;
2135         URL newUrl = URL(baseUrl, newLocation);
2136         convertToInternalProtocol(newUrl);
2137
2138         auto securityOrigin = SecurityOrigin::create(m_url);
2139         if (securityOrigin->canRequest(newUrl)) {
2140             GST_INFO_OBJECT(pipeline(), "New media url: %s", newUrl.string().utf8().data());
2141
2142             // Reset player states.
2143             m_networkState = MediaPlayer::Loading;
2144             m_player->networkStateChanged();
2145             m_readyState = MediaPlayer::HaveNothing;
2146             m_player->readyStateChanged();
2147
2148             // Reset pipeline state.
2149             m_resetPipeline = true;
2150             changePipelineState(GST_STATE_READY);
2151
2152             GstState state;
2153             gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
2154             if (state <= GST_STATE_READY) {
2155                 // Set the new uri and start playing.
2156                 setPlaybinURL(newUrl);
2157                 changePipelineState(GST_STATE_PLAYING);
2158                 return true;
2159             }
2160         } else
2161             GST_INFO_OBJECT(pipeline(), "Not allowed to load new media location: %s", newUrl.string().utf8().data());
2162     }
2163     m_mediaLocationCurrentIndex--;
2164     return false;
2165 }
2166
2167 void MediaPlayerPrivateGStreamer::loadStateChanged()
2168 {
2169     updateStates();
2170 }
2171
2172 void MediaPlayerPrivateGStreamer::timeChanged()
2173 {
2174     updateStates();
2175     m_player->timeChanged();
2176 }
2177
2178 void MediaPlayerPrivateGStreamer::didEnd()
2179 {
2180     GST_INFO_OBJECT(pipeline(), "Playback ended");
2181
2182     // Synchronize the position and duration values to avoid confusing the
2183     // HTMLMediaElement. In some cases, such as reverse playback, the
2184     // position is not necessarily reported as 0.
2185     m_cachedPosition = MediaTime::invalidTime();
2186     MediaTime now = currentMediaTime();
2187     if (now > MediaTime { } && now <= durationMediaTime())
2188         m_player->durationChanged();
2189
2190     m_isEndReached = true;
2191     timeChanged();
2192
2193     if (!m_player->client().mediaPlayerIsLooping()) {
2194         m_paused = true;
2195         m_durationAtEOS = durationMediaTime();
2196         changePipelineState(GST_STATE_READY);
2197         m_downloadFinished = false;
2198     }
2199 }
2200
2201 void MediaPlayerPrivateGStreamer::durationChanged()
2202 {
2203     MediaTime previousDuration = durationMediaTime();
2204
2205     // FIXME: Check if this method is still useful, because it's not doing its work at all
2206     // since bug #159458 removed a cacheDuration() call here.
2207
2208     // Avoid emitting durationchanged in the case where the previous
2209     // duration was 0, because that case is already handled by the
2210     // HTMLMediaElement.
2211     if (previousDuration && durationMediaTime() != previousDuration)
2212         m_player->durationChanged();
2213 }
2214
2215 void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState error)
2216 {
2217     GST_WARNING("Loading failed, error: %d", error);
2218
2219     m_errorOccured = true;
2220     if (m_networkState != error) {
2221         m_networkState = error;
2222         m_player->networkStateChanged();
2223     }
2224     if (m_readyState != MediaPlayer::HaveNothing) {
2225         m_readyState = MediaPlayer::HaveNothing;
2226         m_player->readyStateChanged();
2227     }
2228
2229     // Loading failed, remove ready timer.
2230     m_readyTimerHandler.stop();
2231 }
2232
2233 void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
2234 {
2235     auto& gstRegistryScanner = GStreamerRegistryScanner::singleton();
2236     types = gstRegistryScanner.mimeTypeSet();
2237 }
2238
2239 MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::supportsType(const MediaEngineSupportParameters& parameters)
2240 {
2241     MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
2242 #if ENABLE(MEDIA_SOURCE)
2243     // MediaPlayerPrivateGStreamerMSE is in charge of mediasource playback, not us.
2244     if (parameters.isMediaSource)
2245         return result;
2246 #endif
2247
2248 #if !ENABLE(MEDIA_STREAM) || !GST_CHECK_VERSION(1, 10, 0)
2249     if (parameters.isMediaStream)
2250         return result;
2251 #endif
2252
2253     if (parameters.type.isEmpty())
2254         return result;
2255
2256     GST_DEBUG("Checking mime-type \"%s\"", parameters.type.raw().utf8().data());
2257     auto containerType = parameters.type.containerType();
2258     auto& gstRegistryScanner = GStreamerRegistryScanner::singleton();
2259     if (gstRegistryScanner.isContainerTypeSupported(containerType)) {
2260         // Spec says we should not return "probably" if the codecs string is empty.
2261         Vector<String> codecs = parameters.type.codecs();
2262         result = codecs.isEmpty() ? MediaPlayer::MayBeSupported : (gstRegistryScanner.areAllCodecsSupported(codecs) ? MediaPlayer::IsSupported : MediaPlayer::IsNotSupported);
2263     }
2264
2265     auto finalResult = extendedSupportsType(parameters, result);
2266     GST_DEBUG("Supported: %s", convertEnumerationToString(finalResult).utf8().data());
2267     return finalResult;
2268 }
2269
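// Enables or disables progressive on-disk buffering by toggling the playbin "download" flag.
// Buffering is enabled only for non-live streams when preload is "auto", and the fill timer
// driving fillTimerFired() is started or stopped accordingly. A download that has already
// started is left alone unless the pipeline is being reset.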
2270 void MediaPlayerPrivateGStreamer::setDownloadBuffering()
2271 {
2272     if (!m_pipeline)
2273         return;
2274
2275     unsigned flags;
2276     g_object_get(m_pipeline.get(), "flags", &flags, nullptr);
2277
2278     unsigned flagDownload = getGstPlayFlag("download");
2279
2280     // We don't want to stop downloading if we already started it.
2281     if (flags & flagDownload && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline) {
2282         GST_DEBUG_OBJECT(pipeline(), "Download already started, not starting again");
2283         return;
2284     }
2285
2286     bool shouldDownload = !isLiveStream() && m_preload == MediaPlayer::Auto;
2287     if (shouldDownload) {
2288         GST_INFO_OBJECT(pipeline(), "Enabling on-disk buffering");
2289         g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
2290         m_fillTimer.startRepeating(200_ms);
2291     } else {
2292         GST_INFO_OBJECT(pipeline(), "Disabling on-disk buffering");
2293         g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr);
2294         m_fillTimer.stop();
2295     }
2296 }
2297
2298 void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
2299 {
2300     GST_DEBUG_OBJECT(pipeline(), "Setting preload to %s", convertEnumerationToString(preload).utf8().data());
2301     if (preload == MediaPlayer::Auto && isLiveStream())
2302         return;
2303
2304     m_preload = preload;
2305     setDownloadBuffering();
2306
2307     if (m_delayingLoad && m_preload != MediaPlayer::None) {
2308         m_delayingLoad = false;
2309         commitLoad();
2310     }
2311 }
2312
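// Builds the audio sink handed to playbin. With GStreamer 1.4.2 or newer, pitch preservation is
// handled through the audio-filter property (see createGSTPlayBin()), so only the WebAudio
// provider bin, or the bare autoaudiosink, is needed here. On older GStreamer, when pitch
// preservation is enabled, a bin roughly equivalent to the following launch line is assembled:
//
//     scaletempo ! audioconvert ! audioresample ! autoaudiosink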
2313 GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
2314 {
2315     m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr);
2316     if (!m_autoAudioSink) {
2317         GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation");
2318         return nullptr;
2319     }
2320
2321     g_signal_connect_swapped(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);
2322
2323     GstElement* audioSinkBin;
2324
2325     if (webkitGstCheckVersion(1, 4, 2)) {
2326 #if ENABLE(WEB_AUDIO)
2327         audioSinkBin = gst_bin_new("audio-sink");
2328         ensureAudioSourceProvider();
2329         m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
2330         return audioSinkBin;
2331 #else
2332         return m_autoAudioSink.get();
2333 #endif
2334     }
2335
2336     // Construct the audio sink bin only if pitch preservation is enabled.
2337     // With GStreamer 1.4.2 or newer the audio-filter playbin property is used instead (handled above).
2338     if (m_preservesPitch) {
2339         GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
2340         if (!scale) {
2341             GST_WARNING("Failed to create scaletempo");
2342             return m_autoAudioSink.get();
2343         }
2344
2345         audioSinkBin = gst_bin_new("audio-sink");
2346         gst_bin_add(GST_BIN(audioSinkBin), scale);
2347         GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(scale, "sink"));
2348         gst_element_add_pad(audioSinkBin, gst_ghost_pad_new("sink", pad.get()));
2349
2350 #if ENABLE(WEB_AUDIO)
2351         ensureAudioSourceProvider();
2352         m_audioSourceProvider->configureAudioBin(audioSinkBin, scale);
2353 #else
2354         GstElement* convert = gst_element_factory_make("audioconvert", nullptr);
2355         GstElement* resample = gst_element_factory_make("audioresample", nullptr);
2356
2357         gst_bin_add_many(GST_BIN(audioSinkBin), convert, resample, m_autoAudioSink.get(), nullptr);
2358
2359         if (!gst_element_link_many(scale, convert, resample, m_autoAudioSink.get(), nullptr)) {
2360             GST_WARNING("Failed to link audio sink elements");
2361             gst_object_unref(audioSinkBin);
2362             return m_autoAudioSink.get();
2363         }
2364 #endif
2365         return audioSinkBin;
2366     }
2367
2368 #if ENABLE(WEB_AUDIO)
2369     audioSinkBin = gst_bin_new("audio-sink");
2370     ensureAudioSourceProvider();
2371     m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
2372     return audioSinkBin;
2373 #endif
2374     ASSERT_NOT_REACHED();
2375     return nullptr;
2376 }
2377
2378 GstElement* MediaPlayerPrivateGStreamer::audioSink() const
2379 {
2380     GstElement* sink;
2381     g_object_get(m_pipeline.get(), "audio-sink", &sink, nullptr);
2382     return sink;
2383 }
2384
2385 #if ENABLE(WEB_AUDIO)
2386 void MediaPlayerPrivateGStreamer::ensureAudioSourceProvider()
2387 {
2388     if (!m_audioSourceProvider)
2389         m_audioSourceProvider = std::make_unique<AudioSourceProviderGStreamer>();
2390 }
2391
2392 AudioSourceProvider* MediaPlayerPrivateGStreamer::audioSourceProvider()
2393 {
2394     ensureAudioSourceProvider();
2395     return m_audioSourceProvider.get();
2396 }
2397 #endif
2398
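// Creates, or re-creates when a different factory is forced, the playbin pipeline and wires up
// the bus, track and sink callbacks. With GStreamer 1.10 or newer, playbin3 can also be requested
// through the USE_PLAYBIN3 environment variable checked below, e.g. (illustrative command line,
// binary and URL are placeholders):
//
//     USE_PLAYBIN3=1 ./MiniBrowser https://example.com/video.html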
2399 void MediaPlayerPrivateGStreamer::createGSTPlayBin(const gchar* playbinName, const String& pipelineName)
2400 {
2401     if (m_pipeline) {
2402         if (!playbinName) {
2403             GST_INFO_OBJECT(pipeline(), "Keeping same playbin as nothing forced");
2404             return;
2405         }
2406
2407         if (!g_strcmp0(GST_OBJECT_NAME(gst_element_get_factory(m_pipeline.get())), playbinName)) {
2408             GST_INFO_OBJECT(pipeline(), "Already using %s", playbinName);
2409             return;
2410         }
2411
2412         GST_INFO_OBJECT(pipeline(), "Tearing down as we need to use %s now.",
2413             playbinName);
2414         changePipelineState(GST_STATE_NULL);
2415         m_pipeline = nullptr;
2416     }
2417
2418     ASSERT(!m_pipeline);
2419
2420 #if GST_CHECK_VERSION(1, 10, 0)
2421     if (Environment::get("USE_PLAYBIN3"))
2422         playbinName = "playbin3";
2423 #else
2424     playbinName = "playbin";
2425 #endif
2426
2427     if (!playbinName)
2428         playbinName = "playbin";
2429
2430     m_isLegacyPlaybin = !g_strcmp0(playbinName, "playbin");
2431
2432     // gst_element_factory_make() returns a floating reference so
2433     // we should not adopt.
2434     setPipeline(gst_element_factory_make(playbinName,
2435         pipelineName.isEmpty() ? String::format("play_%p", this).utf8().data() : pipelineName.utf8().data()));
2436     setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get()));
2437
2438     GST_INFO_OBJECT(pipeline(), "Using legacy playbin element: %s", boolForPrinting(m_isLegacyPlaybin));
2439
2440     // Also let other listeners subscribe to (application) messages on this bus.
2441     GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
2442     gst_bus_add_signal_watch_full(bus.get(), RunLoopSourcePriority::RunLoopDispatcher);
2443     g_signal_connect(bus.get(), "message", G_CALLBACK(busMessageCallback), this);
2444
2445     g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);
2446
2447     g_signal_connect_swapped(m_pipeline.get(), "source-setup", G_CALLBACK(sourceSetupCallback), this);
2448     if (m_isLegacyPlaybin) {
2449         g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
2450         g_signal_connect_swapped(m_pipeline.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
2451     }
2452
2453 #if ENABLE(VIDEO_TRACK)
2454     if (m_isLegacyPlaybin)
2455         g_signal_connect_swapped(m_pipeline.get(), "text-changed", G_CALLBACK(textChangedCallback), this);
2456
2457     GstElement* textCombiner = webkitTextCombinerNew();
2458     ASSERT(textCombiner);
2459     g_object_set(m_pipeline.get(), "text-stream-combiner", textCombiner, nullptr);
2460
2461     m_textAppSink = webkitTextSinkNew();
2462     ASSERT(m_textAppSink);
2463
2464     m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
2465     ASSERT(m_textAppSinkPad);
2466
2467     GRefPtr<GstCaps> textCaps;
2468     if (webkitGstCheckVersion(1, 13, 0))
2469         textCaps = adoptGRef(gst_caps_new_empty_simple("application/x-subtitle-vtt"));
2470     else
2471         textCaps = adoptGRef(gst_caps_new_empty_simple("text/vtt"));
2472     g_object_set(m_textAppSink.get(), "emit-signals", TRUE, "enable-last-sample", FALSE, "caps", textCaps.get(), nullptr);
2473     g_signal_connect_swapped(m_textAppSink.get(), "new-sample", G_CALLBACK(newTextSampleCallback), this);
2474
2475     g_object_set(m_pipeline.get(), "text-sink", m_textAppSink.get(), nullptr);
2476 #endif
2477
2478     g_object_set(m_pipeline.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);
2479
2480     configurePlaySink();
2481
2482     // On 1.4.2 and newer we use the audio-filter property instead.
2483     // See https://bugzilla.gnome.org/show_bug.cgi?id=735748 for
2484     // the reason for using >= 1.4.2 instead of >= 1.4.0.
2485     if (m_preservesPitch && webkitGstCheckVersion(1, 4, 2)) {
2486         GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
2487
2488         if (!scale)
2489             GST_WARNING("Failed to create scaletempo");
2490         else
2491             g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
2492     }
2493
2494     if (!m_renderingCanBeAccelerated) {
2495         // If not using accelerated compositing, let GStreamer handle
2496         // the image-orientation tag.
2497         GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
2498         if (videoFlip) {
2499             g_object_set(videoFlip, "method", 8, nullptr); // Method 8 is "automatic": rotate based on the image-orientation tag.
2500             g_object_set(m_pipeline.get(), "video-filter", videoFlip, nullptr);
2501         } else
2502             GST_WARNING("The videoflip element is missing, video rotation support is now disabled. Please check your gst-plugins-good installation.");
2503     }
2504
2505     GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
2506     if (videoSinkPad)
2507         g_signal_connect_swapped(videoSinkPad.get(), "notify::caps", G_CALLBACK(videoSinkCapsChangedCallback), this);
2508 }
2509
2510 void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
2511 {
2512     GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_pipeline.get()), GST_STATE_PAUSED);
2513     gst_element_post_message(m_pipeline.get(), message);
2514 }
2515
2516 bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
2517 {
2518     if (WEBKIT_IS_WEB_SRC(m_source.get()))
2519         return webKitSrcPassedCORSAccessCheck(WEBKIT_WEB_SRC(m_source.get()));
2520     return false;
2521 }
2522
2523 bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
2524 {
2525     if (isLiveStream())
2526         return false;
2527
2528     if (m_url.isLocalFile())
2529         return true;
2530
2531     if (m_url.protocolIsInHTTPFamily())
2532         return true;
2533
2534     return false;
2535 }
2536
2537 Optional<bool> MediaPlayerPrivateGStreamer::wouldTaintOrigin(const SecurityOrigin&) const
2538 {
2539     // Ideally the given origin should always be verified with
2540     // webKitSrcWouldTaintOrigin() instead of only checking it for
2541     // adaptive-streaming-statistics. We can't do this yet because HLS fragments
2542     // are currently downloaded independently from WebKit.
2543     // See also https://bugs.webkit.org/show_bug.cgi?id=189967.
2544     return m_hasTaintedOrigin;
2545 }
2546
2547
2548 }
2549
2550 #endif // ENABLE(VIDEO) && USE(GSTREAMER)