Stop using PassRefPtr in platform/graphics
[WebKit-https.git] / Source / WebCore / platform / graphics / gstreamer / MediaPlayerPrivateGStreamer.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2015, 2016 Igalia S.L
7  * Copyright (C) 2014 Cable Television Laboratories, Inc.
8  * Copyright (C) 2015, 2016 Metrological Group B.V.
9  *
10  * This library is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Library General Public
12  * License as published by the Free Software Foundation; either
13  * version 2 of the License, or (at your option) any later version.
14  *
15  * This library is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18  * Library General Public License for more details.
19  *
20  * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
22  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
23  * Boston, MA 02110-1301, USA.
24  */
25
26 #include "config.h"
27 #include "MediaPlayerPrivateGStreamer.h"
28
29 #if ENABLE(VIDEO) && USE(GSTREAMER)
30
31 #include "FileSystem.h"
32 #include "GStreamerUtilities.h"
33 #include "URL.h"
34 #include "MIMETypeRegistry.h"
35 #include "MediaPlayer.h"
36 #include "MediaPlayerRequestInstallMissingPluginsCallback.h"
37 #include "NotImplemented.h"
38 #include "SecurityOrigin.h"
39 #include "TimeRanges.h"
40 #include "WebKitWebSourceGStreamer.h"
41 #include <glib.h>
42 #include <gst/gst.h>
43 #include <gst/pbutils/missing-plugins.h>
44 #include <limits>
45 #include <wtf/HexNumber.h>
46 #include <wtf/MediaTime.h>
47 #include <wtf/NeverDestroyed.h>
48 #include <wtf/glib/GUniquePtr.h>
49 #include <wtf/text/CString.h>
50
51 #if ENABLE(VIDEO_TRACK)
52 #include "AudioTrackPrivateGStreamer.h"
53 #include "InbandMetadataTextTrackPrivateGStreamer.h"
54 #include "InbandTextTrackPrivateGStreamer.h"
55 #include "TextCombinerGStreamer.h"
56 #include "TextSinkGStreamer.h"
57 #include "VideoTrackPrivateGStreamer.h"
58 #endif
59
60 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
61 #define GST_USE_UNSTABLE_API
62 #include <gst/mpegts/mpegts.h>
63 #undef GST_USE_UNSTABLE_API
64 #endif
65 #include <gst/audio/streamvolume.h>
66
67 #if ENABLE(MEDIA_SOURCE)
68 #include "MediaSource.h"
69 #include "WebKitMediaSourceGStreamer.h"
70 #endif
71
72 #if ENABLE(WEB_AUDIO)
73 #include "AudioSourceProviderGStreamer.h"
74 #endif
75
76 GST_DEBUG_CATEGORY_EXTERN(webkit_media_player_debug);
77 #define GST_CAT_DEFAULT webkit_media_player_debug
78
79 using namespace std;
80
81 namespace WebCore {
82
// Trampoline for the GstBus "message" signal: forwards every bus message to
// the player instance that was registered as user data.
static void busMessageCallback(GstBus*, GstMessage* message, MediaPlayerPrivateGStreamer* player)
{
    player->handleMessage(message);
}
87
// C-style signal trampoline; forwards to the member function below.
void MediaPlayerPrivateGStreamer::setAudioStreamPropertiesCallback(MediaPlayerPrivateGStreamer* player, GObject* object)
{
    player->setAudioStreamProperties(object);
}
92
93 void MediaPlayerPrivateGStreamer::setAudioStreamProperties(GObject* object)
94 {
95     if (g_strcmp0(G_OBJECT_TYPE_NAME(object), "GstPulseSink"))
96         return;
97
98     const char* role = m_player->client().mediaPlayerIsVideo() ? "video" : "music";
99     GstStructure* structure = gst_structure_new("stream-properties", "media.role", G_TYPE_STRING, role, nullptr);
100     g_object_set(object, "stream-properties", structure, nullptr);
101     gst_structure_free(structure);
102     GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(object)));
103     GST_DEBUG("Set media.role as %s at %s", role, elementName.get());
104 }
105
// Registers this engine with MediaPlayer's engine registry, providing a
// factory lambda plus the static capability probes. Skipped entirely when
// GStreamer (or playbin) is unavailable.
void MediaPlayerPrivateGStreamer::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamer>(player); },
            getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
}
112
// Brings up the GStreamer runtime and registers WebKit's custom elements.
// Returns false only when GStreamer itself failed to initialize. Safe to call
// repeatedly: the webkitwebsrc element is registered only once.
bool initializeGStreamerAndRegisterWebKitElements()
{
    if (!initializeGStreamer())
        return false;

    registerWebKitGStreamerElements();

    // Register our HTTP source element unless a previous call already did.
    // The high rank (PRIMARY + 100) makes uridecodebin prefer it over
    // stock HTTP sources.
    GRefPtr<GstElementFactory> srcFactory = adoptGRef(gst_element_factory_find("webkitwebsrc"));
    if (!srcFactory) {
        GST_DEBUG_CATEGORY_INIT(webkit_media_player_debug, "webkitmediaplayer", 0, "WebKit media player");
        gst_element_register(nullptr, "webkitwebsrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_WEB_SRC);
    }

    return true;
}
128
129 bool MediaPlayerPrivateGStreamer::isAvailable()
130 {
131     if (!initializeGStreamerAndRegisterWebKitElements())
132         return false;
133
134     GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
135     return factory;
136 }
137
// Initializes all playback state to idle defaults. The GStreamer pipeline
// itself is created lazily by load()/createGSTPlayBin().
MediaPlayerPrivateGStreamer::MediaPlayerPrivateGStreamer(MediaPlayer* player)
    : MediaPlayerPrivateGStreamerBase(player)
    , m_buffering(false)
    , m_bufferingPercentage(0)
    , m_canFallBackToLastFinishedSeekPosition(false)
    , m_changingRate(false)
    , m_downloadFinished(false)
    , m_errorOccured(false)
    , m_isEndReached(false)
    , m_isStreaming(false)
    , m_durationAtEOS(0)
    , m_paused(true)
    , m_playbackRate(1)
    , m_requestedState(GST_STATE_VOID_PENDING)
    , m_resetPipeline(false)
    , m_seeking(false)
    , m_seekIsPending(false)
    , m_seekTime(0)
    , m_source(nullptr)
    , m_volumeAndMuteInitialized(false)
    , m_weakPtrFactory(this)
    , m_mediaLocations(nullptr)
    , m_mediaLocationCurrentIndex(0)
    , m_playbackRatePause(false)
    , m_timeOfOverlappingSeek(-1)
    , m_lastPlaybackRate(1)
    , m_fillTimer(*this, &MediaPlayerPrivateGStreamer::fillTimerFired)
    , m_maxTimeLoaded(0)
    , m_preload(player->preload())
    , m_delayingLoad(false)
    , m_maxTimeLoadedAtLastDidLoadingProgress(0)
    , m_hasVideo(false)
    , m_hasAudio(false)
    , m_readyTimerHandler(RunLoop::main(), this, &MediaPlayerPrivateGStreamer::readyTimerFired)
    , m_totalBytes(0)
    , m_preservesPitch(false)
{
#if USE(GLIB)
    // The READY-state cleanup timer is housekeeping; run it at idle priority
    // so it never competes with normal event processing.
    m_readyTimerHandler.setPriority(G_PRIORITY_DEFAULT_IDLE);
#endif
}
179
180 MediaPlayerPrivateGStreamer::~MediaPlayerPrivateGStreamer()
181 {
182 #if ENABLE(VIDEO_TRACK)
183     for (size_t i = 0; i < m_audioTracks.size(); ++i)
184         m_audioTracks[i]->disconnect();
185
186     for (size_t i = 0; i < m_textTracks.size(); ++i)
187         m_textTracks[i]->disconnect();
188
189     for (size_t i = 0; i < m_videoTracks.size(); ++i)
190         m_videoTracks[i]->disconnect();
191 #endif
192     if (m_fillTimer.isActive())
193         m_fillTimer.stop();
194
195     if (m_mediaLocations) {
196         gst_structure_free(m_mediaLocations);
197         m_mediaLocations = nullptr;
198     }
199
200     if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
201         g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);
202
203     if (m_autoAudioSink)
204         g_signal_handlers_disconnect_by_func(G_OBJECT(m_autoAudioSink.get()),
205             reinterpret_cast<gpointer>(setAudioStreamPropertiesCallback), this);
206
207     m_readyTimerHandler.stop();
208     if (m_missingPluginsCallback) {
209         m_missingPluginsCallback->invalidate();
210         m_missingPluginsCallback = nullptr;
211     }
212
213     if (m_videoSink) {
214         GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
215         g_signal_handlers_disconnect_matched(videoSinkPad.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
216     }
217
218     if (m_pipeline) {
219         GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
220         ASSERT(bus);
221         g_signal_handlers_disconnect_by_func(bus.get(), gpointer(busMessageCallback), this);
222         gst_bus_remove_signal_watch(bus.get());
223         gst_bus_set_sync_handler(bus.get(), nullptr, nullptr, nullptr);
224         g_signal_handlers_disconnect_matched(m_pipeline.get(), G_SIGNAL_MATCH_DATA, 0, 0, nullptr, nullptr, this);
225     }
226 }
227
// Entry point for URL-based playback. Normalizes the URL, (lazily) creates
// the playbin pipeline, points it at the URI and resets the network/ready
// state machine. Actual pre-roll is deferred when preload is None.
void MediaPlayerPrivateGStreamer::load(const String& urlString)
{
    if (!initializeGStreamerAndRegisterWebKitElements())
        return;

    URL url(URL(), urlString);
    if (url.isBlankURL())
        return;

    // Clean out everything after file:// url path.
    String cleanURL(urlString);
    if (url.isLocalFile())
        cleanURL = cleanURL.substring(0, url.pathEnd());

    if (!m_pipeline)
        createGSTPlayBin();

    // A previous load may have left the buffering-progress poll running.
    if (m_fillTimer.isActive())
        m_fillTimer.stop();

    ASSERT(m_pipeline);

    m_url = URL(URL(), cleanURL);
    g_object_set(m_pipeline.get(), "uri", cleanURL.utf8().data(), nullptr);

    GST_INFO("Load %s", cleanURL.utf8().data());

    // With preload=none, wait for an explicit play()/prepareToPlay() before
    // touching the network.
    if (m_preload == MediaPlayer::None) {
        GST_DEBUG("Delaying load.");
        m_delayingLoad = true;
    }

    // Reset network and ready states. Those will be set properly once
    // the pipeline pre-rolled.
    m_networkState = MediaPlayer::Loading;
    m_player->networkStateChanged();
    m_readyState = MediaPlayer::HaveNothing;
    m_player->readyStateChanged();
    m_volumeAndMuteInitialized = false;
    m_durationAtEOS = 0;

    if (!m_delayingLoad)
        commitLoad();
}
272
273 #if ENABLE(MEDIA_SOURCE)
// MSE is not handled by this class (MediaPlayerPrivateGStreamerMSE does it).
void MediaPlayerPrivateGStreamer::load(const String&, MediaSourcePrivateClient*)
{
    // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
}
280 #endif
281
282 #if ENABLE(MEDIA_STREAM)
// MediaStream playback is not supported by this backend yet.
void MediaPlayerPrivateGStreamer::load(MediaStreamPrivate&)
{
    notImplemented();
}
287 #endif
288
// Actually starts loading: pushes the pipeline to PAUSED so it pre-rolls,
// then configures download buffering and syncs the externally visible states.
void MediaPlayerPrivateGStreamer::commitLoad()
{
    ASSERT(!m_delayingLoad);
    GST_DEBUG("Committing load.");

    // GStreamer needs to have the pipeline set to a paused state to
    // start providing anything useful.
    changePipelineState(GST_STATE_PAUSED);

    setDownloadBuffering();
    updateStates();
}
301
302 double MediaPlayerPrivateGStreamer::playbackPosition() const
303 {
304     if (m_isEndReached) {
305         // Position queries on a null pipeline return 0. If we're at
306         // the end of the stream the pipeline is null but we want to
307         // report either the seek time or the duration because this is
308         // what the Media element spec expects us to do.
309         if (m_seeking)
310             return m_seekTime;
311
312         MediaTime mediaDuration = durationMediaTime();
313         if (mediaDuration)
314             return mediaDuration.toDouble();
315         return 0;
316     }
317
318     // Position is only available if no async state change is going on and the state is either paused or playing.
319     gint64 position = GST_CLOCK_TIME_NONE;
320     GstQuery* query= gst_query_new_position(GST_FORMAT_TIME);
321     if (gst_element_query(m_pipeline.get(), query))
322         gst_query_parse_position(query, 0, &position);
323     gst_query_unref(query);
324
325     GST_DEBUG("Position %" GST_TIME_FORMAT, GST_TIME_ARGS(position));
326
327     double result = 0.0f;
328     if (static_cast<GstClockTime>(position) != GST_CLOCK_TIME_NONE) {
329         GTimeVal timeValue;
330         GST_TIME_TO_TIMEVAL(position, timeValue);
331         result = static_cast<double>(timeValue.tv_sec + (timeValue.tv_usec / 1000000.0));
332     } else if (m_canFallBackToLastFinishedSeekPosition)
333         result = m_seekTime;
334
335     return result;
336 }
337
// Fired after the pipeline lingered too long in READY: drop to NULL to
// release decoder/network resources (see changePipelineState()).
void MediaPlayerPrivateGStreamer::readyTimerFired()
{
    changePipelineState(GST_STATE_NULL);
}
342
// Requests a pipeline state change, skipping no-ops (already in or heading to
// the target state) and managing the READY-state cleanup timer. Returns false
// only on a synchronous failure that isn't a PAUSED<->PLAYING toggle.
bool MediaPlayerPrivateGStreamer::changePipelineState(GstState newState)
{
    ASSERT(m_pipeline);

    GstState currentState;
    GstState pending;

    gst_element_get_state(m_pipeline.get(), &currentState, &pending, 0);
    if (currentState == newState || pending == newState) {
        GST_DEBUG("Rejected state change to %s from %s with %s pending", gst_element_state_get_name(newState),
            gst_element_state_get_name(currentState), gst_element_state_get_name(pending));
        return true;
    }

    GST_DEBUG("Changing state change to %s from %s with %s pending", gst_element_state_get_name(newState),
        gst_element_state_get_name(currentState), gst_element_state_get_name(pending));

    GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), newState);
    // A failed transition between PAUSED and PLAYING is tolerated (treated as
    // success); any other synchronous failure is reported to the caller.
    GstState pausedOrPlaying = newState == GST_STATE_PLAYING ? GST_STATE_PAUSED : GST_STATE_PLAYING;
    if (currentState != pausedOrPlaying && setStateResult == GST_STATE_CHANGE_FAILURE) {
        return false;
    }

    // Create a timer when entering the READY state so that we can free resources
    // if we stay for too long on READY.
    // Also lets remove the timer if we request a state change for any state other than READY.
    // See also https://bugs.webkit.org/show_bug.cgi?id=117354
    if (newState == GST_STATE_READY && !m_readyTimerHandler.isActive()) {
        // Max interval in seconds to stay in the READY state on manual
        // state change requests.
        static const Seconds readyStateTimerDelay { 1_min };
        m_readyTimerHandler.startOneShot(readyStateTimerDelay);
    } else if (newState != GST_STATE_READY)
        m_readyTimerHandler.stop();

    return true;
}
380
// Upgrades preload to Auto and, if load() was deferred (preload=none),
// commits it now.
void MediaPlayerPrivateGStreamer::prepareToPlay()
{
    m_preload = MediaPlayer::Auto;
    if (m_delayingLoad) {
        m_delayingLoad = false;
        commitLoad();
    }
}
389
390 void MediaPlayerPrivateGStreamer::play()
391 {
392     if (!m_playbackRate) {
393         m_playbackRatePause = true;
394         return;
395     }
396
397     if (changePipelineState(GST_STATE_PLAYING)) {
398         m_isEndReached = false;
399         m_delayingLoad = false;
400         m_preload = MediaPlayer::Auto;
401         setDownloadBuffering();
402         GST_DEBUG("Play");
403     } else {
404         loadingFailed(MediaPlayer::Empty);
405     }
406 }
407
408 void MediaPlayerPrivateGStreamer::pause()
409 {
410     m_playbackRatePause = false;
411     GstState currentState, pendingState;
412     gst_element_get_state(m_pipeline.get(), &currentState, &pendingState, 0);
413     if (currentState < GST_STATE_PAUSED && pendingState <= GST_STATE_PAUSED)
414         return;
415
416     if (changePipelineState(GST_STATE_PAUSED))
417         GST_INFO("Pause");
418     else
419         loadingFailed(MediaPlayer::Empty);
420 }
421
422 MediaTime MediaPlayerPrivateGStreamer::durationMediaTime() const
423 {
424     if (!m_pipeline)
425         return { };
426
427     if (m_errorOccured)
428         return { };
429
430     if (m_durationAtEOS)
431         return MediaTime::createWithDouble(m_durationAtEOS);
432
433     // The duration query would fail on a not-prerolled pipeline.
434     if (GST_STATE(m_pipeline.get()) < GST_STATE_PAUSED)
435         return { };
436
437     GstFormat timeFormat = GST_FORMAT_TIME;
438     gint64 timeLength = 0;
439
440     bool failure = !gst_element_query_duration(m_pipeline.get(), timeFormat, &timeLength) || static_cast<guint64>(timeLength) == GST_CLOCK_TIME_NONE;
441     if (failure) {
442         GST_DEBUG("Time duration query failed for %s", m_url.string().utf8().data());
443         return MediaTime::positiveInfiniteTime();
444     }
445
446     GST_DEBUG("Duration: %" GST_TIME_FORMAT, GST_TIME_ARGS(timeLength));
447
448     return MediaTime::createWithDouble(static_cast<double>(timeLength) / GST_SECOND);
449     // FIXME: handle 3.14.9.5 properly
450 }
451
452 MediaTime MediaPlayerPrivateGStreamer::currentMediaTime() const
453 {
454     if (!m_pipeline)
455         return { };
456
457     if (m_errorOccured)
458         return { };
459
460     if (m_seeking)
461         return MediaTime::createWithFloat(m_seekTime);
462
463     // Workaround for
464     // https://bugzilla.gnome.org/show_bug.cgi?id=639941 In GStreamer
465     // 0.10.35 basesink reports wrong duration in case of EOS and
466     // negative playback rate. There's no upstream accepted patch for
467     // this bug yet, hence this temporary workaround.
468     if (m_isEndReached && m_playbackRate < 0)
469         return { };
470
471     return MediaTime::createWithDouble(playbackPosition());
472 }
473
// Public seek entry point. Handles overlapping seeks, defers the actual
// GStreamer seek while an async state change is pending or after EOS, and
// otherwise performs a flushing accurate seek immediately.
void MediaPlayerPrivateGStreamer::seek(float time)
{
    if (!m_pipeline)
        return;

    if (m_errorOccured)
        return;

    GST_INFO("[Seek] seek attempt to %f secs", time);

    // Avoid useless seeking.
    if (MediaTime::createWithFloat(time) == currentMediaTime())
        return;

    // Live streams are not seekable.
    if (isLiveStream())
        return;

    GstClockTime clockTime = toGstClockTime(time);
    GST_INFO("[Seek] seeking to %" GST_TIME_FORMAT " (%f)", GST_TIME_ARGS(clockTime), time);

    // A seek arriving while another is in progress: remember the newest
    // target so it can be replayed once the current seek completes.
    if (m_seeking) {
        m_timeOfOverlappingSeek = time;
        if (m_seekIsPending) {
            m_seekTime = time;
            return;
        }
    }

    GstState state;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
    if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
        GST_DEBUG("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
        return;
    }
    // Not prerolled yet (async change / below PAUSED) or past EOS: defer the
    // seek until the pipeline reaches a seekable state.
    if (getStateResult == GST_STATE_CHANGE_ASYNC || state < GST_STATE_PAUSED || m_isEndReached) {
        m_seekIsPending = true;
        if (m_isEndReached) {
            GST_DEBUG("[Seek] reset pipeline");
            m_resetPipeline = true;
            if (!changePipelineState(GST_STATE_PAUSED))
                loadingFailed(MediaPlayer::Empty);
        }
    } else {
        // We can seek now.
        if (!doSeek(clockTime, m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE))) {
            GST_DEBUG("[Seek] seeking to %f failed", time);
            return;
        }
    }

    m_seeking = true;
    m_seekTime = time;
    m_isEndReached = false;
}
528
529 bool MediaPlayerPrivateGStreamer::doSeek(gint64 position, float rate, GstSeekFlags seekType)
530 {
531     gint64 startTime, endTime;
532
533     // TODO: Should do more than that, need to notify the media source
534     // and probably flush the pipeline at least.
535     if (isMediaSource())
536         return true;
537
538     if (rate > 0) {
539         startTime = position;
540         endTime = GST_CLOCK_TIME_NONE;
541     } else {
542         startTime = 0;
543         // If we are at beginning of media, start from the end to
544         // avoid immediate EOS.
545         if (position < 0)
546             endTime = static_cast<gint64>(durationMediaTime().toDouble() * GST_SECOND);
547         else
548             endTime = position;
549     }
550
551     if (!rate)
552         rate = 1.0;
553
554     return gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType,
555         GST_SEEK_TYPE_SET, startTime, GST_SEEK_TYPE_SET, endTime);
556 }
557
// Applies a pending rate change (set via setRate()) with a non-flushing-
// accurate seek at the current position, muting audio for extreme or
// backward rates, and resuming playback if it was rate-paused.
void MediaPlayerPrivateGStreamer::updatePlaybackRate()
{
    if (!m_changingRate)
        return;

    float currentPosition = static_cast<float>(playbackPosition() * GST_SECOND);
    bool mute = false;

    GST_INFO("Set Rate to %f", m_playbackRate);

    if (m_playbackRate > 0) {
        // Mute the sound if the playback rate is too extreme and
        // audio pitch is not adjusted.
        mute = (!m_preservesPitch && (m_playbackRate < 0.8 || m_playbackRate > 2));
    } else {
        // Backward playback: a position of 0 would hit EOS immediately, so
        // signal doSeek() to seek from the duration instead.
        if (currentPosition == 0.0f)
            currentPosition = -1.0f;
        mute = true;
    }

    GST_INFO("Need to mute audio?: %d", (int) mute);
    if (doSeek(currentPosition, m_playbackRate, static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH))) {
        g_object_set(m_pipeline.get(), "mute", mute, nullptr);
        m_lastPlaybackRate = m_playbackRate;
    } else {
        // Seek failed: roll the rate back to the last one that worked.
        m_playbackRate = m_lastPlaybackRate;
        GST_ERROR("Set rate to %f failed", m_playbackRate);
    }

    // If playback was paused because the rate hit zero, resume it now.
    if (m_playbackRatePause) {
        GstState state;
        GstState pending;

        gst_element_get_state(m_pipeline.get(), &state, &pending, 0);
        if (state != GST_STATE_PLAYING && pending != GST_STATE_PLAYING)
            changePipelineState(GST_STATE_PLAYING);
        m_playbackRatePause = false;
    }

    m_changingRate = false;
    m_player->rateChanged();
}
600
// Reports the paused state seen by the MediaPlayer: EOS counts as paused,
// a rate-zero "virtual pause" counts as playing, otherwise ask the pipeline.
bool MediaPlayerPrivateGStreamer::paused() const
{
    if (m_isEndReached) {
        GST_DEBUG("Ignoring pause at EOS");
        return true;
    }

    // Rate was set to 0: the pipeline is paused but playback is logically on.
    if (m_playbackRatePause)
        return false;

    GstState state;
    gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
    return state <= GST_STATE_PAUSED;
}
615
// True while a seek (immediate or deferred) has not yet completed.
bool MediaPlayerPrivateGStreamer::seeking() const
{
    return m_seeking;
}
620
// Signal trampoline: bounce the video-tracks-changed notification onto the
// main thread via the notifier.
void MediaPlayerPrivateGStreamer::videoChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier.notify(MainThreadNotification::VideoChanged, [player] { player->notifyPlayerOfVideo(); });
}
625
// Main-thread handler for playbin's video-changed: refreshes m_hasVideo and
// reconciles m_videoTracks with playbin's current "n-video" count.
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideo()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-video", &numTracks, nullptr);

    m_hasVideo = numTracks > 0;
    if (m_hasVideo)
        m_player->sizeChanged();

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        m_player->client().mediaPlayerEngineUpdated(m_player);
        return;
    }

#if ENABLE(VIDEO_TRACK)
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-video-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        // Reuse an existing track when its pad is unchanged.
        if (i < static_cast<gint>(m_videoTracks.size())) {
            RefPtr<VideoTrackPrivateGStreamer> existingTrack = m_videoTracks[i];
            existingTrack->setIndex(i);
            if (existingTrack->pad() == pad)
                continue;
        }

        // NOTE(review): this appends at the end while the audio/text variants
        // insert at index i — looks inconsistent; confirm it is intentional.
        RefPtr<VideoTrackPrivateGStreamer> track = VideoTrackPrivateGStreamer::create(m_pipeline, i, pad);
        m_videoTracks.append(track);
        m_player->addVideoTrack(*track);
    }

    // Drop surplus tracks when the stream now exposes fewer than before.
    while (static_cast<gint>(m_videoTracks.size()) > numTracks) {
        RefPtr<VideoTrackPrivateGStreamer> track = m_videoTracks.last();
        track->disconnect();
        m_videoTracks.removeLast();
        m_player->removeVideoTrack(*track);
    }
#endif

    m_player->client().mediaPlayerEngineUpdated(m_player);
}
674
// Signal trampoline: bounce the video-caps-changed notification onto the
// main thread via the notifier.
void MediaPlayerPrivateGStreamer::videoSinkCapsChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier.notify(MainThreadNotification::VideoCapsChanged, [player] { player->notifyPlayerOfVideoCaps(); });
}
679
// Main-thread handler for sink caps changes: invalidate the cached natural
// size so it is recomputed on the next query, and ping the client.
void MediaPlayerPrivateGStreamer::notifyPlayerOfVideoCaps()
{
    m_videoSize = IntSize();
    m_player->client().mediaPlayerEngineUpdated(m_player);
}
685
// Signal trampoline: bounce the audio-tracks-changed notification onto the
// main thread via the notifier.
void MediaPlayerPrivateGStreamer::audioChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier.notify(MainThreadNotification::AudioChanged, [player] { player->notifyPlayerOfAudio(); });
}
690
// Main-thread handler for playbin's audio-changed: refreshes m_hasAudio and
// reconciles m_audioTracks with playbin's current "n-audio" count.
void MediaPlayerPrivateGStreamer::notifyPlayerOfAudio()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-audio", &numTracks, nullptr);

    m_hasAudio = numTracks > 0;

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        m_player->client().mediaPlayerEngineUpdated(m_player);
        return;
    }

#if ENABLE(VIDEO_TRACK)
    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-audio-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        // Reuse an existing track when its pad is unchanged.
        if (i < static_cast<gint>(m_audioTracks.size())) {
            RefPtr<AudioTrackPrivateGStreamer> existingTrack = m_audioTracks[i];
            existingTrack->setIndex(i);
            if (existingTrack->pad() == pad)
                continue;
        }

        RefPtr<AudioTrackPrivateGStreamer> track = AudioTrackPrivateGStreamer::create(m_pipeline, i, pad);
        m_audioTracks.insert(i, track);
        m_player->addAudioTrack(*track);
    }

    // Drop surplus tracks when the stream now exposes fewer than before.
    while (static_cast<gint>(m_audioTracks.size()) > numTracks) {
        RefPtr<AudioTrackPrivateGStreamer> track = m_audioTracks.last();
        track->disconnect();
        m_audioTracks.removeLast();
        m_player->removeAudioTrack(*track);
    }
#endif

    m_player->client().mediaPlayerEngineUpdated(m_player);
}
737
738 #if ENABLE(VIDEO_TRACK)
// Signal trampoline: bounce the text-tracks-changed notification onto the
// main thread via the notifier.
void MediaPlayerPrivateGStreamer::textChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->m_notifier.notify(MainThreadNotification::TextChanged, [player] { player->notifyPlayerOfText(); });
}
743
// Main-thread handler for playbin's text-changed: reconciles m_textTracks
// with playbin's current "n-text" count.
void MediaPlayerPrivateGStreamer::notifyPlayerOfText()
{
    if (UNLIKELY(!m_pipeline || !m_source))
        return;

    gint numTracks = 0;
    bool useMediaSource = isMediaSource();
    GstElement* element = useMediaSource ? m_source.get() : m_pipeline.get();
    g_object_get(element, "n-text", &numTracks, nullptr);

    if (useMediaSource) {
        GST_DEBUG("Tracks managed by source element. Bailing out now.");
        return;
    }

    for (gint i = 0; i < numTracks; ++i) {
        GRefPtr<GstPad> pad;
        g_signal_emit_by_name(m_pipeline.get(), "get-text-pad", i, &pad.outPtr(), nullptr);
        ASSERT(pad);

        // Reuse an existing track when its pad is unchanged.
        if (i < static_cast<gint>(m_textTracks.size())) {
            RefPtr<InbandTextTrackPrivateGStreamer> existingTrack = m_textTracks[i];
            existingTrack->setIndex(i);
            if (existingTrack->pad() == pad)
                continue;
        }

        RefPtr<InbandTextTrackPrivateGStreamer> track = InbandTextTrackPrivateGStreamer::create(i, pad);
        m_textTracks.insert(i, track);
        m_player->addTextTrack(*track);
    }

    // Drop surplus tracks when the stream now exposes fewer than before.
    while (static_cast<gint>(m_textTracks.size()) > numTracks) {
        RefPtr<InbandTextTrackPrivateGStreamer> track = m_textTracks.last();
        track->disconnect();
        m_textTracks.removeLast();
        m_player->removeTextTrack(*track);
    }
}
783
// appsink "new-sample" trampoline; always reports GST_FLOW_OK so the text
// sink keeps streaming regardless of how the sample is handled.
GstFlowReturn MediaPlayerPrivateGStreamer::newTextSampleCallback(MediaPlayerPrivateGStreamer* player)
{
    player->newTextSample();
    return GST_FLOW_OK;
}
789
790 void MediaPlayerPrivateGStreamer::newTextSample()
791 {
792     if (!m_textAppSink)
793         return;
794
795     GRefPtr<GstEvent> streamStartEvent = adoptGRef(
796         gst_pad_get_sticky_event(m_textAppSinkPad.get(), GST_EVENT_STREAM_START, 0));
797
798     GRefPtr<GstSample> sample;
799     g_signal_emit_by_name(m_textAppSink.get(), "pull-sample", &sample.outPtr(), nullptr);
800     ASSERT(sample);
801
802     if (streamStartEvent) {
803         bool found = FALSE;
804         const gchar* id;
805         gst_event_parse_stream_start(streamStartEvent.get(), &id);
806         for (size_t i = 0; i < m_textTracks.size(); ++i) {
807             RefPtr<InbandTextTrackPrivateGStreamer> track = m_textTracks[i];
808             if (track->streamId() == id) {
809                 track->handleSample(sample);
810                 found = true;
811                 break;
812             }
813         }
814         if (!found)
815             GST_WARNING("Got sample with unknown stream ID.");
816     } else
817         GST_WARNING("Unable to handle sample with no stream start event.");
818 }
819 #endif
820
// Records a new playback rate and applies it when the pipeline state allows.
// Rate 0 becomes a "virtual pause"; live streams reject rate changes.
void MediaPlayerPrivateGStreamer::setRate(float rate)
{
    // Higher rate causes crash.
    rate = clampTo(rate, -20.0, 20.0);

    // Avoid useless playback rate update.
    if (m_playbackRate == rate) {
        // and make sure that upper layers were notified if rate was set

        if (!m_changingRate && m_player->rate() != m_playbackRate)
            m_player->rateChanged();
        return;
    }

    if (isLiveStream()) {
        // notify upper layers that we cannot handle passed rate.
        m_changingRate = false;
        m_player->rateChanged();
        return;
    }

    GstState state;
    GstState pending;

    m_playbackRate = rate;
    m_changingRate = true;

    gst_element_get_state(m_pipeline.get(), &state, &pending, 0);

    // Rate 0 == virtual pause: park the pipeline in PAUSED and remember to
    // resume in updatePlaybackRate() when a non-zero rate arrives.
    if (!rate) {
        m_changingRate = false;
        m_playbackRatePause = true;
        if (state != GST_STATE_PAUSED && pending != GST_STATE_PAUSED)
            changePipelineState(GST_STATE_PAUSED);
        return;
    }

    // Defer the actual rate seek until the pipeline has settled (the pending
    // change handler will call updatePlaybackRate()).
    if ((state != GST_STATE_PLAYING && state != GST_STATE_PAUSED)
        || (pending == GST_STATE_PAUSED))
        return;

    updatePlaybackRate();
}
864
// Returns the last requested playback rate (not necessarily applied yet).
double MediaPlayerPrivateGStreamer::rate() const
{
    return m_playbackRate;
}
869
// Records whether audio pitch should be preserved on rate changes; consulted
// by updatePlaybackRate() when deciding whether to mute.
void MediaPlayerPrivateGStreamer::setPreservesPitch(bool preservesPitch)
{
    m_preservesPitch = preservesPitch;
}
874
875 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamer::buffered() const
876 {
877     auto timeRanges = std::make_unique<PlatformTimeRanges>();
878     if (m_errorOccured || isLiveStream())
879         return timeRanges;
880
881     float mediaDuration(durationMediaTime().toDouble());
882     if (!mediaDuration || std::isinf(mediaDuration))
883         return timeRanges;
884
885     GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
886
887     if (!gst_element_query(m_pipeline.get(), query)) {
888         gst_query_unref(query);
889         return timeRanges;
890     }
891
892     guint numBufferingRanges = gst_query_get_n_buffering_ranges(query);
893     for (guint index = 0; index < numBufferingRanges; index++) {
894         gint64 rangeStart = 0, rangeStop = 0;
895         if (gst_query_parse_nth_buffering_range(query, index, &rangeStart, &rangeStop))
896             timeRanges->add(MediaTime::createWithDouble((rangeStart * mediaDuration) / GST_FORMAT_PERCENT_MAX),
897                 MediaTime::createWithDouble((rangeStop * mediaDuration) / GST_FORMAT_PERCENT_MAX));
898     }
899
900     // Fallback to the more general maxTimeLoaded() if no range has
901     // been found.
902     if (!timeRanges->length())
903         if (float loaded = maxTimeLoaded())
904             timeRanges->add(MediaTime::zeroTime(), MediaTime::createWithDouble(loaded));
905
906     gst_query_unref(query);
907
908     return timeRanges;
909 }
910
// Central GStreamer bus message handler for the playback pipeline. Routes
// errors, EOS, state changes, buffering updates, missing-plugin requests,
// tags, TOC and MPEG-TS sections to the appropriate helpers.
void MediaPlayerPrivateGStreamer::handleMessage(GstMessage* message)
{
    GUniqueOutPtr<GError> err;
    GUniqueOutPtr<gchar> debug;
    MediaPlayer::NetworkState error;
    bool issueError = true;
    bool attemptNextLocation = false;
    const GstStructure* structure = gst_message_get_structure(message);
    GstState requestedState, currentState;

    m_canFallBackToLastFinishedSeekPosition = false;

    if (structure) {
        const gchar* messageTypeName = gst_structure_get_name(structure);

        // Redirect messages are sent from elements, like qtdemux, to
        // notify of the new location(s) of the media.
        if (!g_strcmp0(messageTypeName, "redirect")) {
            mediaLocationChanged(message);
            return;
        }
    }

    // We ignore state changes from internal elements. They are forwarded to playbin2 anyway.
    bool messageSourceIsPlaybin = GST_MESSAGE_SRC(message) == reinterpret_cast<GstObject*>(m_pipeline.get());

    GST_DEBUG("Message %s received from element %s", GST_MESSAGE_TYPE_NAME(message), GST_MESSAGE_SRC_NAME(message));
    switch (GST_MESSAGE_TYPE(message)) {
    case GST_MESSAGE_ERROR:
        // Ignore errors while the pipeline is being reset, while a plugin
        // installation is pending, or once an error was already reported.
        if (m_resetPipeline || m_missingPluginsCallback || m_errorOccured)
            break;
        gst_message_parse_error(message, &err.outPtr(), &debug.outPtr());
        GST_ERROR("Error %d: %s (url=%s)", err->code, err->message, m_url.string().utf8().data());

        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "webkit-video.error");

        // Map the GError domain/code onto a MediaPlayer network state.
        error = MediaPlayer::Empty;
        if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_CODEC_NOT_FOUND)
            || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_WRONG_TYPE)
            || g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_FAILED)
            || g_error_matches(err.get(), GST_CORE_ERROR, GST_CORE_ERROR_MISSING_PLUGIN)
            || g_error_matches(err.get(), GST_RESOURCE_ERROR, GST_RESOURCE_ERROR_NOT_FOUND))
            error = MediaPlayer::FormatError;
        else if (g_error_matches(err.get(), GST_STREAM_ERROR, GST_STREAM_ERROR_TYPE_NOT_FOUND)) {
            // Let the mediaPlayerClient handle the stream error, in
            // this case the HTMLMediaElement will emit a stalled
            // event.
            GST_ERROR("Decode error, let the Media element emit a stalled event.");
            break;
        } else if (err->domain == GST_STREAM_ERROR) {
            error = MediaPlayer::DecodeError;
            attemptNextLocation = true;
        } else if (err->domain == GST_RESOURCE_ERROR)
            error = MediaPlayer::NetworkError;

        // On stream errors, try any alternative location received via a
        // redirect message before surfacing the failure to the player.
        if (attemptNextLocation)
            issueError = !loadNextLocation();
        if (issueError)
            loadingFailed(error);
        break;
    case GST_MESSAGE_EOS:
        didEnd();
        break;
    case GST_MESSAGE_ASYNC_DONE:
        if (!messageSourceIsPlaybin || m_delayingLoad)
            break;
        asyncStateChangeDone();
        break;
    case GST_MESSAGE_STATE_CHANGED: {
        if (!messageSourceIsPlaybin || m_delayingLoad)
            break;
        updateStates();

        // Construct a filename for the graphviz dot file output.
        GstState newState;
        gst_message_parse_state_changed(message, &currentState, &newState, nullptr);
        CString dotFileName = String::format("webkit-video.%s_%s", gst_element_state_get_name(currentState), gst_element_state_get_name(newState)).utf8();
        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, dotFileName.data());

        break;
    }
    case GST_MESSAGE_BUFFERING:
        processBufferingStats(message);
        break;
    case GST_MESSAGE_DURATION_CHANGED:
        // Duration in MSE is managed by MediaSource, SourceBuffer and AppendPipeline.
        if (messageSourceIsPlaybin && !isMediaSource())
            durationChanged();
        break;
    case GST_MESSAGE_REQUEST_STATE:
        gst_message_parse_request_state(message, &requestedState);
        gst_element_get_state(m_pipeline.get(), &currentState, nullptr, 250 * GST_NSECOND);
        // Only honor requests for a lower state (e.g. an element asking to
        // pause); upward transitions are driven by the player itself.
        if (requestedState < currentState) {
            GUniquePtr<gchar> elementName(gst_element_get_name(GST_ELEMENT(message)));
            GST_INFO("Element %s requested state change to %s", elementName.get(),
                gst_element_state_get_name(requestedState));
            m_requestedState = requestedState;
            if (!changePipelineState(requestedState))
                loadingFailed(MediaPlayer::Empty);
        }
        break;
    case GST_MESSAGE_CLOCK_LOST:
        // This can only happen in PLAYING state and we should just
        // get a new clock by moving back to PAUSED and then to
        // PLAYING again.
        // This can happen if the stream that ends in a sink that
        // provides the current clock disappears, for example if
        // the audio sink provides the clock and the audio stream
        // is disabled. It also happens relatively often with
        // HTTP adaptive streams when switching between different
        // variants of a stream.
        gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
        gst_element_set_state(m_pipeline.get(), GST_STATE_PLAYING);
        break;
    case GST_MESSAGE_LATENCY:
        // Recalculate the latency, we don't need any special handling
        // here other than the GStreamer default.
        // This can happen if the latency of live elements changes, or
        // for one reason or another a new live element is added or
        // removed from the pipeline.
        gst_bin_recalculate_latency(GST_BIN(m_pipeline.get()));
        break;
    case GST_MESSAGE_ELEMENT:
        if (gst_is_missing_plugin_message(message)) {
            if (gst_install_plugins_supported()) {
                // Ask the embedder to install the missing plugin. On success
                // the pipeline is bounced through READY to retry decoding.
                m_missingPluginsCallback = MediaPlayerRequestInstallMissingPluginsCallback::create([this](uint32_t result) {
                    m_missingPluginsCallback = nullptr;
                    if (result != GST_INSTALL_PLUGINS_SUCCESS)
                        return;

                    changePipelineState(GST_STATE_READY);
                    changePipelineState(GST_STATE_PAUSED);
                });
                GUniquePtr<char> detail(gst_missing_plugin_message_get_installer_detail(message));
                GUniquePtr<char> description(gst_missing_plugin_message_get_description(message));
                m_player->client().requestInstallMissingPlugins(String::fromUTF8(detail.get()), String::fromUTF8(description.get()), *m_missingPluginsCallback);
            }
        }
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        else if (gst_structure_has_name(structure, "drm-key-needed")) {
            GST_DEBUG("drm-key-needed message from %s", GST_MESSAGE_SRC_NAME(message));
            GRefPtr<GstEvent> event;
            gst_structure_get(structure, "event", GST_TYPE_EVENT, &event.outPtr(), nullptr);
            handleProtectionEvent(event.get());
        }
#endif
#if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
        else {
            GstMpegtsSection* section = gst_message_parse_mpegts_section(message);
            if (section) {
                processMpegTsSection(section);
                gst_mpegts_section_unref(section);
            }
        }
#endif
        break;
#if ENABLE(VIDEO_TRACK)
    case GST_MESSAGE_TOC:
        processTableOfContents(message);
        break;
#endif
    case GST_MESSAGE_TAG: {
        GstTagList* tags = nullptr;
        GUniqueOutPtr<gchar> tag;
        gst_message_parse_tag(message, &tags);
        // Honor the image-orientation tag so rotated videos render upright.
        if (gst_tag_list_get_string(tags, GST_TAG_IMAGE_ORIENTATION, &tag.outPtr())) {
            if (!g_strcmp0(tag.get(), "rotate-90"))
                setVideoSourceOrientation(ImageOrientation(OriginRightTop));
            else if (!g_strcmp0(tag.get(), "rotate-180"))
                setVideoSourceOrientation(ImageOrientation(OriginBottomRight));
            else if (!g_strcmp0(tag.get(), "rotate-270"))
                setVideoSourceOrientation(ImageOrientation(OriginLeftBottom));
        }
        gst_tag_list_unref(tags);
        break;
    }
    default:
        GST_DEBUG("Unhandled GStreamer message type: %s",
                    GST_MESSAGE_TYPE_NAME(message));
        break;
    }
    return;
}
1094
// Handles GST_MESSAGE_BUFFERING: marks buffering as in progress, caches the
// reported percentage in m_bufferingPercentage, and re-evaluates the
// ready/network states (which may pause or resume the pipeline).
void MediaPlayerPrivateGStreamer::processBufferingStats(GstMessage* message)
{
    m_buffering = true;
    gst_message_parse_buffering(message, &m_bufferingPercentage);

    GST_DEBUG("[Buffering] Buffering: %d%%.", m_bufferingPercentage);

    updateStates();
}
1104
1105 #if ENABLE(VIDEO_TRACK) && USE(GSTREAMER_MPEGTS)
// Converts MPEG-TS sections into in-band metadata text tracks: a PMT section
// rebuilds the set of tracks (keyed by PID), any other section is forwarded
// to the matching track as a raw data cue.
void MediaPlayerPrivateGStreamer::processMpegTsSection(GstMpegtsSection* section)
{
    ASSERT(section);

    if (section->section_type == GST_MPEGTS_SECTION_PMT) {
        const GstMpegtsPMT* pmt = gst_mpegts_section_get_pmt(section);
        // A new program map replaces all previously exposed metadata tracks.
        m_metadataTracks.clear();
        for (guint i = 0; i < pmt->streams->len; ++i) {
            const GstMpegtsPMTStream* stream = static_cast<const GstMpegtsPMTStream*>(g_ptr_array_index(pmt->streams, i));
            // Only private-data (0x05) and user-private (>= 0x80) stream
            // types are surfaced as metadata tracks.
            if (stream->stream_type == 0x05 || stream->stream_type >= 0x80) {
                AtomicString pid = String::number(stream->pid);
                RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = InbandMetadataTextTrackPrivateGStreamer::create(
                    InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data, pid);

                // 4.7.10.12.2 Sourcing in-band text tracks
                // If the new text track's kind is metadata, then set the text track in-band metadata track dispatch
                // type as follows, based on the type of the media resource:
                // Let stream type be the value of the "stream_type" field describing the text track's type in the
                // file's program map section, interpreted as an 8-bit unsigned integer. Let length be the value of
                // the "ES_info_length" field for the track in the same part of the program map section, interpreted
                // as an integer as defined by the MPEG-2 specification. Let descriptor bytes be the length bytes
                // following the "ES_info_length" field. The text track in-band metadata track dispatch type must be
                // set to the concatenation of the stream type byte and the zero or more descriptor bytes bytes,
                // expressed in hexadecimal using uppercase ASCII hex digits.
                String inbandMetadataTrackDispatchType;
                appendUnsignedAsHexFixedSize(stream->stream_type, inbandMetadataTrackDispatchType, 2);
                for (guint j = 0; j < stream->descriptors->len; ++j) {
                    const GstMpegtsDescriptor* descriptor = static_cast<const GstMpegtsDescriptor*>(g_ptr_array_index(stream->descriptors, j));
                    for (guint k = 0; k < descriptor->length; ++k)
                        appendByteAsHex(descriptor->data[k], inbandMetadataTrackDispatchType);
                }
                track->setInBandMetadataTrackDispatchType(inbandMetadataTrackDispatchType);

                m_metadataTracks.add(pid, track);
                m_player->addTextTrack(*track);
            }
        }
    } else {
        // Non-PMT section: look up the track by PID; sections for PIDs we do
        // not expose are silently dropped.
        AtomicString pid = String::number(section->pid);
        RefPtr<InbandMetadataTextTrackPrivateGStreamer> track = m_metadataTracks.get(pid);
        if (!track)
            return;

        GRefPtr<GBytes> data = gst_mpegts_section_get_data(section);
        gsize size;
        const void* bytes = g_bytes_get_data(data.get(), &size);

        // Deliver the raw section payload as a zero-length data cue at the
        // current playback position.
        track->addDataCue(currentMediaTime(), currentMediaTime(), bytes, size);
    }
}
1156 #endif
1157
1158 #if ENABLE(VIDEO_TRACK)
1159 void MediaPlayerPrivateGStreamer::processTableOfContents(GstMessage* message)
1160 {
1161     if (m_chaptersTrack)
1162         m_player->removeTextTrack(*m_chaptersTrack);
1163
1164     m_chaptersTrack = InbandMetadataTextTrackPrivateGStreamer::create(InbandTextTrackPrivate::Chapters, InbandTextTrackPrivate::Generic);
1165     m_player->addTextTrack(*m_chaptersTrack);
1166
1167     GRefPtr<GstToc> toc;
1168     gboolean updated;
1169     gst_message_parse_toc(message, &toc.outPtr(), &updated);
1170     ASSERT(toc);
1171
1172     for (GList* i = gst_toc_get_entries(toc.get()); i; i = i->next)
1173         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1174 }
1175
1176 void MediaPlayerPrivateGStreamer::processTableOfContentsEntry(GstTocEntry* entry)
1177 {
1178     ASSERT(entry);
1179
1180     auto cue = GenericCueData::create();
1181
1182     gint64 start = -1, stop = -1;
1183     gst_toc_entry_get_start_stop_times(entry, &start, &stop);
1184     if (start != -1)
1185         cue->setStartTime(MediaTime(start, GST_SECOND));
1186     if (stop != -1)
1187         cue->setEndTime(MediaTime(stop, GST_SECOND));
1188
1189     GstTagList* tags = gst_toc_entry_get_tags(entry);
1190     if (tags) {
1191         gchar* title =  nullptr;
1192         gst_tag_list_get_string(tags, GST_TAG_TITLE, &title);
1193         if (title) {
1194             cue->setContent(title);
1195             g_free(title);
1196         }
1197     }
1198
1199     m_chaptersTrack->addGenericCue(cue);
1200
1201     for (GList* i = gst_toc_entry_get_sub_entries(entry); i; i = i->next)
1202         processTableOfContentsEntry(static_cast<GstTocEntry*>(i->data));
1203 }
1204 #endif
1205
1206 void MediaPlayerPrivateGStreamer::fillTimerFired()
1207 {
1208     GstQuery* query = gst_query_new_buffering(GST_FORMAT_PERCENT);
1209
1210     if (!gst_element_query(m_pipeline.get(), query)) {
1211         gst_query_unref(query);
1212         return;
1213     }
1214
1215     gint64 start, stop;
1216     gdouble fillStatus = 100.0;
1217
1218     gst_query_parse_buffering_range(query, nullptr, &start, &stop, nullptr);
1219     gst_query_unref(query);
1220
1221     if (stop != -1)
1222         fillStatus = 100.0 * stop / GST_FORMAT_PERCENT_MAX;
1223
1224     GST_DEBUG("[Buffering] Download buffer filled up to %f%%", fillStatus);
1225
1226     float mediaDuration = durationMediaTime().toDouble();
1227
1228     // Update maxTimeLoaded only if the media duration is
1229     // available. Otherwise we can't compute it.
1230     if (mediaDuration) {
1231         if (fillStatus == 100.0)
1232             m_maxTimeLoaded = mediaDuration;
1233         else
1234             m_maxTimeLoaded = static_cast<float>((fillStatus * mediaDuration) / 100.0);
1235         GST_DEBUG("[Buffering] Updated maxTimeLoaded: %f", m_maxTimeLoaded);
1236     }
1237
1238     m_downloadFinished = fillStatus == 100.0;
1239     if (!m_downloadFinished) {
1240         updateStates();
1241         return;
1242     }
1243
1244     // Media is now fully loaded. It will play even if network
1245     // connection is cut. Buffering is done, remove the fill source
1246     // from the main loop.
1247     m_fillTimer.stop();
1248     updateStates();
1249 }
1250
1251 float MediaPlayerPrivateGStreamer::maxTimeSeekable() const
1252 {
1253     if (m_errorOccured)
1254         return 0.0f;
1255
1256     float mediaDuration = durationMediaTime().toDouble();
1257     GST_DEBUG("maxTimeSeekable, duration: %f", mediaDuration);
1258     // infinite duration means live stream
1259     if (std::isinf(mediaDuration))
1260         return 0.0f;
1261
1262     return mediaDuration;
1263 }
1264
1265 float MediaPlayerPrivateGStreamer::maxTimeLoaded() const
1266 {
1267     if (m_errorOccured)
1268         return 0.0f;
1269
1270     float loaded = m_maxTimeLoaded;
1271     if (m_isEndReached)
1272         loaded = durationMediaTime().toDouble();
1273     GST_DEBUG("maxTimeLoaded: %f", loaded);
1274     return loaded;
1275 }
1276
1277 bool MediaPlayerPrivateGStreamer::didLoadingProgress() const
1278 {
1279     if (UNLIKELY(!m_pipeline || !durationMediaTime() || (!isMediaSource() && !totalBytes())))
1280         return false;
1281     float currentMaxTimeLoaded = maxTimeLoaded();
1282     bool didLoadingProgress = currentMaxTimeLoaded != m_maxTimeLoadedAtLastDidLoadingProgress;
1283     m_maxTimeLoadedAtLastDidLoadingProgress = currentMaxTimeLoaded;
1284     GST_DEBUG("didLoadingProgress: %d", didLoadingProgress);
1285     return didLoadingProgress;
1286 }
1287
// Returns the total media size in bytes, caching the result in m_totalBytes.
// Queries the source element first and falls back to probing each source pad
// individually. A reported length of zero marks the media as streaming.
unsigned long long MediaPlayerPrivateGStreamer::totalBytes() const
{
    if (m_errorOccured)
        return 0;

    // Return the cached value once it has been computed.
    if (m_totalBytes)
        return m_totalBytes;

    if (!m_source)
        return 0;

    GstFormat fmt = GST_FORMAT_BYTES;
    gint64 length = 0;
    if (gst_element_query_duration(m_source.get(), fmt, &length)) {
        GST_INFO("totalBytes %" G_GINT64_FORMAT, length);
        m_totalBytes = static_cast<unsigned long long>(length);
        m_isStreaming = !length;
        return m_totalBytes;
    }

    // Fall back to querying the source pads manually.
    // See also https://bugzilla.gnome.org/show_bug.cgi?id=638749
    GstIterator* iter = gst_element_iterate_src_pads(m_source.get());
    bool done = false;
    while (!done) {
        GValue item = G_VALUE_INIT;
        switch (gst_iterator_next(iter, &item)) {
        case GST_ITERATOR_OK: {
            GstPad* pad = static_cast<GstPad*>(g_value_get_object(&item));
            gint64 padLength = 0;
            // Keep the largest duration reported by any pad.
            if (gst_pad_query_duration(pad, fmt, &padLength) && padLength > length)
                length = padLength;
            break;
        }
        case GST_ITERATOR_RESYNC:
            // The pad list changed underneath us; restart the iteration.
            gst_iterator_resync(iter);
            break;
        case GST_ITERATOR_ERROR:
            FALLTHROUGH;
        case GST_ITERATOR_DONE:
            done = true;
            break;
        }

        g_value_unset(&item);
    }

    gst_iterator_free(iter);

    GST_INFO("totalBytes %" G_GINT64_FORMAT, length);
    m_totalBytes = static_cast<unsigned long long>(length);
    m_isStreaming = !length;
    return m_totalBytes;
}
1342
// Static trampoline for playbin's "notify::source" signal; forwards to the
// player's sourceChanged() member function.
void MediaPlayerPrivateGStreamer::sourceChangedCallback(MediaPlayerPrivateGStreamer* player)
{
    player->sourceChanged();
}
1347
// "element-added" handler on the uridecodebin: when the GstDownloadBuffer
// element appears, retarget its temporary-file template at /var/tmp and purge
// stale files left over under the previous template.
void MediaPlayerPrivateGStreamer::uriDecodeBinElementAddedCallback(GstBin* bin, GstElement* element, MediaPlayerPrivateGStreamer* player)
{
    // Only the download buffer element is of interest here.
    if (g_strcmp0(G_OBJECT_CLASS_NAME(G_OBJECT_GET_CLASS(G_OBJECT(element))), "GstDownloadBuffer"))
        return;

    player->m_downloadBuffer = element;
    // This callback only needs to fire once; from now on watch for the
    // temporary file being created instead.
    g_signal_handlers_disconnect_by_func(bin, reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), player);
    g_signal_connect_swapped(element, "notify::temp-location", G_CALLBACK(downloadBufferFileCreatedCallback), player);

    GUniqueOutPtr<char> oldDownloadTemplate;
    g_object_get(element, "temp-template", &oldDownloadTemplate.outPtr(), nullptr);

    GUniquePtr<char> newDownloadTemplate(g_build_filename(G_DIR_SEPARATOR_S, "var", "tmp", "WebKit-Media-XXXXXX", nullptr));
    g_object_set(element, "temp-template", newDownloadTemplate.get(), nullptr);
    GST_TRACE("Reconfigured file download template from '%s' to '%s'", oldDownloadTemplate.get(), newDownloadTemplate.get());

    player->purgeOldDownloadFiles(oldDownloadTemplate.get());
}
1366
// "notify::temp-location" handler: as soon as the download buffer has created
// its temporary file, unlink it — presumably the unlink-while-open pattern so
// the data is reclaimed automatically when the element closes the descriptor.
void MediaPlayerPrivateGStreamer::downloadBufferFileCreatedCallback(MediaPlayerPrivateGStreamer* player)
{
    ASSERT(player->m_downloadBuffer);

    // One-shot handler: no further notifications are needed.
    g_signal_handlers_disconnect_by_func(player->m_downloadBuffer.get(), reinterpret_cast<gpointer>(downloadBufferFileCreatedCallback), player);

    GUniqueOutPtr<char> downloadFile;
    g_object_get(player->m_downloadBuffer.get(), "temp-location", &downloadFile.outPtr(), nullptr);
    player->m_downloadBuffer = nullptr;

    if (UNLIKELY(!deleteFile(downloadFile.get()))) {
        GST_WARNING("Couldn't unlink media temporary file %s after creation", downloadFile.get());
        return;
    }

    GST_TRACE("Unlinked media temporary file %s after creation", downloadFile.get());
}
1384
1385 void MediaPlayerPrivateGStreamer::purgeOldDownloadFiles(const char* downloadFileTemplate)
1386 {
1387     if (!downloadFileTemplate)
1388         return;
1389
1390     GUniquePtr<char> templatePath(g_path_get_dirname(downloadFileTemplate));
1391     GUniquePtr<char> templateFile(g_path_get_basename(downloadFileTemplate));
1392     String templatePattern = String(templateFile.get()).replace("X", "?");
1393
1394     for (auto& filePath : listDirectory(templatePath.get(), templatePattern)) {
1395         if (UNLIKELY(!deleteFile(filePath))) {
1396             GST_WARNING("Couldn't unlink legacy media temporary file: %s", filePath.utf8().data());
1397             continue;
1398         }
1399
1400         GST_TRACE("Unlinked legacy media temporary file: %s", filePath.utf8().data());
1401     }
1402 }
1403
// Invoked when playbin's "source" property changes. Re-fetches the source
// element and, when it is the WebKit source, attaches it to the MediaPlayer
// and watches its parent bin for the download buffer element.
void MediaPlayerPrivateGStreamer::sourceChanged()
{
    // Drop the element-added handler installed for the previous source.
    if (WEBKIT_IS_WEB_SRC(m_source.get()) && GST_OBJECT_PARENT(m_source.get()))
        g_signal_handlers_disconnect_by_func(GST_ELEMENT_PARENT(m_source.get()), reinterpret_cast<gpointer>(uriDecodeBinElementAddedCallback), this);

    m_source.clear();
    g_object_get(m_pipeline.get(), "source", &m_source.outPtr(), nullptr);

    if (WEBKIT_IS_WEB_SRC(m_source.get())) {
        webKitWebSrcSetMediaPlayer(WEBKIT_WEB_SRC(m_source.get()), m_player);
        g_signal_connect(GST_ELEMENT_PARENT(m_source.get()), "element-added", G_CALLBACK(uriDecodeBinElementAddedCallback), this);
    }
}
1417
1418 bool MediaPlayerPrivateGStreamer::hasSingleSecurityOrigin() const
1419 {
1420     if (!m_source)
1421         return false;
1422
1423     if (!WEBKIT_IS_WEB_SRC(m_source.get()))
1424         return true;
1425
1426     GUniqueOutPtr<char> originalURI, resolvedURI;
1427     g_object_get(m_source.get(), "location", &originalURI.outPtr(), "resolved-location", &resolvedURI.outPtr(), nullptr);
1428     if (!originalURI || !resolvedURI)
1429         return false;
1430     if (!g_strcmp0(originalURI.get(), resolvedURI.get()))
1431         return true;
1432
1433     Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::createFromString(String::fromUTF8(resolvedURI.get())));
1434     Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(String::fromUTF8(originalURI.get())));
1435     return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
1436 }
1437
1438 void MediaPlayerPrivateGStreamer::cancelLoad()
1439 {
1440     if (m_networkState < MediaPlayer::Loading || m_networkState == MediaPlayer::Loaded)
1441         return;
1442
1443     if (m_pipeline)
1444         changePipelineState(GST_STATE_READY);
1445 }
1446
// Called when the pipeline finishes an asynchronous state change. Completes
// an in-flight seek — possibly chaining into an overlapping seek that arrived
// while the first was still running — or simply refreshes the player states.
void MediaPlayerPrivateGStreamer::asyncStateChangeDone()
{
    if (!m_pipeline || m_errorOccured)
        return;

    if (m_seeking) {
        if (m_seekIsPending)
            updateStates();
        else {
            GST_DEBUG("[Seek] seeked to %f", m_seekTime);
            m_seeking = false;
            // Another seek was requested while this one was in flight; issue
            // it now unless it targets the exact same position.
            if (m_timeOfOverlappingSeek != m_seekTime && m_timeOfOverlappingSeek != -1) {
                seek(m_timeOfOverlappingSeek);
                m_timeOfOverlappingSeek = -1;
                return;
            }
            m_timeOfOverlappingSeek = -1;

            // The pipeline can still have a pending state. In this case a position query will fail.
            // Right now we can use m_seekTime as a fallback.
            m_canFallBackToLastFinishedSeekPosition = true;
            timeChanged();
        }
    } else
        updateStates();
}
1473
// Re-derives the MediaPlayer ready/network states from the pipeline's
// current GStreamer state plus the buffering/download bookkeeping, notifying
// the player of every observable transition. Also applies the playback rate
// and commits any deferred seek once the pipeline has settled.
void MediaPlayerPrivateGStreamer::updateStates()
{
    if (!m_pipeline)
        return;

    if (m_errorOccured)
        return;

    MediaPlayer::NetworkState oldNetworkState = m_networkState;
    MediaPlayer::ReadyState oldReadyState = m_readyState;
    GstState state;
    GstState pending;

    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);

    bool shouldUpdatePlaybackState = false;
    switch (getStateResult) {
    case GST_STATE_CHANGE_SUCCESS: {
        GST_DEBUG("State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));

        // Do nothing if on EOS and state changed to READY to avoid recreating the player
        // on HTMLMediaElement and properly generate the video 'ended' event.
        if (m_isEndReached && state == GST_STATE_READY)
            break;

        m_resetPipeline = state <= GST_STATE_READY;

        // Remember whether we were buffering before this update; used below
        // to decide whether playback should be restarted.
        bool didBuffering = m_buffering;

        // Update ready and network states.
        switch (state) {
        case GST_STATE_NULL:
            m_readyState = MediaPlayer::HaveNothing;
            m_networkState = MediaPlayer::Empty;
            break;
        case GST_STATE_READY:
            m_readyState = MediaPlayer::HaveMetadata;
            m_networkState = MediaPlayer::Empty;
            break;
        case GST_STATE_PAUSED:
        case GST_STATE_PLAYING:
            if (m_buffering) {
                if (m_bufferingPercentage == 100) {
                    GST_DEBUG("[Buffering] Complete.");
                    m_buffering = false;
                    m_readyState = MediaPlayer::HaveEnoughData;
                    m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
                } else {
                    m_readyState = MediaPlayer::HaveCurrentData;
                    m_networkState = MediaPlayer::Loading;
                }
            } else if (m_downloadFinished) {
                m_readyState = MediaPlayer::HaveEnoughData;
                m_networkState = MediaPlayer::Loaded;
            } else {
                m_readyState = MediaPlayer::HaveFutureData;
                m_networkState = MediaPlayer::Loading;
            }

            break;
        default:
            ASSERT_NOT_REACHED();
            break;
        }

        // Sync states where needed.
        if (state == GST_STATE_PAUSED) {
            if (!m_volumeAndMuteInitialized) {
                notifyPlayerOfVolumeChange();
                notifyPlayerOfMute();
                m_volumeAndMuteInitialized = true;
            }

            // Buffering just completed while the element wants to play: resume.
            if (didBuffering && !m_buffering && !m_paused && m_playbackRate) {
                GST_DEBUG("[Buffering] Restarting playback.");
                changePipelineState(GST_STATE_PLAYING);
            }
        } else if (state == GST_STATE_PLAYING) {
            m_paused = false;

            if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
                GST_DEBUG("[Buffering] Pausing stream for buffering.");
                changePipelineState(GST_STATE_PAUSED);
            }
        } else
            m_paused = true;

        if (m_requestedState == GST_STATE_PAUSED && state == GST_STATE_PAUSED) {
            shouldUpdatePlaybackState = true;
            GST_DEBUG("Requested state change to %s was completed", gst_element_state_get_name(state));
        }

        break;
    }
    case GST_STATE_CHANGE_ASYNC:
        GST_DEBUG("Async: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
        // Change in progress.
        break;
    case GST_STATE_CHANGE_FAILURE:
        GST_DEBUG("Failure: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
        // Change failed
        return;
    case GST_STATE_CHANGE_NO_PREROLL:
        GST_DEBUG("No preroll: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));

        // Live pipelines go in PAUSED without prerolling.
        m_isStreaming = true;
        setDownloadBuffering();

        if (state == GST_STATE_READY)
            m_readyState = MediaPlayer::HaveNothing;
        else if (state == GST_STATE_PAUSED) {
            m_readyState = MediaPlayer::HaveEnoughData;
            m_paused = true;
        } else if (state == GST_STATE_PLAYING)
            m_paused = false;

        if (!m_paused && m_playbackRate)
            changePipelineState(GST_STATE_PLAYING);

        m_networkState = MediaPlayer::Loading;
        break;
    default:
        GST_DEBUG("Else : %d", getStateResult);
        break;
    }

    m_requestedState = GST_STATE_VOID_PENDING;

    if (shouldUpdatePlaybackState)
        m_player->playbackStateChanged();

    if (m_networkState != oldNetworkState) {
        GST_DEBUG("Network State Changed from %u to %u", oldNetworkState, m_networkState);
        m_player->networkStateChanged();
    }
    if (m_readyState != oldReadyState) {
        GST_DEBUG("Ready State Changed from %u to %u", oldReadyState, m_readyState);
        m_player->readyStateChanged();
    }

    // Once the pipeline has synchronously reached (at least) PAUSED it is
    // safe to apply the playback rate and commit any deferred seek.
    if (getStateResult == GST_STATE_CHANGE_SUCCESS && state >= GST_STATE_PAUSED) {
        updatePlaybackRate();
        if (m_seekIsPending) {
            GST_DEBUG("[Seek] committing pending seek to %f", m_seekTime);
            m_seekIsPending = false;
            m_seeking = doSeek(toGstClockTime(m_seekTime), m_player->rate(), static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE));
            if (!m_seeking)
                GST_DEBUG("[Seek] seeking to %f failed", m_seekTime);
        }
    }
}
1626
1627 void MediaPlayerPrivateGStreamer::mediaLocationChanged(GstMessage* message)
1628 {
1629     if (m_mediaLocations)
1630         gst_structure_free(m_mediaLocations);
1631
1632     const GstStructure* structure = gst_message_get_structure(message);
1633     if (structure) {
1634         // This structure can contain:
1635         // - both a new-location string and embedded locations structure
1636         // - or only a new-location string.
1637         m_mediaLocations = gst_structure_copy(structure);
1638         const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
1639
1640         if (locations)
1641             m_mediaLocationCurrentIndex = static_cast<int>(gst_value_list_get_size(locations)) -1;
1642
1643         loadNextLocation();
1644     }
1645 }
1646
1647 bool MediaPlayerPrivateGStreamer::loadNextLocation()
1648 {
1649     if (!m_mediaLocations)
1650         return false;
1651
1652     const GValue* locations = gst_structure_get_value(m_mediaLocations, "locations");
1653     const gchar* newLocation = nullptr;
1654
1655     if (!locations) {
1656         // Fallback on new-location string.
1657         newLocation = gst_structure_get_string(m_mediaLocations, "new-location");
1658         if (!newLocation)
1659             return false;
1660     }
1661
1662     if (!newLocation) {
1663         if (m_mediaLocationCurrentIndex < 0) {
1664             m_mediaLocations = nullptr;
1665             return false;
1666         }
1667
1668         const GValue* location = gst_value_list_get_value(locations,
1669                                                           m_mediaLocationCurrentIndex);
1670         const GstStructure* structure = gst_value_get_structure(location);
1671
1672         if (!structure) {
1673             m_mediaLocationCurrentIndex--;
1674             return false;
1675         }
1676
1677         newLocation = gst_structure_get_string(structure, "new-location");
1678     }
1679
1680     if (newLocation) {
1681         // Found a candidate. new-location is not always an absolute url
1682         // though. We need to take the base of the current url and
1683         // append the value of new-location to it.
1684         URL baseUrl = gst_uri_is_valid(newLocation) ? URL() : m_url;
1685         URL newUrl = URL(baseUrl, newLocation);
1686
1687         RefPtr<SecurityOrigin> securityOrigin = SecurityOrigin::create(m_url);
1688         if (securityOrigin->canRequest(newUrl)) {
1689             GST_INFO("New media url: %s", newUrl.string().utf8().data());
1690
1691             // Reset player states.
1692             m_networkState = MediaPlayer::Loading;
1693             m_player->networkStateChanged();
1694             m_readyState = MediaPlayer::HaveNothing;
1695             m_player->readyStateChanged();
1696
1697             // Reset pipeline state.
1698             m_resetPipeline = true;
1699             changePipelineState(GST_STATE_READY);
1700
1701             GstState state;
1702             gst_element_get_state(m_pipeline.get(), &state, nullptr, 0);
1703             if (state <= GST_STATE_READY) {
1704                 // Set the new uri and start playing.
1705                 g_object_set(m_pipeline.get(), "uri", newUrl.string().utf8().data(), nullptr);
1706                 m_url = newUrl;
1707                 changePipelineState(GST_STATE_PLAYING);
1708                 return true;
1709             }
1710         } else
1711             GST_INFO("Not allowed to load new media location: %s", newUrl.string().utf8().data());
1712     }
1713     m_mediaLocationCurrentIndex--;
1714     return false;
1715 }
1716
// Called when loading-related pipeline activity happens; recomputes the
// network/ready states from the current pipeline state.
void MediaPlayerPrivateGStreamer::loadStateChanged()
{
    updateStates();
}
1721
// Refreshes the internal states and then notifies the MediaPlayer that the
// current time changed (e.g. after a seek completes or playback ends).
void MediaPlayerPrivateGStreamer::timeChanged()
{
    updateStates();
    m_player->timeChanged();
}
1727
// Handles end-of-stream: reconciles position/duration, flags EOS, and — for
// non-looping playback — winds the pipeline down to READY.
void MediaPlayerPrivateGStreamer::didEnd()
{
    // Synchronize position and duration values to not confuse the
    // HTMLMediaElement. In some cases like reverse playback the
    // position is not always reported as 0 for instance.
    MediaTime now = currentMediaTime();
    if (now > MediaTime { } && now <= durationMediaTime())
        m_player->durationChanged();

    m_isEndReached = true;
    timeChanged();

    if (!m_player->client().mediaPlayerIsLooping()) {
        m_paused = true;
        // Cache the duration before leaving the playing states — presumably
        // the duration query is unreliable once the pipeline is in READY;
        // confirm against durationMediaTime()'s use of m_durationAtEOS.
        m_durationAtEOS = durationMediaTime().toDouble();
        changePipelineState(GST_STATE_READY);
        m_downloadFinished = false;
    }
}
1747
1748 void MediaPlayerPrivateGStreamer::durationChanged()
1749 {
1750     float previousDuration = durationMediaTime().toDouble();
1751
1752     // Avoid emiting durationchanged in the case where the previous
1753     // duration was 0 because that case is already handled by the
1754     // HTMLMediaElement.
1755     if (previousDuration && durationMediaTime().toDouble() != previousDuration)
1756         m_player->durationChanged();
1757 }
1758
// Records a loading failure: raises the error flag, moves network/ready
// states to the failed values (notifying the MediaPlayer only on actual
// transitions), and cancels the pending ready timer.
void MediaPlayerPrivateGStreamer::loadingFailed(MediaPlayer::NetworkState error)
{
    m_errorOccured = true;
    if (m_networkState != error) {
        m_networkState = error;
        m_player->networkStateChanged();
    }
    if (m_readyState != MediaPlayer::HaveNothing) {
        m_readyState = MediaPlayer::HaveNothing;
        m_player->readyStateChanged();
    }

    // Loading failed, remove ready timer.
    m_readyTimerHandler.stop();
}
1774
// Returns the process-wide set of MIME types this backend can play. The set
// is computed once, on first use, by probing the GStreamer registry for the
// decoder/demuxer element factories actually installed.
static HashSet<String, ASCIICaseInsensitiveHash>& mimeTypeSet()
{
    static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> mimeTypes = []()
    {
        initializeGStreamerAndRegisterWebKitElements();
        HashSet<String, ASCIICaseInsensitiveHash> set;

        // Factories ranked below MARGINAL are ignored.
        GList* audioDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO, GST_RANK_MARGINAL);
        GList* videoDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, GST_RANK_MARGINAL);
        GList* demuxerFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DEMUXER, GST_RANK_MARGINAL);

        enum ElementType {
            AudioDecoder = 0,
            VideoDecoder,
            Demuxer
        };
        // Maps a GStreamer caps string to the web-facing MIME types it
        // implies; an empty list means the caps string doubles as the type.
        struct GstCapsWebKitMapping {
            ElementType elementType;
            const char* capsString;
            Vector<AtomicString> webkitMimeTypes;
        };

        Vector<GstCapsWebKitMapping> mapping = {
            {AudioDecoder, "audio/midi", {"audio/midi", "audio/riff-midi"}},
            {AudioDecoder, "audio/x-sbc", { }},
            {AudioDecoder, "audio/x-sid", { }},
            {AudioDecoder, "audio/x-flac", {"audio/x-flac", "audio/flac"}},
            {AudioDecoder, "audio/x-wav", {"audio/x-wav", "audio/wav"}},
            {AudioDecoder, "audio/x-wavpack", {"audio/x-wavpack"}},
            {AudioDecoder, "audio/x-speex", {"audio/speex", "audio/x-speex"}},
            {AudioDecoder, "audio/x-ac3", { }},
            {AudioDecoder, "audio/x-eac3", {"audio/x-ac3"}},
            {AudioDecoder, "audio/x-dts", { }},
            {VideoDecoder, "video/x-h264, profile=(string)high", {"video/mp4", "video/x-m4v"}},
            {VideoDecoder, "video/x-msvideocodec", {"video/x-msvideo"}},
            {VideoDecoder, "video/x-h263", { }},
            {VideoDecoder, "video/mpegts", { }},
            {VideoDecoder, "video/mpeg, mpegversion=(int){1,2}, systemstream=(boolean)false", {"video/mpeg"}},
            {VideoDecoder, "video/x-dirac", { }},
            {VideoDecoder, "video/x-flash-video", {"video/flv", "video/x-flv"}},
            {Demuxer, "video/quicktime", { }},
            {Demuxer, "video/quicktime, variant=(string)3gpp", {"video/3gpp"}},
            {Demuxer, "application/x-3gp", { }},
            {Demuxer, "video/x-ms-asf", { }},
            {Demuxer, "audio/x-aiff", { }},
            {Demuxer, "application/x-pn-realaudio", { }},
            {Demuxer, "application/vnd.rn-realmedia", { }},
            {Demuxer, "audio/x-wav", {"audio/x-wav", "audio/wav"}},
            {Demuxer, "application/x-hls", {"application/vnd.apple.mpegurl", "application/x-mpegurl"}}
        };

        for (auto& current : mapping) {
            GList* factories = demuxerFactories;
            if (current.elementType == AudioDecoder)
                factories = audioDecoderFactories;
            else if (current.elementType == VideoDecoder)
                factories = videoDecoderFactories;

            if (gstRegistryHasElementForMediaType(factories, current.capsString)) {
                if (!current.webkitMimeTypes.isEmpty()) {
                    for (const auto& mimeType : current.webkitMimeTypes)
                        set.add(mimeType);
                } else
                    set.add(AtomicString(current.capsString));
            }
        }

        // Codec support is remembered so container types (WebM, Ogg) below
        // are only advertised when a matching decoder is present.
        bool opusSupported = false;
        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-opus")) {
            opusSupported = true;
            set.add(AtomicString("audio/opus"));
        }

        bool vorbisSupported = false;
        if (gstRegistryHasElementForMediaType(demuxerFactories, "application/ogg")) {
            set.add(AtomicString("application/ogg"));

            vorbisSupported = gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/x-vorbis");
            if (vorbisSupported) {
                set.add(AtomicString("audio/ogg"));
                set.add(AtomicString("audio/x-vorbis+ogg"));
            }

            if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-theora"))
                set.add(AtomicString("video/ogg"));
        }

        bool audioMpegSupported = false;
        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int)1, layer=(int)[1, 3]")) {
            audioMpegSupported = true;
            set.add(AtomicString("audio/mp1"));
            set.add(AtomicString("audio/mp3"));
            set.add(AtomicString("audio/x-mp3"));
        }

        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int){2, 4}")) {
            audioMpegSupported = true;
            set.add(AtomicString("audio/aac"));
            set.add(AtomicString("audio/mp2"));
            set.add(AtomicString("audio/mp4"));
            set.add(AtomicString("audio/x-m4a"));
        }

        if (audioMpegSupported) {
            set.add(AtomicString("audio/mpeg"));
            set.add(AtomicString("audio/x-mpeg"));
        }

        if (gstRegistryHasElementForMediaType(demuxerFactories, "video/x-matroska")) {
            set.add(AtomicString("video/x-matroska"));

            if (gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp8")
                || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp9")
                || gstRegistryHasElementForMediaType(videoDecoderFactories, "video/x-vp10"))
                set.add(AtomicString("video/webm"));

            if (vorbisSupported || opusSupported)
                set.add(AtomicString("audio/webm"));
        }

        // The factory lists returned above are owned by us and must be freed.
        gst_plugin_feature_list_free(audioDecoderFactories);
        gst_plugin_feature_list_free(videoDecoderFactories);
        gst_plugin_feature_list_free(demuxerFactories);
        return set;
    }();
    return mimeTypes;
}
1902
// Copies the lazily-computed supported MIME type set into the caller's set.
void MediaPlayerPrivateGStreamer::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    types = mimeTypeSet();
}
1907
// Answers whether this engine can play content described by |parameters|.
// MSE and MediaStream content is explicitly declined here because dedicated
// players handle those cases.
MediaPlayer::SupportsType MediaPlayerPrivateGStreamer::supportsType(const MediaEngineSupportParameters& parameters)
{
    MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
#if ENABLE(MEDIA_SOURCE)
    // MediaPlayerPrivateGStreamerMSE is in charge of mediasource playback, not us.
    if (parameters.isMediaSource)
        return result;
#endif

    // MediaStream playback is handled by the OpenWebRTC player.
    if (parameters.isMediaStream)
        return result;

    if (parameters.type.isNull() || parameters.type.isEmpty())
        return result;

    // spec says we should not return "probably" if the codecs string is empty
    if (mimeTypeSet().contains(parameters.type))
        result = parameters.codecs.isEmpty() ? MediaPlayer::MayBeSupported : MediaPlayer::IsSupported;

    // Give platform-specific code a chance to refine the answer.
    return extendedSupportsType(parameters, result);
}
1930
1931 void MediaPlayerPrivateGStreamer::setDownloadBuffering()
1932 {
1933     if (!m_pipeline)
1934         return;
1935
1936     unsigned flags;
1937     g_object_get(m_pipeline.get(), "flags", &flags, nullptr);
1938
1939     unsigned flagDownload = getGstPlayFlag("download");
1940
1941     // We don't want to stop downloading if we already started it.
1942     if (flags & flagDownload && m_readyState > MediaPlayer::HaveNothing && !m_resetPipeline)
1943         return;
1944
1945     bool shouldDownload = !isLiveStream() && m_preload == MediaPlayer::Auto;
1946     if (shouldDownload) {
1947         GST_DEBUG("Enabling on-disk buffering");
1948         g_object_set(m_pipeline.get(), "flags", flags | flagDownload, nullptr);
1949         m_fillTimer.startRepeating(200_ms);
1950     } else {
1951         GST_DEBUG("Disabling on-disk buffering");
1952         g_object_set(m_pipeline.get(), "flags", flags & ~flagDownload, nullptr);
1953         m_fillTimer.stop();
1954     }
1955 }
1956
// Applies a new preload policy. An "auto" request is ignored for live
// streams (nothing to buffer ahead), and a load deferred while preload was
// "none" is resumed once a stronger policy arrives.
void MediaPlayerPrivateGStreamer::setPreload(MediaPlayer::Preload preload)
{
    if (preload == MediaPlayer::Auto && isLiveStream())
        return;

    m_preload = preload;
    setDownloadBuffering();

    if (m_delayingLoad && m_preload != MediaPlayer::None) {
        m_delayingLoad = false;
        commitLoad();
    }
}
1970
// Builds the audio sink handed to playbin. Depending on the GStreamer
// version, WEB_AUDIO support, and the pitch-preservation setting, this is
// either the bare autoaudiosink or a bin wrapping it with extra elements.
GstElement* MediaPlayerPrivateGStreamer::createAudioSink()
{
    m_autoAudioSink = gst_element_factory_make("autoaudiosink", nullptr);
    if (!m_autoAudioSink) {
        GST_WARNING("GStreamer's autoaudiosink not found. Please check your gst-plugins-good installation");
        return nullptr;
    }

    // Tune the real sink's properties once autoaudiosink instantiates it.
    g_signal_connect_swapped(m_autoAudioSink.get(), "child-added", G_CALLBACK(setAudioStreamPropertiesCallback), this);

    GstElement* audioSinkBin;

    if (webkitGstCheckVersion(1, 4, 2)) {
#if ENABLE(WEB_AUDIO)
        audioSinkBin = gst_bin_new("audio-sink");
        ensureAudioSourceProvider();
        m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
        return audioSinkBin;
#else
        return m_autoAudioSink.get();
#endif
    }

    // Construct audio sink only if pitch preserving is enabled.
    // If GStreamer 1.4.2 is used the audio-filter playbin property is used instead.
    if (m_preservesPitch) {
        GstElement* scale = gst_element_factory_make("scaletempo", nullptr);
        if (!scale) {
            GST_WARNING("Failed to create scaletempo");
            return m_autoAudioSink.get();
        }

        audioSinkBin = gst_bin_new("audio-sink");
        gst_bin_add(GST_BIN(audioSinkBin), scale);
        // Expose scaletempo's sink pad as the bin's own sink via a ghost pad.
        GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(scale, "sink"));
        gst_element_add_pad(audioSinkBin, gst_ghost_pad_new("sink", pad.get()));

#if ENABLE(WEB_AUDIO)
        ensureAudioSourceProvider();
        m_audioSourceProvider->configureAudioBin(audioSinkBin, scale);
#else
        GstElement* convert = gst_element_factory_make("audioconvert", nullptr);
        GstElement* resample = gst_element_factory_make("audioresample", nullptr);

        gst_bin_add_many(GST_BIN(audioSinkBin), convert, resample, m_autoAudioSink.get(), nullptr);

        if (!gst_element_link_many(scale, convert, resample, m_autoAudioSink.get(), nullptr)) {
            GST_WARNING("Failed to link audio sink elements");
            gst_object_unref(audioSinkBin);
            return m_autoAudioSink.get();
        }
#endif
        return audioSinkBin;
    }

#if ENABLE(WEB_AUDIO)
    audioSinkBin = gst_bin_new("audio-sink");
    ensureAudioSourceProvider();
    m_audioSourceProvider->configureAudioBin(audioSinkBin, nullptr);
    return audioSinkBin;
#endif
    ASSERT_NOT_REACHED();
    return nullptr;
}
2035
2036 GstElement* MediaPlayerPrivateGStreamer::audioSink() const
2037 {
2038     GstElement* sink;
2039     g_object_get(m_pipeline.get(), "audio-sink", &sink, nullptr);
2040     return sink;
2041 }
2042
2043 #if ENABLE(WEB_AUDIO)
// Lazily creates the Web Audio source provider on first use.
void MediaPlayerPrivateGStreamer::ensureAudioSourceProvider()
{
    if (!m_audioSourceProvider)
        m_audioSourceProvider = std::make_unique<AudioSourceProviderGStreamer>();
}
2049
// Returns the (lazily created) Web Audio source provider; ownership stays
// with this player.
AudioSourceProvider* MediaPlayerPrivateGStreamer::audioSourceProvider()
{
    ensureAudioSourceProvider();
    return m_audioSourceProvider.get();
}
2055 #endif
2056
// Creates and wires up the playbin pipeline: bus handlers, signal
// connections, text/audio/video sinks, and optional pitch/orientation
// filters. Must only be called once per player.
void MediaPlayerPrivateGStreamer::createGSTPlayBin()
{
    ASSERT(!m_pipeline);

    // gst_element_factory_make() returns a floating reference so
    // we should not adopt.
    setPipeline(gst_element_factory_make("playbin", "play"));
    setStreamVolumeElement(GST_STREAM_VOLUME(m_pipeline.get()));

    // The sync handler runs on the streaming thread; messages it handles are
    // dropped before reaching the main-thread bus watch below.
    GRefPtr<GstBus> bus = adoptGRef(gst_pipeline_get_bus(GST_PIPELINE(m_pipeline.get())));
    gst_bus_set_sync_handler(bus.get(), [](GstBus*, GstMessage* message, gpointer userData) {
        auto& player = *static_cast<MediaPlayerPrivateGStreamer*>(userData);

        if (player.handleSyncMessage(message)) {
            gst_message_unref(message);
            return GST_BUS_DROP;
        }

        return GST_BUS_PASS;
    }, this, nullptr);

    // Let also other listeners subscribe to (application) messages in this bus.
    gst_bus_add_signal_watch(bus.get());
    g_signal_connect(bus.get(), "message", G_CALLBACK(busMessageCallback), this);

    g_object_set(m_pipeline.get(), "mute", m_player->muted(), nullptr);

    g_signal_connect_swapped(m_pipeline.get(), "notify::source", G_CALLBACK(sourceChangedCallback), this);
    g_signal_connect_swapped(m_pipeline.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
    g_signal_connect_swapped(m_pipeline.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
#if ENABLE(VIDEO_TRACK)
    g_signal_connect_swapped(m_pipeline.get(), "text-changed", G_CALLBACK(textChangedCallback), this);

    GstElement* textCombiner = webkitTextCombinerNew();
    ASSERT(textCombiner);
    g_object_set(m_pipeline.get(), "text-stream-combiner", textCombiner, nullptr);

    // Route subtitle/text output into an appsink delivering WebVTT cues.
    m_textAppSink = webkitTextSinkNew();
    ASSERT(m_textAppSink);

    m_textAppSinkPad = adoptGRef(gst_element_get_static_pad(m_textAppSink.get(), "sink"));
    ASSERT(m_textAppSinkPad);

    g_object_set(m_textAppSink.get(), "emit-signals", true, "enable-last-sample", false, "caps", gst_caps_new_empty_simple("text/vtt"), nullptr);
    g_signal_connect_swapped(m_textAppSink.get(), "new-sample", G_CALLBACK(newTextSampleCallback), this);

    g_object_set(m_pipeline.get(), "text-sink", m_textAppSink.get(), nullptr);
#endif

    g_object_set(m_pipeline.get(), "video-sink", createVideoSink(), "audio-sink", createAudioSink(), nullptr);

    configurePlaySink();

    // On 1.4.2 and newer we use the audio-filter property instead.
    // See https://bugzilla.gnome.org/show_bug.cgi?id=735748 for
    // the reason for using >= 1.4.2 instead of >= 1.4.0.
    if (m_preservesPitch && webkitGstCheckVersion(1, 4, 2)) {
        GstElement* scale = gst_element_factory_make("scaletempo", nullptr);

        if (!scale)
            GST_WARNING("Failed to create scaletempo");
        else
            g_object_set(m_pipeline.get(), "audio-filter", scale, nullptr);
    }

    if (!m_renderingCanBeAccelerated) {
        // If not using accelerated compositing, let GStreamer handle
        // the image-orientation tag.
        GstElement* videoFlip = gst_element_factory_make("videoflip", nullptr);
        g_object_set(videoFlip, "method", 8, nullptr);
        g_object_set(m_pipeline.get(), "video-filter", videoFlip, nullptr);
    }

    // Track caps changes on the video sink to pick up size/format updates.
    GRefPtr<GstPad> videoSinkPad = adoptGRef(gst_element_get_static_pad(m_videoSink.get(), "sink"));
    if (videoSinkPad)
        g_signal_connect_swapped(videoSinkPad.get(), "notify::caps", G_CALLBACK(videoSinkCapsChangedCallback), this);
}
2134
2135 void MediaPlayerPrivateGStreamer::simulateAudioInterruption()
2136 {
2137     GstMessage* message = gst_message_new_request_state(GST_OBJECT(m_pipeline.get()), GST_STATE_PAUSED);
2138     gst_element_post_message(m_pipeline.get(), message);
2139 }
2140
2141 bool MediaPlayerPrivateGStreamer::didPassCORSAccessCheck() const
2142 {
2143     if (WEBKIT_IS_WEB_SRC(m_source.get()))
2144         return webKitSrcPassedCORSAccessCheck(WEBKIT_WEB_SRC(m_source.get()));
2145     return false;
2146 }
2147
2148 bool MediaPlayerPrivateGStreamer::canSaveMediaData() const
2149 {
2150     if (isLiveStream())
2151         return false;
2152
2153     if (m_url.isLocalFile())
2154         return true;
2155
2156     if (m_url.protocolIsInHTTPFamily())
2157         return true;
2158
2159     return false;
2160 }
2161
// Streaming-thread bus message hook; delegates to the base class. Returning
// true means the message was consumed and will be dropped from the bus.
bool MediaPlayerPrivateGStreamer::handleSyncMessage(GstMessage* message)
{
    return MediaPlayerPrivateGStreamerBase::handleSyncMessage(message);
}
2166
2167 }
2168
2169 #endif // USE(GSTREAMER)