Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp
1 /*
2  * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
3  * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
4  * Copyright (C) 2007 Alp Toker <alp@atoker.com>
5  * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
6  * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2016, 2017 Igalia S.L
7  * Copyright (C) 2015 Sebastian Dröge <sebastian@centricular.com>
8  * Copyright (C) 2015, 2016, 2017 Metrological Group B.V.
9  *
10  * This library is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Library General Public
12  * License as published by the Free Software Foundation; either
13  * version 2 of the License, or (at your option) any later version.
14  *
15  * This library is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
18  * Library General Public License for more details.
19  *
20  * You should have received a copy of the GNU Library General Public License
21  * along with this library; see the file COPYING.LIB.  If not, write to
22  * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
23  * Boston, MA 02110-1301, USA.
24  */
25
26 #include "config.h"
27 #include "MediaPlayerPrivateGStreamerMSE.h"
28
29 #if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)
30
31 #include "AppendPipeline.h"
32 #include "AudioTrackPrivateGStreamer.h"
33 #include "GStreamerUtilities.h"
34 #include "InbandTextTrackPrivateGStreamer.h"
35 #include "MIMETypeRegistry.h"
36 #include "MediaDescription.h"
37 #include "MediaPlayer.h"
38 #include "NotImplemented.h"
39 #include "SourceBufferPrivateGStreamer.h"
40 #include "TimeRanges.h"
41 #include "URL.h"
42 #include "VideoTrackPrivateGStreamer.h"
43
44 #include <fnmatch.h>
45 #include <gst/app/gstappsink.h>
46 #include <gst/app/gstappsrc.h>
47 #include <gst/gst.h>
48 #include <gst/pbutils/pbutils.h>
49 #include <gst/video/video.h>
50 #include <wtf/Condition.h>
51 #include <wtf/HashSet.h>
52 #include <wtf/NeverDestroyed.h>
53 #include <wtf/StringPrintStream.h>
54 #include <wtf/text/AtomicString.h>
55 #include <wtf/text/AtomicStringHash.h>
56
57 #if ENABLE(ENCRYPTED_MEDIA)
58 #include "CDMClearKey.h"
59 #include "SharedBuffer.h"
60 #endif
61
62 static const char* dumpReadyState(WebCore::MediaPlayer::ReadyState readyState)
63 {
64     switch (readyState) {
65     case WebCore::MediaPlayer::HaveNothing: return "HaveNothing";
66     case WebCore::MediaPlayer::HaveMetadata: return "HaveMetadata";
67     case WebCore::MediaPlayer::HaveCurrentData: return "HaveCurrentData";
68     case WebCore::MediaPlayer::HaveFutureData: return "HaveFutureData";
69     case WebCore::MediaPlayer::HaveEnoughData: return "HaveEnoughData";
70     default: return "(unknown)";
71     }
72 }
73
74 GST_DEBUG_CATEGORY(webkit_mse_debug);
75 #define GST_CAT_DEFAULT webkit_mse_debug
76
77 namespace WebCore {
78
79 void MediaPlayerPrivateGStreamerMSE::registerMediaEngine(MediaEngineRegistrar registrar)
80 {
81     if (isAvailable()) {
82         registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamerMSE>(player); },
83             getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
84     }
85 }
86
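// Ensures GStreamer itself is initialized and that the webkitmediasrc element is registered.
// The element is registered with an elevated rank (GST_RANK_PRIMARY + 100) so that it takes
// precedence over other source elements when the playback pipeline resolves the synthetic
// "mediasource" URLs produced by load() below.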
87 bool initializeGStreamerAndRegisterWebKitMSEElement()
88 {
89     if (UNLIKELY(!initializeGStreamer()))
90         return false;
91
92     registerWebKitGStreamerElements();
93
94     GST_DEBUG_CATEGORY_INIT(webkit_mse_debug, "webkitmse", 0, "WebKit MSE media player");
95
96     GRefPtr<GstElementFactory> webKitMediaSrcFactory = adoptGRef(gst_element_factory_find("webkitmediasrc"));
97     if (UNLIKELY(!webKitMediaSrcFactory))
98         gst_element_register(nullptr, "webkitmediasrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_SRC);
99     return true;
100 }
101
102 bool MediaPlayerPrivateGStreamerMSE::isAvailable()
103 {
104     if (UNLIKELY(!initializeGStreamerAndRegisterWebKitMSEElement()))
105         return false;
106
107     GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
108     return factory;
109 }
110
111 MediaPlayerPrivateGStreamerMSE::MediaPlayerPrivateGStreamerMSE(MediaPlayer* player)
112     : MediaPlayerPrivateGStreamer(player)
113 {
114     GST_TRACE("creating the player (%p)", this);
115 }
116
117 MediaPlayerPrivateGStreamerMSE::~MediaPlayerPrivateGStreamerMSE()
118 {
119     GST_TRACE("destroying the player (%p)", this);
120
121     for (auto iterator : m_appendPipelinesMap)
122         iterator.value->clearPlayerPrivate();
123
124     if (m_source) {
125         webKitMediaSrcSetMediaPlayerPrivate(WEBKIT_MEDIA_SRC(m_source.get()), nullptr);
126         g_signal_handlers_disconnect_by_data(m_source.get(), this);
127     }
128
129     if (m_playbackPipeline)
130         m_playbackPipeline->setWebKitMediaSrc(nullptr);
131 }
132
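// Only URLs using the synthetic "mediasource" scheme (built by the MediaSourcePrivateClient
// overload below) are handled here; anything else is reported as a format error so that
// MediaPlayer can fall back to the regular, non-MSE GStreamer player.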
133 void MediaPlayerPrivateGStreamerMSE::load(const String& urlString)
134 {
135     if (!urlString.startsWith("mediasource")) {
136         // Properly fail so the global MediaPlayer tries to fallback to the next MediaPlayerPrivate.
137         m_networkState = MediaPlayer::FormatError;
138         m_player->networkStateChanged();
139         return;
140     }
141
142     if (UNLIKELY(!initializeGStreamerAndRegisterWebKitMSEElement()))
143         return;
144
145     if (!m_playbackPipeline)
146         m_playbackPipeline = PlaybackPipeline::create();
147
148     MediaPlayerPrivateGStreamer::load(urlString);
149 }
150
151 void MediaPlayerPrivateGStreamerMSE::load(const String& url, MediaSourcePrivateClient* mediaSource)
152 {
153     m_mediaSource = mediaSource;
154     load(String::format("mediasource%s", url.utf8().data()));
155 }
156
157 void MediaPlayerPrivateGStreamerMSE::pause()
158 {
159     m_paused = true;
160     MediaPlayerPrivateGStreamer::pause();
161 }
162
163 MediaTime MediaPlayerPrivateGStreamerMSE::durationMediaTime() const
164 {
165     if (UNLIKELY(!m_pipeline || m_errorOccured))
166         return MediaTime();
167
168     return m_mediaTimeDuration;
169 }
170
171 void MediaPlayerPrivateGStreamerMSE::seek(const MediaTime& time)
172 {
173     if (UNLIKELY(!m_pipeline || m_errorOccured))
174         return;
175
176     GST_INFO("[Seek] seek attempt to %s secs", toString(time).utf8().data());
177
178     // Avoid useless seeking.
179     MediaTime current = currentMediaTime();
180     if (time == current) {
181         if (!m_seeking)
182             timeChanged();
183         return;
184     }
185
186     if (isLiveStream())
187         return;
188
189     if (m_seeking && m_seekIsPending) {
190         m_seekTime = time;
191         return;
192     }
193
194     GST_DEBUG("Seeking from %s to %s seconds", toString(current).utf8().data(), toString(time).utf8().data());
195
196     MediaTime previousSeekTime = m_seekTime;
197     m_seekTime = time;
198
199     if (!doSeek()) {
200         m_seekTime = previousSeekTime;
201         GST_WARNING("Seeking to %s failed", toString(time).utf8().data());
202         return;
203     }
204
205     m_isEndReached = false;
206     GST_DEBUG("m_seeking=%s, m_seekTime=%s", boolForPrinting(m_seeking), toString(m_seekTime).utf8().data());
207 }
208
209 void MediaPlayerPrivateGStreamerMSE::configurePlaySink()
210 {
211     MediaPlayerPrivateGStreamer::configurePlaySink();
212
213     GRefPtr<GstElement> playsink = adoptGRef(gst_bin_get_by_name(GST_BIN(m_pipeline.get()), "playsink"));
214     if (playsink) {
215         // The default value (0) means "send events to all the sinks", instead
216         // of "only to the first that returns true". This is needed for MSE seek.
217         g_object_set(G_OBJECT(playsink.get()), "send-event-mode", 0, nullptr);
218     }
219 }
220
221 bool MediaPlayerPrivateGStreamerMSE::changePipelineState(GstState newState)
222 {
223     if (seeking()) {
224         GST_DEBUG("Rejected state change to %s while seeking",
225             gst_element_state_get_name(newState));
226         return true;
227     }
228
229     return MediaPlayerPrivateGStreamer::changePipelineState(newState);
230 }
231
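// Called once the pipeline is ready for sample enqueuing after a seek (see the
// webKitMediaSrcPrepareSeek() call in doSeek()): the MediaSource re-enqueues data for the
// target time and the GStreamer half of the seek is marked as complete.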
232 void MediaPlayerPrivateGStreamerMSE::notifySeekNeedsDataForTime(const MediaTime& seekTime)
233 {
234     // Re-enqueue the samples needed to resume playback at the new position.
235     m_mediaSource->seekToTime(seekTime);
236
237     GST_DEBUG("MSE seek to %s finished", toString(seekTime).utf8().data());
238
239     if (!m_gstSeekCompleted) {
240         m_gstSeekCompleted = true;
241         maybeFinishSeek();
242     }
243 }
244
245 bool MediaPlayerPrivateGStreamerMSE::doSeek(const MediaTime&, float, GstSeekFlags)
246 {
247     // Use doSeek() instead. If anybody is calling this version of doSeek(), something is wrong.
248     ASSERT_NOT_REACHED();
249     return false;
250 }
251
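// An MSE seek has two halves that must both finish before maybeFinishSeek() reports completion:
// the GStreamer flushing seek on the playback pipeline (m_gstSeekCompleted) and the MSE-level
// re-enqueue of samples for the target time (m_mseSeekCompleted). doSeek() can also defer itself
// (m_seekIsPending) when the pipeline or the SourceBuffers are not ready yet.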
252 bool MediaPlayerPrivateGStreamerMSE::doSeek()
253 {
254     MediaTime seekTime = m_seekTime;
255     double rate = m_player->rate();
256     GstSeekFlags seekType = static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE);
257
258     // Always move to the seeking state so that 'currentTime' is reported correctly while the actual seek is still pending.
259     m_seeking = true;
260
261     // Check if playback pipeline is ready for seek.
262     GstState state, newState;
263     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &newState, 0);
264     if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
265         GST_DEBUG("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
266         webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
267         m_seeking = false;
268         return false;
269     }
270     if ((getStateResult == GST_STATE_CHANGE_ASYNC
271         && !(state == GST_STATE_PLAYING && newState == GST_STATE_PAUSED))
272         || state < GST_STATE_PAUSED
273         || m_isEndReached
274         || !m_gstSeekCompleted) {
275         CString reason = "Unknown reason";
276         if (getStateResult == GST_STATE_CHANGE_ASYNC) {
277             reason = String::format("In async change %s --> %s",
278                 gst_element_state_get_name(state),
279                 gst_element_state_get_name(newState)).utf8();
280         } else if (state < GST_STATE_PAUSED)
281             reason = "State less than PAUSED";
282         else if (m_isEndReached)
283             reason = "End reached";
284         else if (!m_gstSeekCompleted)
285             reason = "Previous seek is not finished yet";
286
287         GST_DEBUG("[Seek] Delaying the seek: %s", reason.data());
288
289         m_seekIsPending = true;
290
291         if (m_isEndReached) {
292             GST_DEBUG("[Seek] reset pipeline");
293             m_resetPipeline = true;
294             m_seeking = false;
295             if (!changePipelineState(GST_STATE_PAUSED))
296                 loadingFailed(MediaPlayer::Empty);
297             else
298                 m_seeking = true;
299         }
300
301         return m_seeking;
302     }
303
304     // Stop accepting new samples until actual seek is finished.
305     webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), false);
306
307     // Adjust the seek time if doing so bridges a small gap in the buffered ranges.
308     if (!isTimeBuffered(seekTime)) {
309         // Check whether a time in the near future (less than 0.1 sec ahead) is buffered and, if so, move the seek target there.
310         if (m_mediaSource) {
311             const MediaTime miniGap = MediaTime(1, 10);
312             MediaTime nearest = m_mediaSource->buffered()->nearest(seekTime);
313             if (nearest.isValid() && nearest > seekTime && (nearest - seekTime) <= miniGap && isTimeBuffered(nearest + miniGap)) {
314                 GST_DEBUG("[Seek] Changed the seek target time from %s to %s, a near point in the future", toString(seekTime).utf8().data(), toString(nearest).utf8().data());
315                 seekTime = nearest;
316             }
317         }
318     }
319
320     // Check if MSE has samples for requested time and defer actual seek if needed.
321     if (!isTimeBuffered(seekTime)) {
322         GST_DEBUG("[Seek] Delaying the seek: MSE is not ready");
323         GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
324         if (setStateResult == GST_STATE_CHANGE_FAILURE) {
325             GST_DEBUG("[Seek] Cannot seek, failed to pause playback pipeline.");
326             webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
327             m_seeking = false;
328             return false;
329         }
330         m_readyState = MediaPlayer::HaveMetadata;
331         notifySeekNeedsDataForTime(seekTime);
332         ASSERT(!m_mseSeekCompleted);
333         return true;
334     }
335
336     // Complete previous MSE seek if needed.
337     if (!m_mseSeekCompleted) {
338         m_mediaSource->monitorSourceBuffers();
339         ASSERT(m_mseSeekCompleted);
340         // Note: seekCompleted will recursively call us.
341         return m_seeking;
342     }
343
344     GST_DEBUG("We can seek now");
345
346     MediaTime startTime = seekTime, endTime = MediaTime::invalidTime();
347
348     if (rate < 0) {
349         startTime = MediaTime::zeroTime();
350         endTime = seekTime;
351     }
352
353     if (!rate)
354         rate = 1;
355
356     GST_DEBUG("Actual seek to %s, end time: %s, rate: %f", toString(startTime).utf8().data(), toString(endTime).utf8().data(), rate);
357
358     // This will call notifySeekNeedsData() after some time to tell that the pipeline is ready for sample enqueuing.
359     webKitMediaSrcPrepareSeek(WEBKIT_MEDIA_SRC(m_source.get()), seekTime);
360
361     m_gstSeekCompleted = false;
362     if (!gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType, GST_SEEK_TYPE_SET, toGstClockTime(startTime), GST_SEEK_TYPE_SET, toGstClockTime(endTime))) {
363         webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
364         m_seeking = false;
365         m_gstSeekCompleted = true;
366         GST_DEBUG("doSeek(): gst_element_seek() failed, returning false");
367         return false;
368     }
369
370     // The samples will be enqueued in notifySeekNeedsData().
371     GST_DEBUG("doSeek(): gst_element_seek() succeeded, returning true");
372     return true;
373 }
374
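// Finishes the seek only when both halves are done and no async state change is still in flight;
// a seek request queued in the meantime (m_seekIsPending) is committed here instead.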
375 void MediaPlayerPrivateGStreamerMSE::maybeFinishSeek()
376 {
377     if (!m_seeking || !m_mseSeekCompleted || !m_gstSeekCompleted)
378         return;
379
380     GstState state, newState;
381     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &newState, 0);
382
383     if (getStateResult == GST_STATE_CHANGE_ASYNC
384         && !(state == GST_STATE_PLAYING && newState == GST_STATE_PAUSED)) {
385         GST_DEBUG("[Seek] Delaying seek finish");
386         return;
387     }
388
389     if (m_seekIsPending) {
390         GST_DEBUG("[Seek] Committing pending seek to %s", toString(m_seekTime).utf8().data());
391         m_seekIsPending = false;
392         if (!doSeek()) {
393             GST_WARNING("[Seek] Seeking to %s failed", toString(m_seekTime).utf8().data());
394             m_cachedPosition = MediaTime::invalidTime();
395         }
396         return;
397     }
398
399     GST_DEBUG("[Seek] Seeked to %s", toString(m_seekTime).utf8().data());
400
401     webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
402     m_seeking = false;
403     m_cachedPosition = MediaTime::invalidTime();
404     // The pipeline can still have a pending state. In this case a position query will fail.
405     // Right now we can use m_seekTime as a fallback.
406     m_canFallBackToLastFinishedSeekPosition = true;
407     timeChanged();
408 }
409
410 void MediaPlayerPrivateGStreamerMSE::updatePlaybackRate()
411 {
412     notImplemented();
413 }
414
415 bool MediaPlayerPrivateGStreamerMSE::seeking() const
416 {
417     return m_seeking;
418 }
419
420 // FIXME: MediaPlayerPrivateGStreamer manages the ReadyState on its own. We shouldn't change it manually.
421 void MediaPlayerPrivateGStreamerMSE::setReadyState(MediaPlayer::ReadyState readyState)
422 {
423     if (readyState == m_readyState)
424         return;
425
426     if (seeking()) {
427         GST_DEBUG("Skipping ready state change (%s -> %s) due to seek", dumpReadyState(m_readyState), dumpReadyState(readyState));
428         return;
429     }
430
431     GST_DEBUG("Ready State Changed manually from %u to %u", m_readyState, readyState);
432     MediaPlayer::ReadyState oldReadyState = m_readyState;
433     m_readyState = readyState;
434     GST_DEBUG("m_readyState: %s -> %s", dumpReadyState(oldReadyState), dumpReadyState(m_readyState));
435
436     if (oldReadyState < MediaPlayer::HaveCurrentData && m_readyState >= MediaPlayer::HaveCurrentData) {
437         GST_DEBUG("[Seek] Reporting load state changed to trigger seek continuation");
438         loadStateChanged();
439     }
440     m_player->readyStateChanged();
441
442     GstState pipelineState;
443     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &pipelineState, nullptr, 250 * GST_NSECOND);
444     bool isPlaying = (getStateResult == GST_STATE_CHANGE_SUCCESS && pipelineState == GST_STATE_PLAYING);
445
446     if (m_readyState == MediaPlayer::HaveMetadata && oldReadyState > MediaPlayer::HaveMetadata && isPlaying) {
447         GST_TRACE("Changing pipeline to PAUSED...");
448         bool ok = changePipelineState(GST_STATE_PAUSED);
449         GST_TRACE("Changed pipeline to PAUSED: %s", ok ? "Success" : "Error");
450     }
451 }
452
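// The MSE half of the seek is signalled through the two methods below: waitForSeekCompleted()
// marks it as in flight, and seekCompleted() re-runs doSeek() once the SourceBuffers have data
// for the target time.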
453 void MediaPlayerPrivateGStreamerMSE::waitForSeekCompleted()
454 {
455     if (!m_seeking)
456         return;
457
458     GST_DEBUG("Waiting for MSE seek completed");
459     m_mseSeekCompleted = false;
460 }
461
462 void MediaPlayerPrivateGStreamerMSE::seekCompleted()
463 {
464     if (m_mseSeekCompleted)
465         return;
466
467     GST_DEBUG("MSE seek completed");
468     m_mseSeekCompleted = true;
469
470     doSeek();
471
472     if (!seeking() && m_readyState >= MediaPlayer::HaveFutureData)
473         changePipelineState(GST_STATE_PLAYING);
474
475     if (!seeking())
476         m_player->timeChanged();
477 }
478
479 void MediaPlayerPrivateGStreamerMSE::setRate(float)
480 {
481     notImplemented();
482 }
483
484 std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamerMSE::buffered() const
485 {
486     return m_mediaSource ? m_mediaSource->buffered() : std::make_unique<PlatformTimeRanges>();
487 }
488
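// Invoked once the playback pipeline has created its source element: the WebKitMediaSrc is wired
// to the playback pipeline, the MediaSource is opened, and the track-change signals are connected.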
489 void MediaPlayerPrivateGStreamerMSE::sourceSetup(GstElement* sourceElement)
490 {
491     m_source = sourceElement;
492
493     ASSERT(WEBKIT_IS_MEDIA_SRC(m_source.get()));
494
495     m_playbackPipeline->setWebKitMediaSrc(WEBKIT_MEDIA_SRC(m_source.get()));
496
497     MediaSourceGStreamer::open(*m_mediaSource.get(), *this);
498     g_signal_connect_swapped(m_source.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
499     g_signal_connect_swapped(m_source.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
500     g_signal_connect_swapped(m_source.get(), "text-changed", G_CALLBACK(textChangedCallback), this);
501     webKitMediaSrcSetMediaPlayerPrivate(WEBKIT_MEDIA_SRC(m_source.get()), this);
502 }
503
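// Maps the current pipeline state (together with the seeking, buffering, download and EOS flags)
// onto the MediaPlayer ready/network states, and pauses or resumes the pipeline when those
// disagree with the requested playback state.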
504 void MediaPlayerPrivateGStreamerMSE::updateStates()
505 {
506     if (UNLIKELY(!m_pipeline || m_errorOccured))
507         return;
508
509     MediaPlayer::NetworkState oldNetworkState = m_networkState;
510     MediaPlayer::ReadyState oldReadyState = m_readyState;
511     GstState state, pending;
512
513     GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);
514
515     bool shouldUpdatePlaybackState = false;
516     switch (getStateResult) {
517     case GST_STATE_CHANGE_SUCCESS: {
518         GST_DEBUG("State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
519
520         // Do nothing if we are at EOS and the state changed to READY, to avoid recreating the player
521         // in HTMLMediaElement, and let it properly generate the video 'ended' event.
522         if (m_isEndReached && state == GST_STATE_READY)
523             break;
524
525         m_resetPipeline = (state <= GST_STATE_READY);
526         if (m_resetPipeline)
527             m_mediaTimeDuration = MediaTime::zeroTime();
528
529         // Update ready and network states.
530         switch (state) {
531         case GST_STATE_NULL:
532             m_readyState = MediaPlayer::HaveNothing;
533             GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
534             m_networkState = MediaPlayer::Empty;
535             break;
536         case GST_STATE_READY:
537             m_readyState = MediaPlayer::HaveMetadata;
538             GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
539             m_networkState = MediaPlayer::Empty;
540             break;
541         case GST_STATE_PAUSED:
542         case GST_STATE_PLAYING:
543             if (seeking()) {
544                 m_readyState = MediaPlayer::HaveMetadata;
545                 // FIXME: Should we manage NetworkState too?
546                 GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
547             } else if (m_buffering) {
548                 if (m_bufferingPercentage == 100) {
549                     GST_DEBUG("[Buffering] Complete.");
550                     m_buffering = false;
551                     m_readyState = MediaPlayer::HaveEnoughData;
552                     GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
553                     m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
554                 } else {
555                     m_readyState = MediaPlayer::HaveCurrentData;
556                     GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
557                     m_networkState = MediaPlayer::Loading;
558                 }
559             } else if (m_downloadFinished) {
560                 m_readyState = MediaPlayer::HaveEnoughData;
561                 GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
562                 m_networkState = MediaPlayer::Loaded;
563             } else {
564                 m_readyState = MediaPlayer::HaveFutureData;
565                 GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
566                 m_networkState = MediaPlayer::Loading;
567             }
568
569             if (m_eosMarked && state == GST_STATE_PLAYING)
570                 m_eosPending = true;
571
572             break;
573         default:
574             ASSERT_NOT_REACHED();
575             break;
576         }
577
578         // Sync states where needed.
579         if (state == GST_STATE_PAUSED) {
580             if (!m_volumeAndMuteInitialized) {
581                 notifyPlayerOfVolumeChange();
582                 notifyPlayerOfMute();
583                 m_volumeAndMuteInitialized = true;
584             }
585
586             if (!seeking() && !m_buffering && !m_paused && m_playbackRate) {
587                 GST_DEBUG("[Buffering] Restarting playback.");
588                 changePipelineState(GST_STATE_PLAYING);
589             }
590         } else if (state == GST_STATE_PLAYING) {
591             m_paused = false;
592
593             if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
594                 GST_DEBUG("[Buffering] Pausing stream for buffering.");
595                 changePipelineState(GST_STATE_PAUSED);
596             }
597         } else
598             m_paused = true;
599
600         if (m_requestedState == GST_STATE_PAUSED && state == GST_STATE_PAUSED) {
601             shouldUpdatePlaybackState = true;
602             GST_DEBUG("Requested state change to %s was completed", gst_element_state_get_name(state));
603         }
604
605         break;
606     }
607     case GST_STATE_CHANGE_ASYNC:
608         GST_DEBUG("Async: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
609         // Change in progress.
610         break;
611     case GST_STATE_CHANGE_FAILURE:
612         GST_WARNING("Failure: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
613         // Change failed.
614         return;
615     case GST_STATE_CHANGE_NO_PREROLL:
616         GST_DEBUG("No preroll: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
617
618         // Live pipelines go in PAUSED without prerolling.
619         m_isStreaming = true;
620
621         if (state == GST_STATE_READY) {
622             m_readyState = MediaPlayer::HaveNothing;
623             GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
624         } else if (state == GST_STATE_PAUSED) {
625             m_readyState = MediaPlayer::HaveEnoughData;
626             GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
627             m_paused = true;
628         } else if (state == GST_STATE_PLAYING)
629             m_paused = false;
630
631         if (!m_paused && m_playbackRate)
632             changePipelineState(GST_STATE_PLAYING);
633
634         m_networkState = MediaPlayer::Loading;
635         break;
636     default:
637         GST_DEBUG("Unhandled state change return: %d", getStateResult);
638         break;
639     }
640
641     m_requestedState = GST_STATE_VOID_PENDING;
642
643     if (shouldUpdatePlaybackState)
644         m_player->playbackStateChanged();
645
646     if (m_networkState != oldNetworkState) {
647         GST_DEBUG("Network State Changed from %u to %u", oldNetworkState, m_networkState);
648         m_player->networkStateChanged();
649     }
650     if (m_readyState != oldReadyState) {
651         GST_DEBUG("Ready State Changed from %u to %u", oldReadyState, m_readyState);
652         m_player->readyStateChanged();
653     }
654
655     if (getStateResult == GST_STATE_CHANGE_SUCCESS && state >= GST_STATE_PAUSED) {
656         updatePlaybackRate();
657         maybeFinishSeek();
658     }
659 }
660 void MediaPlayerPrivateGStreamerMSE::asyncStateChangeDone()
661 {
662     if (UNLIKELY(!m_pipeline || m_errorOccured))
663         return;
664
665     if (m_seeking)
666         maybeFinishSeek();
667     else
668         updateStates();
669 }
670
671 bool MediaPlayerPrivateGStreamerMSE::isTimeBuffered(const MediaTime &time) const
672 {
673     bool result = m_mediaSource && m_mediaSource->buffered()->contain(time);
674     GST_DEBUG("Time %s buffered? %s", toString(time).utf8().data(), boolForPrinting(result));
675     return result;
676 }
677
678 void MediaPlayerPrivateGStreamerMSE::setMediaSourceClient(Ref<MediaSourceClientGStreamerMSE> client)
679 {
680     m_mediaSourceClient = client.ptr();
681 }
682
683 RefPtr<MediaSourceClientGStreamerMSE> MediaPlayerPrivateGStreamerMSE::mediaSourceClient()
684 {
685     return m_mediaSourceClient;
686 }
687
688 void MediaPlayerPrivateGStreamerMSE::durationChanged()
689 {
690     if (!m_mediaSourceClient) {
691         GST_DEBUG("m_mediaSourceClient is null, doing nothing");
692         return;
693     }
694
695     MediaTime previousDuration = m_mediaTimeDuration;
696     m_mediaTimeDuration = m_mediaSourceClient->duration();
697
698     GST_TRACE("previous=%s, new=%s", toString(previousDuration).utf8().data(), toString(m_mediaTimeDuration).utf8().data());
699
700     // Avoid emitting durationchanged in the case where the previous duration was 0 because that case is already handled
701     // by the HTMLMediaElement.
702     if (m_mediaTimeDuration != previousDuration && m_mediaTimeDuration.isValid() && previousDuration.isValid()) {
703         m_player->durationChanged();
704         m_playbackPipeline->notifyDurationChanged();
705         m_mediaSource->durationChanged(m_mediaTimeDuration);
706     }
707 }
708
709 static HashSet<String, ASCIICaseInsensitiveHash>& mimeTypeCache()
710 {
711     static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> cache = []()
712     {
713         initializeGStreamerAndRegisterWebKitMSEElement();
714         HashSet<String, ASCIICaseInsensitiveHash> set;
715         const char* mimeTypes[] = {
716             "video/mp4",
717             "audio/mp4",
718             "video/webm",
719             "audio/webm"
720         };
721         for (auto& type : mimeTypes)
722             set.add(type);
723         return set;
724     }();
725     return cache;
726 }
727
728 void MediaPlayerPrivateGStreamerMSE::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
729 {
730     types = mimeTypeCache();
731 }
732
733 void MediaPlayerPrivateGStreamerMSE::trackDetected(RefPtr<AppendPipeline> appendPipeline, RefPtr<WebCore::TrackPrivateBase> newTrack, bool firstTrackDetected)
734 {
735     ASSERT(appendPipeline->track() == newTrack);
736
737     GstCaps* caps = appendPipeline->appsinkCaps();
738     ASSERT(caps);
739     GST_DEBUG("track ID: %s, caps: %" GST_PTR_FORMAT, newTrack->id().string().latin1().data(), caps);
740
741     if (doCapsHaveType(caps, GST_VIDEO_CAPS_TYPE_PREFIX)) {
742         std::optional<FloatSize> size = getVideoResolutionFromCaps(caps);
743         if (size.has_value())
744             m_videoSize = size.value();
745     }
746
747     if (firstTrackDetected)
748         m_playbackPipeline->attachTrack(appendPipeline->sourceBufferPrivate(), newTrack, caps);
749     else
750         m_playbackPipeline->reattachTrack(appendPipeline->sourceBufferPrivate(), newTrack, caps);
751 }
752
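// Builds the set of codec patterns supported for MSE by probing the GStreamer registry for audio
// and video decoders. Entries are fnmatch()-style patterns (e.g. "avc*", "mp4a*") that are
// consumed by supportsCodec() below.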
753 const static HashSet<AtomicString>& codecSet()
754 {
755     static NeverDestroyed<HashSet<AtomicString>> codecTypes = []()
756     {
757         MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements();
758         HashSet<AtomicString> set;
759
760         GList* audioDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO, GST_RANK_MARGINAL);
761         GList* videoDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, GST_RANK_MARGINAL);
762
763         enum ElementType {
764             AudioDecoder = 0,
765             VideoDecoder
766         };
767         struct GstCapsWebKitMapping {
768             ElementType elementType;
769             const char* capsString;
770             Vector<AtomicString> webkitCodecs;
771         };
772
773         GstCapsWebKitMapping mapping[] = {
774             { VideoDecoder, "video/x-h264,  profile=(string){ constrained-baseline, baseline }", { "x-h264", "avc*" } },
775             { VideoDecoder, "video/mpeg, mpegversion=(int){1,2}, systemstream=(boolean)false", { "mpeg" } },
776             { VideoDecoder, "video/x-vp8", { "vp8", "x-vp8" } },
777             { VideoDecoder, "video/x-vp9", { "vp9", "x-vp9" } },
778             { AudioDecoder, "audio/x-vorbis", { "vorbis", "x-vorbis" } },
779             { AudioDecoder, "audio/x-opus", { "opus", "x-opus" } }
780         };
781
782         for (auto& current : mapping) {
783             GList* factories = nullptr;
784             switch (current.elementType) {
785             case AudioDecoder:
786                 factories = audioDecoderFactories;
787                 break;
788             case VideoDecoder:
789                 factories = videoDecoderFactories;
790                 break;
791             default:
792                 g_assert_not_reached();
793                 break;
794             }
795
796             g_assert_nonnull(factories);
797
798             if (gstRegistryHasElementForMediaType(factories, current.capsString)) {
799                 if (!current.webkitCodecs.isEmpty()) {
800                     for (const auto& mimeType : current.webkitCodecs)
801                         set.add(mimeType);
802                 } else
803                     set.add(AtomicString(current.capsString));
804             }
805         }
806
807         bool audioMpegSupported = false;
808         if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int)1, layer=(int)[1, 3]")) {
809             audioMpegSupported = true;
810             set.add(AtomicString("audio/mp3"));
811         }
812
813         if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int){2, 4}")) {
814             audioMpegSupported = true;
815             set.add(AtomicString("mp4a*"));
816         }
817
818         if (audioMpegSupported) {
819             set.add(AtomicString("audio/mpeg"));
820             set.add(AtomicString("audio/x-mpeg"));
821         }
822
823
824         gst_plugin_feature_list_free(audioDecoderFactories);
825         gst_plugin_feature_list_free(videoDecoderFactories);
826
827         return set;
828     }();
829     return codecTypes;
830 }
831
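// Matches a single codec string from the "codecs=" parameter against the registry-derived
// patterns above using fnmatch(). Illustrative examples (not from the original source): a codecs
// value such as "avc1.42E01E" matches the "avc*" pattern, and "mp4a.40.2" matches "mp4a*".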
832 bool MediaPlayerPrivateGStreamerMSE::supportsCodec(String codec)
833 {
834     // If the codec is named like a MIME type (e.g. video/avc), remove the "video/" part.
835     size_t slashIndex = codec.find('/');
836     if (slashIndex != WTF::notFound)
837         codec = codec.substring(slashIndex + 1);
838
839     for (const auto& pattern : codecSet()) {
840         bool codecMatchesPattern = !fnmatch(pattern.string().utf8().data(), codec.utf8().data(), 0);
841         if (codecMatchesPattern)
842             return true;
843     }
844
845     return false;
846 }
847
848 bool MediaPlayerPrivateGStreamerMSE::supportsAllCodecs(const Vector<String>& codecs)
849 {
850     for (String codec : codecs) {
851         if (!supportsCodec(codec))
852             return false;
853     }
854
855     return true;
856 }
857
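// MSE-only type support check: an empty container type is answered with MayBeSupported, a known
// container without codecs with MayBeSupported, and a known container whose codecs all match
// with IsSupported; the result is then refined by extendedSupportsType().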
858 MediaPlayer::SupportsType MediaPlayerPrivateGStreamerMSE::supportsType(const MediaEngineSupportParameters& parameters)
859 {
860     MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
861     if (!parameters.isMediaSource)
862         return result;
863
864     auto containerType = parameters.type.containerType();
865
866     // YouTube TV provides empty types for some videos and we want to be selected as the best media engine for them.
867     if (containerType.isEmpty()) {
868         result = MediaPlayer::MayBeSupported;
869         return result;
870     }
871
872     // Spec says we should not return "probably" if the codecs string is empty.
873     if (mimeTypeCache().contains(containerType)) {
874         Vector<String> codecs = parameters.type.codecs();
875         if (codecs.isEmpty())
876             result = MediaPlayer::MayBeSupported;
877         else
878             result = supportsAllCodecs(codecs) ? MediaPlayer::IsSupported : MediaPlayer::IsNotSupported;
879     }
880
881     return extendedSupportsType(parameters, result);
882 }
883
884 void MediaPlayerPrivateGStreamerMSE::markEndOfStream(MediaSourcePrivate::EndOfStreamStatus status)
885 {
886     if (status != MediaSourcePrivate::EosNoError)
887         return;
888
889     GST_DEBUG("Marking end of stream");
890     m_eosMarked = true;
891     updateStates();
892 }
893
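// Besides reporting the position, this is where a pending end-of-stream is latched: once playback
// is paused or has reached the duration, the network state is forced to Loaded, the cached
// position is pinned to the duration, and the player is notified via timeChanged().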
894 MediaTime MediaPlayerPrivateGStreamerMSE::currentMediaTime() const
895 {
896     MediaTime position = MediaPlayerPrivateGStreamer::currentMediaTime();
897
898     if (m_eosPending && (paused() || (position >= durationMediaTime()))) {
899         if (m_networkState != MediaPlayer::Loaded) {
900             m_networkState = MediaPlayer::Loaded;
901             m_player->networkStateChanged();
902         }
903
904         m_eosPending = false;
905         m_isEndReached = true;
906         m_cachedPosition = m_mediaTimeDuration;
907         m_durationAtEOS = m_mediaTimeDuration;
908         m_player->timeChanged();
909     }
910     return position;
911 }
912
913 MediaTime MediaPlayerPrivateGStreamerMSE::maxMediaTimeSeekable() const
914 {
915     if (UNLIKELY(m_errorOccured))
916         return MediaTime::zeroTime();
917
918     GST_DEBUG("maxMediaTimeSeekable");
919     MediaTime result = durationMediaTime();
920     // Infinite duration means live stream.
921     if (result.isPositiveInfinite()) {
922         MediaTime maxBufferedTime = buffered()->maximumBufferedTime();
923         // Return the highest end time reported by the buffered attribute.
924         result = maxBufferedTime.isValid() ? maxBufferedTime : MediaTime::zeroTime();
925     }
926
927     return result;
928 }
929
930 #if ENABLE(ENCRYPTED_MEDIA)
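// For ClearKey, the key IDs and key values provided by the CDM instance are wrapped in
// GstBuffers, packed into a "drm-cipher-clearkey" GstStructure, and a copy of that structure is
// dispatched to every active append pipeline for decryption.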
931 void MediaPlayerPrivateGStreamerMSE::attemptToDecryptWithInstance(CDMInstance& instance)
932 {
933     if (is<CDMInstanceClearKey>(instance)) {
934         auto& ckInstance = downcast<CDMInstanceClearKey>(instance);
935         if (ckInstance.keys().isEmpty())
936             return;
937
938         GValue keyIDList = G_VALUE_INIT, keyValueList = G_VALUE_INIT;
939         g_value_init(&keyIDList, GST_TYPE_LIST);
940         g_value_init(&keyValueList, GST_TYPE_LIST);
941
942         auto appendBuffer =
943             [](GValue* valueList, const SharedBuffer& buffer)
944             {
945                 GValue* bufferValue = g_new0(GValue, 1);
946                 g_value_init(bufferValue, GST_TYPE_BUFFER);
947                 gst_value_take_buffer(bufferValue,
948                     gst_buffer_new_wrapped(g_memdup(buffer.data(), buffer.size()), buffer.size()));
949                 gst_value_list_append_and_take_value(valueList, bufferValue);
950             };
951
952         for (auto& key : ckInstance.keys()) {
953             appendBuffer(&keyIDList, *key.keyIDData);
954             appendBuffer(&keyValueList, *key.keyValueData);
955         }
956
957         GUniquePtr<GstStructure> structure(gst_structure_new_empty("drm-cipher-clearkey"));
958         gst_structure_set_value(structure.get(), "key-ids", &keyIDList);
959         gst_structure_set_value(structure.get(), "key-values", &keyValueList);
960
961         for (auto it : m_appendPipelinesMap)
962             it.value->dispatchDecryptionStructure(GUniquePtr<GstStructure>(gst_structure_copy(structure.get())));
963     }
964 }
965 #endif
966
967 } // namespace WebCore
968
969 #endif // ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)