Source/WebCore/platform/graphics/gstreamer/mse/MediaPlayerPrivateGStreamerMSE.cpp
/*
 * Copyright (C) 2007, 2009 Apple Inc.  All rights reserved.
 * Copyright (C) 2007 Collabora Ltd.  All rights reserved.
 * Copyright (C) 2007 Alp Toker <alp@atoker.com>
 * Copyright (C) 2009 Gustavo Noronha Silva <gns@gnome.org>
 * Copyright (C) 2009, 2010, 2011, 2012, 2013, 2016, 2017 Igalia S.L
 * Copyright (C) 2015 Sebastian Dröge <sebastian@centricular.com>
 * Copyright (C) 2015, 2016, 2017 Metrological Group B.V.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public License
 * along with this library; see the file COPYING.LIB.  If not, write to
 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#include "config.h"
#include "MediaPlayerPrivateGStreamerMSE.h"

#if ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)

#include "AppendPipeline.h"
#include "AudioTrackPrivateGStreamer.h"
#include "GStreamerUtilities.h"
#include "InbandTextTrackPrivateGStreamer.h"
#include "MIMETypeRegistry.h"
#include "MediaDescription.h"
#include "MediaPlayer.h"
#include "NotImplemented.h"
#include "SourceBufferPrivateGStreamer.h"
#include "TimeRanges.h"
#include "URL.h"
#include "VideoTrackPrivateGStreamer.h"

#include <fnmatch.h>
#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>
#include <gst/gst.h>
#include <gst/pbutils/pbutils.h>
#include <gst/video/video.h>
#include <wtf/Condition.h>
#include <wtf/HashSet.h>
#include <wtf/NeverDestroyed.h>
#include <wtf/StringPrintStream.h>
#include <wtf/text/AtomicString.h>
#include <wtf/text/AtomicStringHash.h>

#if ENABLE(ENCRYPTED_MEDIA)
#include "CDMClearKey.h"
#include "SharedBuffer.h"
#endif

static const char* dumpReadyState(WebCore::MediaPlayer::ReadyState readyState)
{
    switch (readyState) {
    case WebCore::MediaPlayer::HaveNothing: return "HaveNothing";
    case WebCore::MediaPlayer::HaveMetadata: return "HaveMetadata";
    case WebCore::MediaPlayer::HaveCurrentData: return "HaveCurrentData";
    case WebCore::MediaPlayer::HaveFutureData: return "HaveFutureData";
    case WebCore::MediaPlayer::HaveEnoughData: return "HaveEnoughData";
    default: return "(unknown)";
    }
}

GST_DEBUG_CATEGORY(webkit_mse_debug);
#define GST_CAT_DEFAULT webkit_mse_debug

namespace WebCore {

void MediaPlayerPrivateGStreamerMSE::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable()) {
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateGStreamerMSE>(player); },
            getSupportedTypes, supportsType, nullptr, nullptr, nullptr, supportsKeySystem);
    }
}

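// Ensures that the WebKitMediaSrc element is registered with GStreamer and that the "webkitmse"
// debug category is initialized. Called lazily from several entry points, so it must be safe to
// run more than once.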
bool initializeGStreamerAndRegisterWebKitMSEElement()
{
    registerWebKitGStreamerElements();

    GST_DEBUG_CATEGORY_INIT(webkit_mse_debug, "webkitmse", 0, "WebKit MSE media player");

    GRefPtr<GstElementFactory> WebKitMediaSrcFactory = adoptGRef(gst_element_factory_find("webkitmediasrc"));
    if (UNLIKELY(!WebKitMediaSrcFactory))
        gst_element_register(nullptr, "webkitmediasrc", GST_RANK_PRIMARY + 100, WEBKIT_TYPE_MEDIA_SRC);
    return true;
}

bool MediaPlayerPrivateGStreamerMSE::isAvailable()
{
    if (UNLIKELY(!initializeGStreamerAndRegisterWebKitMSEElement()))
        return false;

    GRefPtr<GstElementFactory> factory = adoptGRef(gst_element_factory_find("playbin"));
    return factory;
}

MediaPlayerPrivateGStreamerMSE::MediaPlayerPrivateGStreamerMSE(MediaPlayer* player)
    : MediaPlayerPrivateGStreamer(player)
{
    GST_TRACE("creating the player (%p)", this);
}

MediaPlayerPrivateGStreamerMSE::~MediaPlayerPrivateGStreamerMSE()
{
    GST_TRACE("destroying the player (%p)", this);

    for (auto iterator : m_appendPipelinesMap)
        iterator.value->clearPlayerPrivate();

    if (m_source) {
        webKitMediaSrcSetMediaPlayerPrivate(WEBKIT_MEDIA_SRC(m_source.get()), nullptr);
        g_signal_handlers_disconnect_by_data(m_source.get(), this);
    }

    if (m_playbackPipeline)
        m_playbackPipeline->setWebKitMediaSrc(nullptr);
}

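// Only URLs carrying the "mediasource" prefix (synthesized by the MSE load() overload below) are
// accepted here; anything else is reported as a format error so that MediaPlayer can fall back to
// the next MediaPlayerPrivate implementation.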
void MediaPlayerPrivateGStreamerMSE::load(const String& urlString)
{
    if (!urlString.startsWith("mediasource")) {
        // Properly fail so the global MediaPlayer tries to fall back to the next MediaPlayerPrivate.
        m_networkState = MediaPlayer::FormatError;
        m_player->networkStateChanged();
        return;
    }

    if (UNLIKELY(!initializeGStreamerAndRegisterWebKitMSEElement()))
        return;

    if (!m_playbackPipeline)
        m_playbackPipeline = PlaybackPipeline::create();

    MediaPlayerPrivateGStreamer::load(urlString);
}

void MediaPlayerPrivateGStreamerMSE::load(const String& url, MediaSourcePrivateClient* mediaSource)
{
    m_mediaSource = mediaSource;
    load(String::format("mediasource%s", url.utf8().data()));
}

void MediaPlayerPrivateGStreamerMSE::pause()
{
    m_paused = true;
    MediaPlayerPrivateGStreamer::pause();
}

MediaTime MediaPlayerPrivateGStreamerMSE::durationMediaTime() const
{
    if (UNLIKELY(!m_pipeline || m_errorOccured))
        return MediaTime();

    return m_mediaTimeDuration;
}

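// Entry point for seeks requested by the MediaPlayer. The actual pipeline seek can be deferred
// (see doSeek()) until the pipeline state and the buffered ranges allow it.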
void MediaPlayerPrivateGStreamerMSE::seek(const MediaTime& time)
{
    if (UNLIKELY(!m_pipeline || m_errorOccured))
        return;

    GST_INFO("[Seek] seek attempt to %s secs", toString(time).utf8().data());

    // Avoid useless seeking.
    MediaTime current = currentMediaTime();
    if (time == current) {
        if (!m_seeking)
            timeChanged();
        return;
    }

    if (isLiveStream())
        return;

    if (m_seeking && m_seekIsPending) {
        m_seekTime = time;
        return;
    }

    GST_DEBUG("Seeking from %s to %s seconds", toString(current).utf8().data(), toString(time).utf8().data());

    MediaTime previousSeekTime = m_seekTime;
    m_seekTime = time;

    if (!doSeek()) {
        m_seekTime = previousSeekTime;
        GST_WARNING("Seeking to %s failed", toString(time).utf8().data());
        return;
    }

    m_isEndReached = false;
    GST_DEBUG("m_seeking=%s, m_seekTime=%s", boolForPrinting(m_seeking), toString(m_seekTime).utf8().data());
}

void MediaPlayerPrivateGStreamerMSE::configurePlaySink()
{
    MediaPlayerPrivateGStreamer::configurePlaySink();

    GRefPtr<GstElement> playsink = adoptGRef(gst_bin_get_by_name(GST_BIN(m_pipeline.get()), "playsink"));
    if (playsink) {
        // The default value (0) means "send events to all the sinks", instead
        // of "only to the first one that returns true". This is needed for MSE seek.
        g_object_set(G_OBJECT(playsink.get()), "send-event-mode", 0, nullptr);
    }
}

bool MediaPlayerPrivateGStreamerMSE::changePipelineState(GstState newState)
{
    if (seeking()) {
        GST_DEBUG("Rejected state change to %s while seeking",
            gst_element_state_get_name(newState));
        return true;
    }

    return MediaPlayerPrivateGStreamer::changePipelineState(newState);
}

void MediaPlayerPrivateGStreamerMSE::notifySeekNeedsDataForTime(const MediaTime& seekTime)
{
    // Re-enqueue the samples needed to resume playback at the new position.
    m_mediaSource->seekToTime(seekTime);

    GST_DEBUG("MSE seek to %s finished", toString(seekTime).utf8().data());

    if (!m_gstSeekCompleted) {
        m_gstSeekCompleted = true;
        maybeFinishSeek();
    }
}

bool MediaPlayerPrivateGStreamerMSE::doSeek(const MediaTime&, float, GstSeekFlags)
{
    // Use doSeek() instead. If anybody is calling this version of doSeek(), something is wrong.
    ASSERT_NOT_REACHED();
    return false;
}

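// Performs the actual seek, or defers it when the pipeline is not ready or the target time is not
// buffered yet. A seek is only complete once both the GStreamer flushing seek and the MSE-side
// re-enqueuing of samples have finished; see maybeFinishSeek().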
bool MediaPlayerPrivateGStreamerMSE::doSeek()
{
    MediaTime seekTime = m_seekTime;
    double rate = m_player->rate();
    GstSeekFlags seekType = static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE);

    // Always move to the seeking state to report a correct 'currentTime' while waiting for the actual seek to complete.
    m_seeking = true;

    // Check if the playback pipeline is ready to seek.
    GstState state, newState;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &newState, 0);
    if (getStateResult == GST_STATE_CHANGE_FAILURE || getStateResult == GST_STATE_CHANGE_NO_PREROLL) {
        GST_DEBUG("[Seek] cannot seek, current state change is %s", gst_element_state_change_return_get_name(getStateResult));
        webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
        m_seeking = false;
        return false;
    }
    if ((getStateResult == GST_STATE_CHANGE_ASYNC
        && !(state == GST_STATE_PLAYING && newState == GST_STATE_PAUSED))
        || state < GST_STATE_PAUSED
        || m_isEndReached
        || !m_gstSeekCompleted) {
        CString reason = "Unknown reason";
        if (getStateResult == GST_STATE_CHANGE_ASYNC) {
            reason = String::format("In async change %s --> %s",
                gst_element_state_get_name(state),
                gst_element_state_get_name(newState)).utf8();
        } else if (state < GST_STATE_PAUSED)
            reason = "State less than PAUSED";
        else if (m_isEndReached)
            reason = "End reached";
        else if (!m_gstSeekCompleted)
            reason = "Previous seek is not finished yet";

        GST_DEBUG("[Seek] Delaying the seek: %s", reason.data());

        m_seekIsPending = true;

        if (m_isEndReached) {
            GST_DEBUG("[Seek] reset pipeline");
            m_resetPipeline = true;
            m_seeking = false;
            if (!changePipelineState(GST_STATE_PAUSED))
                loadingFailed(MediaPlayer::Empty);
            else
                m_seeking = true;
        }

        return m_seeking;
    }

    // Stop accepting new samples until the actual seek is finished.
    webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), false);

    // Correct the seek time if it helps to fix a small gap.
    if (!isTimeBuffered(seekTime)) {
        // Check whether a near-future time (< 0.1 sec.) is buffered and, if so, use it as the seek target.
        if (m_mediaSource) {
            const MediaTime miniGap = MediaTime(1, 10);
            MediaTime nearest = m_mediaSource->buffered()->nearest(seekTime);
            if (nearest.isValid() && nearest > seekTime && (nearest - seekTime) <= miniGap && isTimeBuffered(nearest + miniGap)) {
                GST_DEBUG("[Seek] Changed the seek target time from %s to %s, a near point in the future", toString(seekTime).utf8().data(), toString(nearest).utf8().data());
                seekTime = nearest;
            }
        }
    }

    // Check if MSE has samples for the requested time and defer the actual seek if needed.
    if (!isTimeBuffered(seekTime)) {
        GST_DEBUG("[Seek] Delaying the seek: MSE is not ready");
        GstStateChangeReturn setStateResult = gst_element_set_state(m_pipeline.get(), GST_STATE_PAUSED);
        if (setStateResult == GST_STATE_CHANGE_FAILURE) {
            GST_DEBUG("[Seek] Cannot seek, failed to pause playback pipeline.");
            webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
            m_seeking = false;
            return false;
        }
        m_readyState = MediaPlayer::HaveMetadata;
        notifySeekNeedsDataForTime(seekTime);
        ASSERT(!m_mseSeekCompleted);
        return true;
    }

    // Complete the previous MSE seek if needed.
    if (!m_mseSeekCompleted) {
        m_mediaSource->monitorSourceBuffers();
        ASSERT(m_mseSeekCompleted);
        // Note: seekCompleted will recursively call us.
        return m_seeking;
    }

    GST_DEBUG("We can seek now");

    MediaTime startTime = seekTime, endTime = MediaTime::invalidTime();

    if (rate < 0) {
        startTime = MediaTime::zeroTime();
        endTime = seekTime;
    }

    if (!rate)
        rate = 1;

    GST_DEBUG("Actual seek to %s, end time: %s, rate: %f", toString(startTime).utf8().data(), toString(endTime).utf8().data(), rate);

    // This will eventually call notifySeekNeedsData() to signal that the pipeline is ready for sample enqueuing.
    webKitMediaSrcPrepareSeek(WEBKIT_MEDIA_SRC(m_source.get()), seekTime);

    m_gstSeekCompleted = false;
    if (!gst_element_seek(m_pipeline.get(), rate, GST_FORMAT_TIME, seekType, GST_SEEK_TYPE_SET, toGstClockTime(startTime), GST_SEEK_TYPE_SET, toGstClockTime(endTime))) {
        webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
        m_seeking = false;
        m_gstSeekCompleted = true;
        GST_DEBUG("doSeek(): gst_element_seek() failed, returning false");
        return false;
    }

    // The samples will be enqueued in notifySeekNeedsData().
    GST_DEBUG("doSeek(): gst_element_seek() succeeded, returning true");
    return true;
}

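// Finishes the in-flight seek, but only when the GStreamer seek, the MSE seek and any pending
// asynchronous state change have all completed.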
void MediaPlayerPrivateGStreamerMSE::maybeFinishSeek()
{
    if (!m_seeking || !m_mseSeekCompleted || !m_gstSeekCompleted)
        return;

    GstState state, newState;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &newState, 0);

    if (getStateResult == GST_STATE_CHANGE_ASYNC
        && !(state == GST_STATE_PLAYING && newState == GST_STATE_PAUSED)) {
        GST_DEBUG("[Seek] Delaying seek finish");
        return;
    }

    if (m_seekIsPending) {
        GST_DEBUG("[Seek] Committing pending seek to %s", toString(m_seekTime).utf8().data());
        m_seekIsPending = false;
        if (!doSeek()) {
            GST_WARNING("[Seek] Seeking to %s failed", toString(m_seekTime).utf8().data());
            m_cachedPosition = MediaTime::invalidTime();
        }
        return;
    }

    GST_DEBUG("[Seek] Seeked to %s", toString(m_seekTime).utf8().data());

    webKitMediaSrcSetReadyForSamples(WEBKIT_MEDIA_SRC(m_source.get()), true);
    m_seeking = false;
    m_cachedPosition = MediaTime::invalidTime();
    // The pipeline can still have a pending state. In this case a position query will fail.
    // Right now we can use m_seekTime as a fallback.
    m_canFallBackToLastFinishedSeekPosition = true;
    timeChanged();
}

void MediaPlayerPrivateGStreamerMSE::updatePlaybackRate()
{
    notImplemented();
}

bool MediaPlayerPrivateGStreamerMSE::seeking() const
{
    return m_seeking;
}

// FIXME: MediaPlayerPrivateGStreamer manages the ReadyState on its own. We shouldn't change it manually.
void MediaPlayerPrivateGStreamerMSE::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (readyState == m_readyState)
        return;

    if (seeking()) {
        GST_DEBUG("Skipping ready state change (%s -> %s) due to seek", dumpReadyState(m_readyState), dumpReadyState(readyState));
        return;
    }

    GST_DEBUG("Ready State Changed manually from %u to %u", m_readyState, readyState);
    MediaPlayer::ReadyState oldReadyState = m_readyState;
    m_readyState = readyState;
    GST_DEBUG("m_readyState: %s -> %s", dumpReadyState(oldReadyState), dumpReadyState(m_readyState));

    if (oldReadyState < MediaPlayer::HaveCurrentData && m_readyState >= MediaPlayer::HaveCurrentData) {
        GST_DEBUG("[Seek] Reporting load state changed to trigger seek continuation");
        loadStateChanged();
    }
    m_player->readyStateChanged();

    GstState pipelineState;
    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &pipelineState, nullptr, 250 * GST_NSECOND);
    bool isPlaying = (getStateResult == GST_STATE_CHANGE_SUCCESS && pipelineState == GST_STATE_PLAYING);

    if (m_readyState == MediaPlayer::HaveMetadata && oldReadyState > MediaPlayer::HaveMetadata && isPlaying) {
        GST_TRACE("Changing pipeline to PAUSED...");
        bool ok = changePipelineState(GST_STATE_PAUSED);
        GST_TRACE("Changed pipeline to PAUSED: %s", ok ? "Success" : "Error");
    }
}

void MediaPlayerPrivateGStreamerMSE::waitForSeekCompleted()
{
    if (!m_seeking)
        return;

    GST_DEBUG("Waiting for the MSE seek to complete");
    m_mseSeekCompleted = false;
}

void MediaPlayerPrivateGStreamerMSE::seekCompleted()
{
    if (m_mseSeekCompleted)
        return;

    GST_DEBUG("MSE seek completed");
    m_mseSeekCompleted = true;

    doSeek();

    if (!seeking() && m_readyState >= MediaPlayer::HaveFutureData)
        changePipelineState(GST_STATE_PLAYING);

    if (!seeking())
        m_player->timeChanged();
}

void MediaPlayerPrivateGStreamerMSE::setRate(float)
{
    notImplemented();
}

std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateGStreamerMSE::buffered() const
{
    return m_mediaSource ? m_mediaSource->buffered() : std::make_unique<PlatformTimeRanges>();
}

void MediaPlayerPrivateGStreamerMSE::sourceSetup(GstElement* sourceElement)
{
    m_source = sourceElement;

    ASSERT(WEBKIT_IS_MEDIA_SRC(m_source.get()));

    m_playbackPipeline->setWebKitMediaSrc(WEBKIT_MEDIA_SRC(m_source.get()));

    MediaSourceGStreamer::open(*m_mediaSource.get(), *this);
    g_signal_connect_swapped(m_source.get(), "video-changed", G_CALLBACK(videoChangedCallback), this);
    g_signal_connect_swapped(m_source.get(), "audio-changed", G_CALLBACK(audioChangedCallback), this);
    g_signal_connect_swapped(m_source.get(), "text-changed", G_CALLBACK(textChangedCallback), this);
    webKitMediaSrcSetMediaPlayerPrivate(WEBKIT_MEDIA_SRC(m_source.get()), this);
}

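// Maps the current GStreamer pipeline state (and any pending state change) to the MediaPlayer
// ready and network states, and pauses or restarts playback as required by buffering.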
void MediaPlayerPrivateGStreamerMSE::updateStates()
{
    if (UNLIKELY(!m_pipeline || m_errorOccured))
        return;

    MediaPlayer::NetworkState oldNetworkState = m_networkState;
    MediaPlayer::ReadyState oldReadyState = m_readyState;
    GstState state, pending;

    GstStateChangeReturn getStateResult = gst_element_get_state(m_pipeline.get(), &state, &pending, 250 * GST_NSECOND);

    bool shouldUpdatePlaybackState = false;
    switch (getStateResult) {
    case GST_STATE_CHANGE_SUCCESS: {
        GST_DEBUG("State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));

        // Do nothing on EOS if the state changed to READY, to avoid recreating the player
        // on HTMLMediaElement and to properly generate the video 'ended' event.
        if (m_isEndReached && state == GST_STATE_READY)
            break;

        m_resetPipeline = (state <= GST_STATE_READY);
        if (m_resetPipeline)
            m_mediaTimeDuration = MediaTime::zeroTime();

        // Update ready and network states.
        switch (state) {
        case GST_STATE_NULL:
            m_readyState = MediaPlayer::HaveNothing;
            GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
            m_networkState = MediaPlayer::Empty;
            break;
        case GST_STATE_READY:
            m_readyState = MediaPlayer::HaveMetadata;
            GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
            m_networkState = MediaPlayer::Empty;
            break;
        case GST_STATE_PAUSED:
        case GST_STATE_PLAYING:
            if (seeking()) {
                m_readyState = MediaPlayer::HaveMetadata;
                // FIXME: Should we manage NetworkState too?
                GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
            } else if (m_buffering) {
                if (m_bufferingPercentage == 100) {
                    GST_DEBUG("[Buffering] Complete.");
                    m_buffering = false;
                    m_readyState = MediaPlayer::HaveEnoughData;
                    GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
                    m_networkState = m_downloadFinished ? MediaPlayer::Idle : MediaPlayer::Loading;
                } else {
                    m_readyState = MediaPlayer::HaveCurrentData;
                    GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
                    m_networkState = MediaPlayer::Loading;
                }
            } else if (m_downloadFinished) {
                m_readyState = MediaPlayer::HaveEnoughData;
                GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
                m_networkState = MediaPlayer::Loaded;
            } else {
                m_readyState = MediaPlayer::HaveFutureData;
                GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
                m_networkState = MediaPlayer::Loading;
            }

            if (m_eosMarked && state == GST_STATE_PLAYING)
                m_eosPending = true;

            break;
        default:
            ASSERT_NOT_REACHED();
            break;
        }

        // Sync states where needed.
        if (state == GST_STATE_PAUSED) {
            if (!m_volumeAndMuteInitialized) {
                notifyPlayerOfVolumeChange();
                notifyPlayerOfMute();
                m_volumeAndMuteInitialized = true;
            }

            if (!seeking() && !m_buffering && !m_paused && m_playbackRate) {
                GST_DEBUG("[Buffering] Restarting playback.");
                changePipelineState(GST_STATE_PLAYING);
            }
        } else if (state == GST_STATE_PLAYING) {
            m_paused = false;

            if ((m_buffering && !isLiveStream()) || !m_playbackRate) {
                GST_DEBUG("[Buffering] Pausing stream for buffering.");
                changePipelineState(GST_STATE_PAUSED);
            }
        } else
            m_paused = true;

        if (m_requestedState == GST_STATE_PAUSED && state == GST_STATE_PAUSED) {
            shouldUpdatePlaybackState = true;
            GST_DEBUG("Requested state change to %s was completed", gst_element_state_get_name(state));
        }

        break;
    }
    case GST_STATE_CHANGE_ASYNC:
        GST_DEBUG("Async: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
        // Change in progress.
        break;
    case GST_STATE_CHANGE_FAILURE:
        GST_WARNING("Failure: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));
        // Change failed.
        return;
    case GST_STATE_CHANGE_NO_PREROLL:
        GST_DEBUG("No preroll: State: %s, pending: %s", gst_element_state_get_name(state), gst_element_state_get_name(pending));

        // Live pipelines go to PAUSED without prerolling.
        m_isStreaming = true;

        if (state == GST_STATE_READY) {
            m_readyState = MediaPlayer::HaveNothing;
            GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
        } else if (state == GST_STATE_PAUSED) {
            m_readyState = MediaPlayer::HaveEnoughData;
            GST_DEBUG("m_readyState=%s", dumpReadyState(m_readyState));
            m_paused = true;
        } else if (state == GST_STATE_PLAYING)
            m_paused = false;

        if (!m_paused && m_playbackRate)
            changePipelineState(GST_STATE_PLAYING);

        m_networkState = MediaPlayer::Loading;
        break;
    default:
        GST_DEBUG("Unhandled state change result: %d", getStateResult);
        break;
    }

    m_requestedState = GST_STATE_VOID_PENDING;

    if (shouldUpdatePlaybackState)
        m_player->playbackStateChanged();

    if (m_networkState != oldNetworkState) {
        GST_DEBUG("Network State Changed from %u to %u", oldNetworkState, m_networkState);
        m_player->networkStateChanged();
    }
    if (m_readyState != oldReadyState) {
        GST_DEBUG("Ready State Changed from %u to %u", oldReadyState, m_readyState);
        m_player->readyStateChanged();
    }

    if (getStateResult == GST_STATE_CHANGE_SUCCESS && state >= GST_STATE_PAUSED) {
        updatePlaybackRate();
        maybeFinishSeek();
    }
}

void MediaPlayerPrivateGStreamerMSE::asyncStateChangeDone()
{
    if (UNLIKELY(!m_pipeline || m_errorOccured))
        return;

    if (m_seeking)
        maybeFinishSeek();
    else
        updateStates();
}

bool MediaPlayerPrivateGStreamerMSE::isTimeBuffered(const MediaTime& time) const
{
    bool result = m_mediaSource && m_mediaSource->buffered()->contain(time);
    GST_DEBUG("Time %s buffered? %s", toString(time).utf8().data(), boolForPrinting(result));
    return result;
}

void MediaPlayerPrivateGStreamerMSE::setMediaSourceClient(Ref<MediaSourceClientGStreamerMSE> client)
{
    m_mediaSourceClient = client.ptr();
}

RefPtr<MediaSourceClientGStreamerMSE> MediaPlayerPrivateGStreamerMSE::mediaSourceClient()
{
    return m_mediaSourceClient;
}

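// Propagates a duration change coming from the MediaSource client to the MediaPlayer, the playback
// pipeline and the MediaSource itself.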
void MediaPlayerPrivateGStreamerMSE::durationChanged()
{
    if (!m_mediaSourceClient) {
        GST_DEBUG("m_mediaSourceClient is null, doing nothing");
        return;
    }

    MediaTime previousDuration = m_mediaTimeDuration;
    m_mediaTimeDuration = m_mediaSourceClient->duration();

    GST_TRACE("previous=%s, new=%s", toString(previousDuration).utf8().data(), toString(m_mediaTimeDuration).utf8().data());

    // Avoid emitting durationchanged in the case where the previous duration was 0, because that case is
    // already handled by the HTMLMediaElement.
    if (m_mediaTimeDuration != previousDuration && m_mediaTimeDuration.isValid() && previousDuration.isValid()) {
        m_player->durationChanged();
        m_playbackPipeline->notifyDurationChanged();
        m_mediaSource->durationChanged(m_mediaTimeDuration);
    }
}

static HashSet<String, ASCIICaseInsensitiveHash>& mimeTypeCache()
{
    static NeverDestroyed<HashSet<String, ASCIICaseInsensitiveHash>> cache = []()
    {
        initializeGStreamerAndRegisterWebKitMSEElement();
        HashSet<String, ASCIICaseInsensitiveHash> set;
        const char* mimeTypes[] = {
            "video/mp4",
            "audio/mp4",
            "video/webm",
            "audio/webm"
        };
        for (auto& type : mimeTypes)
            set.add(type);
        return set;
    }();
    return cache;
}

void MediaPlayerPrivateGStreamerMSE::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    types = mimeTypeCache();
}

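// Called when an AppendPipeline has identified a new track. Records the video resolution when the
// track carries video caps and attaches (or re-attaches) the track to the playback pipeline.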
void MediaPlayerPrivateGStreamerMSE::trackDetected(RefPtr<AppendPipeline> appendPipeline, RefPtr<WebCore::TrackPrivateBase> newTrack, bool firstTrackDetected)
{
    ASSERT(appendPipeline->track() == newTrack);

    GstCaps* caps = appendPipeline->appsinkCaps();
    ASSERT(caps);
    GST_DEBUG("track ID: %s, caps: %" GST_PTR_FORMAT, newTrack->id().string().latin1().data(), caps);

    if (doCapsHaveType(caps, GST_VIDEO_CAPS_TYPE_PREFIX)) {
        std::optional<FloatSize> size = getVideoResolutionFromCaps(caps);
        if (size.has_value())
            m_videoSize = size.value();
    }

    if (firstTrackDetected)
        m_playbackPipeline->attachTrack(appendPipeline->sourceBufferPrivate(), newTrack, caps);
    else
        m_playbackPipeline->reattachTrack(appendPipeline->sourceBufferPrivate(), newTrack, caps);
}

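// Builds, once, the set of codec name patterns supported for MSE by probing the GStreamer registry
// for matching audio and video decoders. The entries are fnmatch()-style patterns that
// supportsCodec() matches against.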
const static HashSet<AtomicString>& codecSet()
{
    static NeverDestroyed<HashSet<AtomicString>> codecTypes = []()
    {
        MediaPlayerPrivateGStreamerBase::initializeGStreamerAndRegisterWebKitElements();
        HashSet<AtomicString> set;

        GList* audioDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO, GST_RANK_MARGINAL);
        GList* videoDecoderFactories = gst_element_factory_list_get_elements(GST_ELEMENT_FACTORY_TYPE_DECODER | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO, GST_RANK_MARGINAL);

        enum ElementType {
            AudioDecoder = 0,
            VideoDecoder
        };
        struct GstCapsWebKitMapping {
            ElementType elementType;
            const char* capsString;
            Vector<AtomicString> webkitCodecs;
        };

        GstCapsWebKitMapping mapping[] = {
            { VideoDecoder, "video/x-h264,  profile=(string){ constrained-baseline, baseline }", { "x-h264", "avc*" } },
            { VideoDecoder, "video/mpeg, mpegversion=(int){1,2}, systemstream=(boolean)false", { "mpeg" } },
            { VideoDecoder, "video/x-vp8", { "vp8", "x-vp8" } },
            { VideoDecoder, "video/x-vp9", { "vp9", "x-vp9" } },
            { AudioDecoder, "audio/x-vorbis", { "vorbis", "x-vorbis" } },
            { AudioDecoder, "audio/x-opus", { "opus", "x-opus" } }
        };

        for (auto& current : mapping) {
            GList* factories = nullptr;
            switch (current.elementType) {
            case AudioDecoder:
                factories = audioDecoderFactories;
                break;
            case VideoDecoder:
                factories = videoDecoderFactories;
                break;
            default:
                g_assert_not_reached();
                break;
            }

            g_assert_nonnull(factories);

            if (factories && gstRegistryHasElementForMediaType(factories, current.capsString)) {
                if (!current.webkitCodecs.isEmpty()) {
                    for (const auto& mimeType : current.webkitCodecs)
                        set.add(mimeType);
                } else
                    set.add(AtomicString(current.capsString));
            }
        }

        bool audioMpegSupported = false;
        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int)1, layer=(int)[1, 3]")) {
            audioMpegSupported = true;
            set.add(AtomicString("audio/mp3"));
        }

        if (gstRegistryHasElementForMediaType(audioDecoderFactories, "audio/mpeg, mpegversion=(int){2, 4}")) {
            audioMpegSupported = true;
            set.add(AtomicString("mp4a*"));
        }

        if (audioMpegSupported) {
            set.add(AtomicString("audio/mpeg"));
            set.add(AtomicString("audio/x-mpeg"));
        }

        gst_plugin_feature_list_free(audioDecoderFactories);
        gst_plugin_feature_list_free(videoDecoderFactories);

        return set;
    }();
    return codecTypes;
}

bool MediaPlayerPrivateGStreamerMSE::supportsCodec(String codec)
{
    // If the codec is named like a MIME type (e.g. video/avc), remove the "video/" part.
    size_t slashIndex = codec.find('/');
    if (slashIndex != WTF::notFound)
        codec = codec.substring(slashIndex + 1);

    for (const auto& pattern : codecSet()) {
        bool codecMatchesPattern = !fnmatch(pattern.string().utf8().data(), codec.utf8().data(), 0);
        if (codecMatchesPattern)
            return true;
    }

    return false;
}

bool MediaPlayerPrivateGStreamerMSE::supportsAllCodecs(const Vector<String>& codecs)
{
    for (String codec : codecs) {
        if (!supportsCodec(codec))
            return false;
    }

    return true;
}

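// MSE-specific content type support check: only MediaSource requests are considered, an empty
// container type is reported as "maybe supported", and the codecs list is matched against the
// probed codec set.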
MediaPlayer::SupportsType MediaPlayerPrivateGStreamerMSE::supportsType(const MediaEngineSupportParameters& parameters)
{
    MediaPlayer::SupportsType result = MediaPlayer::IsNotSupported;
    if (!parameters.isMediaSource)
        return result;

    auto containerType = parameters.type.containerType();

    // YouTube TV provides empty types for some videos and we want to be selected as the best media engine for them.
    if (containerType.isEmpty()) {
        result = MediaPlayer::MayBeSupported;
        return result;
    }

    // The spec says we should not return "probably" if the codecs string is empty.
    if (mimeTypeCache().contains(containerType)) {
        Vector<String> codecs = parameters.type.codecs();
        if (codecs.isEmpty())
            result = MediaPlayer::MayBeSupported;
        else
            result = supportsAllCodecs(codecs) ? MediaPlayer::IsSupported : MediaPlayer::IsNotSupported;
    }

    return extendedSupportsType(parameters, result);
}

void MediaPlayerPrivateGStreamerMSE::markEndOfStream(MediaSourcePrivate::EndOfStreamStatus status)
{
    if (status != MediaSourcePrivate::EosNoError)
        return;

    GST_DEBUG("Marking end of stream");
    m_eosMarked = true;
    updateStates();
}

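// Besides reporting the playback position, this detects a pending end-of-stream condition and
// notifies the MediaPlayer so that the 'ended' event can be generated.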
MediaTime MediaPlayerPrivateGStreamerMSE::currentMediaTime() const
{
    MediaTime position = MediaPlayerPrivateGStreamer::currentMediaTime();

    if (m_eosPending && (paused() || (position >= durationMediaTime()))) {
        if (m_networkState != MediaPlayer::Loaded) {
            m_networkState = MediaPlayer::Loaded;
            m_player->networkStateChanged();
        }

        m_eosPending = false;
        m_isEndReached = true;
        m_cachedPosition = m_mediaTimeDuration;
        m_durationAtEOS = m_mediaTimeDuration;
        m_player->timeChanged();
    }
    return position;
}

MediaTime MediaPlayerPrivateGStreamerMSE::maxMediaTimeSeekable() const
{
    if (UNLIKELY(m_errorOccured))
        return MediaTime::zeroTime();

    GST_DEBUG("maxMediaTimeSeekable");
    MediaTime result = durationMediaTime();
    // Infinite duration means live stream.
    if (result.isPositiveInfinite()) {
        MediaTime maxBufferedTime = buffered()->maximumBufferedTime();
        // Return the highest end time reported by the buffered attribute.
        result = maxBufferedTime.isValid() ? maxBufferedTime : MediaTime::zeroTime();
    }

    return result;
}

#if ENABLE(ENCRYPTED_MEDIA)
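// For ClearKey, pushes the key IDs and key values held by the CDM instance to every append
// pipeline as a "drm-cipher-clearkey" structure, so the keys become available for decryption.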
void MediaPlayerPrivateGStreamerMSE::attemptToDecryptWithInstance(CDMInstance& instance)
{
    if (is<CDMInstanceClearKey>(instance)) {
        auto& ckInstance = downcast<CDMInstanceClearKey>(instance);
        if (ckInstance.keys().isEmpty())
            return;

        GValue keyIDList = G_VALUE_INIT, keyValueList = G_VALUE_INIT;
        g_value_init(&keyIDList, GST_TYPE_LIST);
        g_value_init(&keyValueList, GST_TYPE_LIST);

        auto appendBuffer =
            [](GValue* valueList, const SharedBuffer& buffer)
            {
                GValue* bufferValue = g_new0(GValue, 1);
                g_value_init(bufferValue, GST_TYPE_BUFFER);
                gst_value_take_buffer(bufferValue,
                    gst_buffer_new_wrapped(g_memdup(buffer.data(), buffer.size()), buffer.size()));
                gst_value_list_append_and_take_value(valueList, bufferValue);
            };

        for (auto& key : ckInstance.keys()) {
            appendBuffer(&keyIDList, *key.keyIDData);
            appendBuffer(&keyValueList, *key.keyValueData);
        }

        GUniquePtr<GstStructure> structure(gst_structure_new_empty("drm-cipher-clearkey"));
        gst_structure_set_value(structure.get(), "key-ids", &keyIDList);
        gst_structure_set_value(structure.get(), "key-values", &keyValueList);

        for (auto it : m_appendPipelinesMap)
            it.value->dispatchDecryptionStructure(GUniquePtr<GstStructure>(gst_structure_copy(structure.get())));
    }
}
#endif

} // namespace WebCore

#endif // ENABLE(VIDEO) && USE(GSTREAMER) && ENABLE(MEDIA_SOURCE)