a3a45121c0a77219a2132e6c9e7561ca00edc24b
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateMediaSourceAVFObjC.mm
1 /*
2  * Copyright (C) 2013, 2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVFoundationMIMETypeCache.h"
32 #import "AVFoundationSPI.h"
33 #import "CDMSessionAVStreamSession.h"
34 #import "CDMSessionMediaSourceAVFObjC.h"
35 #import "FileSystem.h"
36 #import "Logging.h"
37 #import "MediaSourcePrivateAVFObjC.h"
38 #import "MediaSourcePrivateClient.h"
39 #import "MediaTimeAVFoundation.h"
40 #import "PlatformClockCM.h"
41 #import "WebCoreSystemInterface.h"
42 #import <AVFoundation/AVAsset.h>
43 #import <AVFoundation/AVTime.h>
44 #import <QuartzCore/CALayer.h>
45 #import <objc_runtime.h>
46 #import <wtf/Deque.h>
47 #import <wtf/MainThread.h>
48 #import <wtf/NeverDestroyed.h>
49
50 #if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
51 #import "VideoFullscreenLayerManager.h"
52 #endif
53
54 #pragma mark - Soft Linking
55
56 #import "CoreMediaSoftLink.h"
57
58 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
59
60 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVAsset)
61 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVURLAsset)
62 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferAudioRenderer)
63 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
64 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferRenderSynchronizer)
65 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamDataParser)
66 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamSession);
67 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVVideoPerformanceMetrics)
68
69 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
70 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
71
72 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
73 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
74
75 #pragma mark -
76 #pragma mark AVVideoPerformanceMetrics
77
// Forward-declare the private AVVideoPerformanceMetrics interface so playback
// quality statistics can be queried without the private SDK headers.
@interface AVVideoPerformanceMetrics : NSObject
- (unsigned long)totalNumberOfVideoFrames;
- (unsigned long)numberOfDroppedVideoFrames;
- (unsigned long)numberOfCorruptedVideoFrames;
- (double)totalFrameDelay;
@end

// SPI: expose the display layer's performance-metrics accessor used by the
// frame statistics methods below.
@interface AVSampleBufferDisplayLayer (WebCoreAVSampleBufferDisplayLayerPrivate)
- (AVVideoPerformanceMetrics *)videoPerformanceMetrics;
@end
88
89 #pragma mark -
90 #pragma mark AVStreamSession
91
// SPI: declare the AVStreamSession initializer used by streamSession() below.
@interface AVStreamSession : NSObject
- (instancetype)initWithStorageDirectoryAtURL:(NSURL *)storageDirectory;
@end
95
96 namespace WebCore {
97
98 #pragma mark -
99 #pragma mark MediaPlayerPrivateMediaSourceAVFObjC
100
// Timebase notifications may arrive on an arbitrary thread; bounce the
// effective-rate change over to the main thread before touching the player.
static void CMTimebaseEffectiveRateChangedCallback(CMNotificationCenterRef, const void *listener, CFStringRef, const void *, CFTypeRef)
{
    auto* player = static_cast<MediaPlayerPrivateMediaSourceAVFObjC*>(const_cast<void*>(listener));
    callOnMainThread([weakThis = player->createWeakPtr()] {
        if (weakThis)
            weakThis->effectiveRateChanged();
    });
}
110
MediaPlayerPrivateMediaSourceAVFObjC::MediaPlayerPrivateMediaSourceAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_weakPtrFactory(this)
    , m_sizeChangeObserverWeakPtrFactory(this)
    , m_synchronizer(adoptNS([allocAVSampleBufferRenderSynchronizerInstance() init]))
    , m_seekTimer(*this, &MediaPlayerPrivateMediaSourceAVFObjC::seekInternal)
    , m_session(nullptr)
    , m_networkState(MediaPlayer::Empty)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_rate(1)
    , m_playing(false) // Use `false`, not `0`, for consistency with the sibling boolean members.
    , m_seeking(false)
    , m_seekCompleted(true)
    , m_loadingProgressed(false)
#if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
    , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
#endif
{
    // Listen for effective-rate changes on the synchronizer's timebase; the
    // destructor removes this listener.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterAddListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase, 0);

    // addPeriodicTimeObserverForInterval: throws an exception if you pass a non-numeric CMTime, so just use
    // an arbitrarily large time value of once an hour. The observer's real purpose is its
    // time-jumped callbacks, which complete in-flight seeks.
    __block auto weakThis = createWeakPtr();
    m_timeJumpedObserver = [m_synchronizer addPeriodicTimeObserverForInterval:toCMTime(MediaTime::createWithDouble(3600)) queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
#if LOG_DISABLED
        UNUSED_PARAM(time);
#endif
        // FIXME: Remove the below once <rdar://problem/15798050> is fixed.
        if (!weakThis)
            return;

        // A time-jumped notification while seeking means the synchronizer has
        // landed on the seek target; finish the seek and resume if appropriate.
        if (m_seeking && !m_pendingSeek) {
            LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::m_timeJumpedObserver(%p) - time(%s)", weakThis.get(), toString(toMediaTime(time)).utf8().data());
            m_seeking = false;

            if (shouldBePlaying())
                [m_synchronizer setRate:m_rate];
            if (!seeking())
                m_player->timeChanged();
        }

        // A seek that arrived while another was in flight is serviced now.
        if (m_pendingSeek)
            seekInternal();
    }];
}
158
MediaPlayerPrivateMediaSourceAVFObjC::~MediaPlayerPrivateMediaSourceAVFObjC()
{
    // Unregister the effective-rate listener installed by the constructor.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterRemoveListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase);

    // Remove any outstanding synchronizer time observers; leaving them installed
    // would let their blocks fire after this object is gone.
    if (m_timeJumpedObserver)
        [m_synchronizer removeTimeObserver:m_timeJumpedObserver.get()];
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];
    flushPendingSizeChanges();

    m_seekTimer.stop();
}
173
174 #pragma mark -
175 #pragma mark MediaPlayer Factory Methods
176
// Registers this engine with the MediaPlayer factory and warms the MIME type cache.
void MediaPlayerPrivateMediaSourceAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateMediaSourceAVFObjC>(player); },
        getSupportedTypes, supportsType, 0, 0, 0, 0);
    AVFoundationMIMETypeCache::singleton().loadTypes();
}
186
// The engine requires soft-linked AVFoundation and CoreMedia plus the
// sample-buffer classes, including an AVSampleBufferAudioRenderer new enough to
// respond to -setMuted:.
bool MediaPlayerPrivateMediaSourceAVFObjC::isAvailable()
{
    return AVFoundationLibrary()
        && isCoreMediaFrameworkAvailable()
        && getAVStreamDataParserClass()
        && getAVSampleBufferAudioRendererClass()
        && getAVSampleBufferRenderSynchronizerClass()
        && class_getInstanceMethod(getAVSampleBufferAudioRendererClass(), @selector(setMuted:));
}
196
// Reports the MIME types AVFoundation can demux as the set this engine supports.
void MediaPlayerPrivateMediaSourceAVFObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    types = AVFoundationMIMETypeCache::singleton().types();
}
201
// Decides whether this engine can play content described by `parameters`. Only
// MediaSource loads are accepted; the answer is refined from MayBeSupported
// (MIME type alone) to a definite answer when a codecs string is supplied.
MediaPlayer::SupportsType MediaPlayerPrivateMediaSourceAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    // This engine does not support non-media-source sources.
    if (!parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    if (parameters.type.isEmpty() || !AVFoundationMIMETypeCache::singleton().types().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Map the requested codec string to the codec the parser will actually
    // output, when that SPI is available.
    NSString *outputCodecs = parameters.codecs;
    if ([getAVStreamDataParserClass() respondsToSelector:@selector(outputMIMECodecParameterForInputMIMECodecParameter:)])
        outputCodecs = [getAVStreamDataParserClass() outputMIMECodecParameterForInputMIMECodecParameter:outputCodecs];

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)outputCodecs];
    return [getAVURLAssetClass() isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported; // Fixed stray double semicolon.
}
227
228 #pragma mark -
229 #pragma mark MediaPlayerPrivateInterface Overrides
230
// Direct URL loads always fail: this media engine only supports MediaSource URLs.
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String&)
{
    // This media engine only supports MediaSource URLs.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
}
237
// Attaches a MediaSource: create the private media source object that will feed
// samples into the synchronizer. The URL itself carries no information here.
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String& url, MediaSourcePrivateClient* client)
{
    UNUSED_PARAM(url);

    m_mediaSourcePrivate = MediaSourcePrivateAVFObjC::create(this, client);
}
244
245 #if ENABLE(MEDIA_STREAM)
// MediaStream sources are not supported by this engine.
void MediaPlayerPrivateMediaSourceAVFObjC::load(MediaStreamPrivate&)
{
    setNetworkState(MediaPlayer::FormatError);
}
250 #endif
251
// Nothing to cancel: loading is driven entirely by the attached MediaSource.
void MediaPlayerPrivateMediaSourceAVFObjC::cancelLoad()
{
}

// No preparation is needed before playback can begin.
void MediaPlayerPrivateMediaSourceAVFObjC::prepareToPlay()
{
}
259
// Expose the underlying AVAsset to clients that need direct platform access.
PlatformMedia MediaPlayerPrivateMediaSourceAVFObjC::platformMedia() const
{
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationAssetType;
    platformMedia.media.avfAsset = m_asset.get();
    return platformMedia;
}
267
// Returns the layer to composite into the page. With the fullscreen layer
// manager active, the inline container layer is returned instead of the raw
// display layer so the video can be re-parented for fullscreen presentation.
PlatformLayer* MediaPlayerPrivateMediaSourceAVFObjC::platformLayer() const
{
#if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
    return m_videoFullscreenLayerManager->videoInlineLayer();
#else
    return m_sampleBufferDisplayLayer.get();
#endif
}
276
// Hop to the main thread before mutating playback state; the actual work
// happens in playInternal().
void MediaPlayerPrivateMediaSourceAVFObjC::play()
{
    callOnMainThread([weakThis = createWeakPtr()] {
        if (weakThis)
            weakThis->playInternal();
    });
}
285
// Main-thread half of play(): mark the player as playing and start the
// synchronizer if all readiness conditions hold.
void MediaPlayerPrivateMediaSourceAVFObjC::playInternal()
{
    // play() can be dispatched before load() has created the media source;
    // bail rather than dereferencing a null m_mediaSourcePrivate.
    if (!m_mediaSourcePrivate)
        return;

    // Playing from (or past) the end is a no-op; the caller must seek back first.
    if (currentMediaTime() >= m_mediaSourcePrivate->duration())
        return;

    m_playing = true;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}
295
// Hop to the main thread before mutating playback state; the actual work
// happens in pauseInternal().
void MediaPlayerPrivateMediaSourceAVFObjC::pause()
{
    callOnMainThread([weakThis = createWeakPtr()] {
        if (weakThis)
            weakThis->pauseInternal();
    });
}
304
// Main-thread half of pause(): stop playback immediately by zeroing the
// synchronizer's rate.
void MediaPlayerPrivateMediaSourceAVFObjC::pauseInternal()
{
    m_playing = false;
    [m_synchronizer setRate:0];
}
310
// The player is paused whenever the synchronizer's rate is zero.
bool MediaPlayerPrivateMediaSourceAVFObjC::paused() const
{
    return [m_synchronizer rate] == 0;
}
315
// Propagate the new volume to every attached audio renderer.
void MediaPlayerPrivateMediaSourceAVFObjC::setVolume(float volume)
{
    for (auto& rendererPair : m_sampleBufferAudioRendererMap)
        [rendererPair.key setVolume:volume];
}
321
// Fast-forward/rewind scanning is supported.
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsScanning() const
{
    return true;
}
326
// Propagate the new muted state to every attached audio renderer.
void MediaPlayerPrivateMediaSourceAVFObjC::setMuted(bool muted)
{
    for (auto& rendererPair : m_sampleBufferAudioRendererMap)
        [rendererPair.key setMuted:muted];
}
332
// Returns the video's natural (unscaled) size, as last cached by setNaturalSize().
FloatSize MediaPlayerPrivateMediaSourceAVFObjC::naturalSize() const
{
    return m_naturalSize;
}
337
// A video track exists only once the attached media source reports one.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasVideo() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->hasVideo();
}
345
// An audio track exists only once the attached media source reports one.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasAudio() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->hasAudio();
}
353
// Visibility does not affect decoding or rendering for this engine.
void MediaPlayerPrivateMediaSourceAVFObjC::setVisible(bool)
{
    // No-op.
}
358
// Duration is owned by the media source; report zero until one is attached.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::durationMediaTime() const
{
    if (!m_mediaSourcePrivate)
        return MediaTime::zeroTime();
    return m_mediaSourcePrivate->duration();
}
363
// The synchronizer's timebase is the source of truth for current time. Negative
// values are clamped to zero first; while the timebase has not yet caught up to
// a recent seek, the seek target is reported instead. (The clamp order matters:
// a negative timebase reading reports zero even if a later seek target exists.)
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::currentMediaTime() const
{
    MediaTime synchronizerTime = toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
    if (synchronizerTime < MediaTime::zeroTime())
        return MediaTime::zeroTime();
    if (synchronizerTime < m_lastSeekTime)
        return m_lastSeekTime;
    return synchronizerTime;
}
373
// Media sources always begin at time zero.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::startTime() const
{
    return MediaTime::zeroTime();
}

// The initial playback position is always time zero.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::initialTime() const
{
    return MediaTime::zeroTime();
}
383
// Queues an asynchronous seek to `time`, allowing the engine to land anywhere in
// [time - negativeThreshold, time + positiveThreshold]. The actual work happens
// in seekInternal() on the next run-loop turn so rapid seeks coalesce.
void MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(const MediaTime& time, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold)
{
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(%p) - time(%s), negativeThreshold(%s), positiveThreshold(%s)", this, toString(time).utf8().data(), toString(negativeThreshold).utf8().data(), toString(positiveThreshold).utf8().data());
    m_seeking = true;
    // Removed an unused local weak pointer that was created here but never used.
    m_pendingSeek = std::make_unique<PendingSeek>(time, negativeThreshold, positiveThreshold);

    // Restart the timer so only the newest pending seek is serviced.
    if (m_seekTimer.isActive())
        m_seekTimer.stop();
    m_seekTimer.startOneShot(0_s);
}
395
// Performs the seek queued by seekWithTolerance(): resolves the target time,
// moves the synchronizer, and tells the media source to re-enqueue samples.
void MediaPlayerPrivateMediaSourceAVFObjC::seekInternal()
{
    // Take ownership of the pending seek so a re-entrant seekWithTolerance()
    // can queue a fresh one while we work.
    std::unique_ptr<PendingSeek> pendingSeek;
    pendingSeek.swap(m_pendingSeek);

    if (!pendingSeek)
        return;

    if (!m_mediaSourcePrivate)
        return;

    // With no tolerance, seek exactly to the target; otherwise let the media
    // source choose the cheapest time inside the tolerated window.
    if (!pendingSeek->negativeThreshold && !pendingSeek->positiveThreshold)
        m_lastSeekTime = pendingSeek->targetTime;
    else
        m_lastSeekTime = m_mediaSourcePrivate->fastSeekTimeForMediaTime(pendingSeek->targetTime, pendingSeek->positiveThreshold, pendingSeek->negativeThreshold);

    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekInternal(%p) - seekTime(%s)", this, toString(m_lastSeekTime).utf8().data());

    // Capture the synchronizer time before moving it so the no-op case below can be detected.
    MediaTime synchronizerTime = toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
    bool doesNotRequireSeek = synchronizerTime == m_lastSeekTime;

    m_mediaSourcePrivate->willSeek();
    [m_synchronizer setRate:0 time:toCMTime(m_lastSeekTime)];
    m_mediaSourcePrivate->seekToTime(m_lastSeekTime);

    // In cases where the destination seek time precisely matches the synchronizer's existing time
    // no time jumped notification will be issued. In this case, just notify the MediaPlayer that
    // the seek completed successfully.
    if (doesNotRequireSeek) {
        m_seeking = false;

        if (shouldBePlaying())
            [m_synchronizer setRate:m_rate];
        if (!seeking() && m_seekCompleted)
            m_player->timeChanged();
    }
}
433
// Called when the source needs more data before a seek can finish; suppresses
// time-change notifications until seekCompleted() is called.
void MediaPlayerPrivateMediaSourceAVFObjC::waitForSeekCompleted()
{
    if (!m_seeking)
        return;
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::waitForSeekCompleted(%p)", this);
    m_seekCompleted = false;
}
441
// Called when the source has buffered enough to finish a pending seek: resumes
// playback if appropriate and notifies the media element.
void MediaPlayerPrivateMediaSourceAVFObjC::seekCompleted()
{
    if (m_seekCompleted)
        return;
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekCompleted(%p)", this);
    m_seekCompleted = true;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
    // Only notify once the synchronizer side of the seek has also finished.
    if (!m_seeking)
        m_player->timeChanged();
}
453
// A seek is in flight either while the synchronizer is still moving to the
// target or while we are waiting for seekCompleted() from the source.
bool MediaPlayerPrivateMediaSourceAVFObjC::seeking() const
{
    if (m_seeking)
        return true;
    return !m_seekCompleted;
}
458
// Stores the requested playback rate (clamped to non-negative) and applies it
// immediately if playback is currently allowed.
void MediaPlayerPrivateMediaSourceAVFObjC::setRateDouble(double rate)
{
    // AVSampleBufferRenderSynchronizer does not support negative rate yet.
    m_rate = std::max<double>(rate, 0);
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}
466
// Selects the time-pitch algorithm for all audio renderers: Spectral preserves
// pitch across rate changes, Varispeed does not.
void MediaPlayerPrivateMediaSourceAVFObjC::setPreservesPitch(bool preservesPitch)
{
    NSString *pitchAlgorithm;
    if (preservesPitch)
        pitchAlgorithm = AVAudioTimePitchAlgorithmSpectral;
    else
        pitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed;

    for (auto& rendererPair : m_sampleBufferAudioRendererMap)
        [rendererPair.key setAudioTimePitchAlgorithm:pitchAlgorithm];
}
473
// Returns the cached network state (updated via setNetworkState()).
MediaPlayer::NetworkState MediaPlayerPrivateMediaSourceAVFObjC::networkState() const
{
    return m_networkState;
}

// Returns the cached ready state (updated via setReadyState()).
MediaPlayer::ReadyState MediaPlayerPrivateMediaSourceAVFObjC::readyState() const
{
    return m_readyState;
}
483
// For media-source playback the entire [start, duration] range is seekable.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::seekable() const
{
    return std::make_unique<PlatformTimeRanges>(minMediaTimeSeekable(), maxMediaTimeSeekable());
}

// The seekable range ends at the current duration.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::maxMediaTimeSeekable() const
{
    return durationMediaTime();
}

// The seekable range starts at the stream's start time (zero).
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::minMediaTimeSeekable() const
{
    return startTime();
}
498
// Buffered ranges are tracked by the media source; empty until one is attached.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::buffered() const
{
    return m_mediaSourcePrivate ? m_mediaSourcePrivate->buffered() : std::make_unique<PlatformTimeRanges>();
}
503
// Report-and-reset: returns whether any loading progressed since the last query.
bool MediaPlayerPrivateMediaSourceAVFObjC::didLoadingProgress() const
{
    bool progressed = m_loadingProgressed;
    m_loadingProgressed = false;
    return progressed;
}
510
// The display layer is sized by the compositor; nothing to do here.
void MediaPlayerPrivateMediaSourceAVFObjC::setSize(const IntSize&)
{
    // No-op.
}

void MediaPlayerPrivateMediaSourceAVFObjC::paint(GraphicsContext&, const FloatRect&)
{
    // FIXME(125157): Implement painting.
}

void MediaPlayerPrivateMediaSourceAVFObjC::paintCurrentFrameInContext(GraphicsContext&, const FloatRect&)
{
    // FIXME(125157): Implement painting.
}
525
// True once the display layer has a displayable frame (see setHasAvailableVideoFrame()).
bool MediaPlayerPrivateMediaSourceAVFObjC::hasAvailableVideoFrame() const
{
    return m_hasAvailableVideoFrame;
}

// Rendering always goes through AVSampleBufferDisplayLayer, which is composited.
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsAcceleratedRendering() const
{
    return true;
}
535
// Create or tear down the display layer to match whether the client can
// composite accelerated layers.
void MediaPlayerPrivateMediaSourceAVFObjC::acceleratedRenderingStateChanged()
{
    if (!m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player)) {
        destroyLayer();
        return;
    }
    ensureLayer();
}
543
// Forward active-source-buffer changes from the media source to the media element.
void MediaPlayerPrivateMediaSourceAVFObjC::notifyActiveSourceBuffersChanged()
{
    m_player->client().mediaPlayerActiveSourceBuffersChanged(m_player);
}
548
// Media source playback behaves like a fully stored (non-live) stream.
MediaPlayer::MovieLoadType MediaPlayerPrivateMediaSourceAVFObjC::movieLoadType() const
{
    return MediaPlayer::StoredStream;
}

// Rendering resources are created lazily via acceleratedRenderingStateChanged().
void MediaPlayerPrivateMediaSourceAVFObjC::prepareForRendering()
{
    // No-op.
}
558
// Human-readable engine name used for diagnostics.
String MediaPlayerPrivateMediaSourceAVFObjC::engineDescription() const
{
    static NeverDestroyed<String> description(ASCIILiteral("AVFoundation MediaSource Engine"));
    return description;
}

String MediaPlayerPrivateMediaSourceAVFObjC::languageOfPrimaryAudioTrack() const
{
    // FIXME(125158): implement languageOfPrimaryAudioTrack()
    return emptyString();
}

// Extra memory cost is not reported by this engine.
size_t MediaPlayerPrivateMediaSourceAVFObjC::extraMemoryCost() const
{
    return 0;
}
575
// Video playback quality statistics, read via the display layer's private
// -videoPerformanceMetrics SPI. Each returns zero when the layer (or the SPI)
// is unavailable, because messaging nil yields zero.
unsigned long MediaPlayerPrivateMediaSourceAVFObjC::totalVideoFrames()
{
    return [[m_sampleBufferDisplayLayer videoPerformanceMetrics] totalNumberOfVideoFrames];
}

unsigned long MediaPlayerPrivateMediaSourceAVFObjC::droppedVideoFrames()
{
    return [[m_sampleBufferDisplayLayer videoPerformanceMetrics] numberOfDroppedVideoFrames];
}

unsigned long MediaPlayerPrivateMediaSourceAVFObjC::corruptedVideoFrames()
{
    return [[m_sampleBufferDisplayLayer videoPerformanceMetrics] numberOfCorruptedVideoFrames];
}

MediaTime MediaPlayerPrivateMediaSourceAVFObjC::totalFrameDelay()
{
    return MediaTime::createWithDouble([[m_sampleBufferDisplayLayer videoPerformanceMetrics] totalFrameDelay]);
}
595
596 #pragma mark -
597 #pragma mark Utility Methods
598
// Lazily creates the AVSampleBufferDisplayLayer and attaches it to the synchronizer.
void MediaPlayerPrivateMediaSourceAVFObjC::ensureLayer()
{
    if (m_sampleBufferDisplayLayer)
        return;

    m_sampleBufferDisplayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
#ifndef NDEBUG
    // Name the layer to make it identifiable in layer-tree dumps.
    [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaSource AVSampleBufferDisplayLayer"];
#endif

    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
}
611
// Detaches and drops the display layer. The renderer is removed at the current
// timebase time so the removal takes effect immediately.
void MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer()
{
    if (!m_sampleBufferDisplayLayer)
        return;

    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];
    m_sampleBufferDisplayLayer = nullptr;
}
623
// Playback should proceed only when requested by the client, not blocked by a
// seek, all renderers have samples queued, and enough data is buffered.
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldBePlaying() const
{
    return m_playing && !seeking() && allRenderersHaveAvailableSamples() && m_readyState >= MediaPlayer::HaveFutureData;
}
628
// Records whether the display layer has a displayable frame and, on change,
// re-evaluates overall renderer readiness.
void MediaPlayerPrivateMediaSourceAVFObjC::setHasAvailableVideoFrame(bool flag)
{
    if (flag != m_hasAvailableVideoFrame) {
        m_hasAvailableVideoFrame = flag;
        updateAllRenderersHaveAvailableSamples();
    }
}
636
// Records whether `renderer` has an audible sample queued and, on change,
// re-evaluates overall renderer readiness. Unknown renderers are ignored.
void MediaPlayerPrivateMediaSourceAVFObjC::setHasAvailableAudioSample(AVSampleBufferAudioRenderer* renderer, bool flag)
{
    auto iter = m_sampleBufferAudioRendererMap.find(renderer);
    if (iter == m_sampleBufferAudioRendererMap.end())
        return;

    auto& properties = iter->value;
    if (properties.hasAudibleSample == flag)
        return;
    properties.hasAudibleSample = flag;
    updateAllRenderersHaveAvailableSamples();
}
649
// Recomputes whether every attached renderer (the video layer plus all audio
// renderers) has samples ready, and starts or stops the synchronizer to match.
void MediaPlayerPrivateMediaSourceAVFObjC::updateAllRenderersHaveAvailableSamples()
{
    bool allRenderersHaveAvailableSamples = [&] {
        // A present video layer must have a displayable frame.
        if (m_sampleBufferDisplayLayer && !m_hasAvailableVideoFrame)
            return false;

        // Every audio renderer must have an audible sample queued.
        for (auto& properties : m_sampleBufferAudioRendererMap.values()) {
            if (!properties.hasAudibleSample)
                return false;
        }
        return true;
    }();

    if (allRenderersHaveAvailableSamples == m_allRenderersHaveAvailableSamples)
        return;

    m_allRenderersHaveAvailableSamples = allRenderersHaveAvailableSamples;

    // Keep the synchronizer's rate consistent with the new readiness state.
    if (shouldBePlaying() && [m_synchronizer rate] != m_rate)
        [m_synchronizer setRate:m_rate];
    else if (!shouldBePlaying() && [m_synchronizer rate])
        [m_synchronizer setRate:0];
}
678
// Called when the media source's duration changes. Re-arms a boundary time
// observer at the new duration so playback stops exactly at the end, and
// pauses immediately if the current time is already at or past the duration.
void MediaPlayerPrivateMediaSourceAVFObjC::durationChanged()
{
    m_player->durationChanged();

    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];

    if (!m_mediaSourcePrivate)
        return;

    MediaTime duration = m_mediaSourcePrivate->duration();
    auto weakThis = createWeakPtr();
    NSArray* times = @[[NSValue valueWithCMTime:toCMTime(duration)]];

    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::durationChanged(%p) - duration = %s", this, toString(duration).utf8().data());

    m_durationObserver = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[weakThis, duration] {
        if (!weakThis)
            return;

        MediaTime now = weakThis->currentMediaTime();
        LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::durationChanged(%p) - boundary time observer called, now = %s", weakThis.get(), toString(now).utf8().data());

        weakThis->pauseInternal();
        if (now < duration) {
            // The observer should only fire at (or past) the boundary; snap the
            // timebase to the duration if it fired early.
            // Fix: original format string had no %p conversion for the weakThis.get() argument.
            LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::durationChanged(%p) - ERROR: boundary time observer called before duration!", weakThis.get());
            [weakThis->m_synchronizer setRate:0 time:toCMTime(duration)];
        }
        weakThis->m_player->timeChanged();

    }];

    if (m_playing && duration <= currentMediaTime())
        pauseInternal();
}
714
// The synchronizer's timebase rate changed; surface that to the media element.
void MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged()
{
    m_player->rateChanged();
}
719
// Schedules a natural-size change to take effect when playback reaches `time`.
// A boundary time observer fires at that time; observers are consumed FIFO from
// m_sizeChangeObservers. If playback is already at or past `time`, apply now.
void MediaPlayerPrivateMediaSourceAVFObjC::sizeWillChangeAtTime(const MediaTime& time, const FloatSize& size)
{
    // The dedicated factory lets flushPendingSizeChanges() revoke just these blocks.
    auto weakThis = m_sizeChangeObserverWeakPtrFactory.createWeakPtr();
    NSArray* times = @[[NSValue valueWithCMTime:toCMTime(time)]];
    RetainPtr<id> observer = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[this, weakThis, size] {
        if (!weakThis)
            return;

        ASSERT(!m_sizeChangeObservers.isEmpty());
        if (!m_sizeChangeObservers.isEmpty()) {
            RetainPtr<id> observer = m_sizeChangeObservers.takeFirst();
            [m_synchronizer removeTimeObserver:observer.get()];
        }
        setNaturalSize(size);
    }];
    m_sizeChangeObservers.append(WTFMove(observer));

    if (currentMediaTime() >= time)
        setNaturalSize(size);
}
740
// Caches the video's natural size, notifying the media element only on change.
void MediaPlayerPrivateMediaSourceAVFObjC::setNaturalSize(const FloatSize& size)
{
    if (m_naturalSize == size)
        return;

    m_naturalSize = size;
    m_player->sizeChanged();
}
749
// Cancels all scheduled size-change observers and revokes their weak pointers
// so no queued size-change block can fire afterwards.
void MediaPlayerPrivateMediaSourceAVFObjC::flushPendingSizeChanges()
{
    while (!m_sizeChangeObservers.isEmpty()) {
        RetainPtr<id> observer = m_sizeChangeObservers.takeFirst();
        [m_synchronizer removeTimeObserver:observer.get()];
    }
    m_sizeChangeObserverWeakPtrFactory.revokeAll();
}
758
759 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Lazily creates the AVStreamSession used for content protection, backed by the
// media keys storage directory. Returns nil when the SPI or storage is unavailable.
AVStreamSession* MediaPlayerPrivateMediaSourceAVFObjC::streamSession()
{
    if (!getAVStreamSessionClass() || ![getAVStreamSessionClass() instancesRespondToSelector:@selector(initWithStorageDirectoryAtURL:)])
        return nil;

    if (!m_streamSession) {
        String storageDirectory = m_player->mediaKeysStorageDirectory();
        if (storageDirectory.isEmpty())
            return nil;

        // Create the storage directory on first use.
        if (!fileExists(storageDirectory)) {
            if (!makeAllDirectories(storageDirectory))
                return nil;
        }

        String storagePath = pathByAppendingComponent(storageDirectory, "SecureStop.plist");
        m_streamSession = adoptNS([allocAVStreamSessionInstance() initWithStorageDirectoryAtURL:[NSURL fileURLWithPath:storagePath]]);
    }
    return m_streamSession.get();
}
780
// Attaches (or replaces) the CDM session, wiring the stream session into it and
// propagating the session to every source buffer.
void MediaPlayerPrivateMediaSourceAVFObjC::setCDMSession(CDMSession* session)
{
    if (session == m_session)
        return;

    m_session = toCDMSessionMediaSourceAVFObjC(session);

    if (CDMSessionAVStreamSession* cdmStreamSession = toCDMSessionAVStreamSession(m_session))
        cdmStreamSession->setStreamSession(streamSession());

    // A CDM session can be attached before load() has created the media source;
    // guard against dereferencing a null m_mediaSourcePrivate.
    if (!m_mediaSourcePrivate)
        return;

    for (auto& sourceBuffer : m_mediaSourcePrivate->sourceBuffers())
        sourceBuffer->setCDMSession(m_session);
}
793
// Forwards a key request from the source buffers to the media element.
void MediaPlayerPrivateMediaSourceAVFObjC::keyNeeded(Uint8Array* initData)
{
    m_player->keyNeeded(initData);
}
798 #endif
799
// Updates the cached ready state, starts or stops the synchronizer accordingly,
// and notifies the media element.
void MediaPlayerPrivateMediaSourceAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (m_readyState == readyState)
        return;

    m_readyState = readyState;

    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
    else
        [m_synchronizer setRate:0];

    m_player->readyStateChanged();
}
814
// Updates the cached network state, notifying the media element only on change.
void MediaPlayerPrivateMediaSourceAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    if (networkState == m_networkState)
        return;

    m_networkState = networkState;
    m_player->networkStateChanged();
}
823
// Adopts an externally created display layer, attaches it to the synchronizer,
// and informs the client that the rendering mode changed.
void MediaPlayerPrivateMediaSourceAVFObjC::addDisplayLayer(AVSampleBufferDisplayLayer* displayLayer)
{
    ASSERT(displayLayer);
    if (displayLayer == m_sampleBufferDisplayLayer)
        return;

    m_sampleBufferDisplayLayer = displayLayer;
    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
    m_player->client().mediaPlayerRenderingModeChanged(m_player);

    // FIXME: move this somewhere appropriate:
    m_player->firstVideoFrameAvailable();

#if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
    m_videoFullscreenLayerManager->setVideoLayer(m_sampleBufferDisplayLayer.get(), snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size());
#endif
}
841
// Detaches the given display layer (if it is the current one) from the
// synchronizer and informs the client that the rendering mode changed.
void MediaPlayerPrivateMediaSourceAVFObjC::removeDisplayLayer(AVSampleBufferDisplayLayer* displayLayer)
{
    if (displayLayer != m_sampleBufferDisplayLayer)
        return;

    // Remove at the current timebase time so the removal takes effect immediately.
    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    m_sampleBufferDisplayLayer = nullptr;
    m_player->client().mediaPlayerRenderingModeChanged(m_player);

#if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif
}
859
// Registers a new audio renderer: syncs it to the player's current mute,
// volume, and pitch settings, then attaches it to the synchronizer.
void MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
{
    if (m_sampleBufferAudioRendererMap.contains(audioRenderer))
        return;

    m_sampleBufferAudioRendererMap.add(audioRenderer, AudioRendererProperties());

    [audioRenderer setMuted:m_player->muted()];
    [audioRenderer setVolume:m_player->volume()];
    [audioRenderer setAudioTimePitchAlgorithm:(m_player->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    [m_synchronizer addRenderer:audioRenderer];
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
874
// Unregisters an audio renderer and detaches it from the synchronizer.
void MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
{
    auto iter = m_sampleBufferAudioRendererMap.find(audioRenderer);
    if (iter == m_sampleBufferAudioRendererMap.end())
        return;

    // Remove at the current timebase time so the removal takes effect immediately.
    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:audioRenderer atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    m_sampleBufferAudioRendererMap.remove(iter);
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
889
// Forwards track-characteristic changes from the media source to the media element.
void MediaPlayerPrivateMediaSourceAVFObjC::characteristicsChanged()
{
    m_player->characteristicChanged();
}
894
895 #if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
// Hands the fullscreen container layer to the layer manager, which re-parents
// the video layer into it.
void MediaPlayerPrivateMediaSourceAVFObjC::setVideoFullscreenLayer(PlatformLayer *videoFullscreenLayer, std::function<void()> completionHandler)
{
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, completionHandler);
}

// Forwards fullscreen geometry changes to the layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
905 #endif
906
907 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Remembers the wireless playback target chosen by the user.
void MediaPlayerPrivateMediaSourceAVFObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);
}
912
// Toggles routing to the wireless playback target, notifying the media element
// only when the value actually changes.
void MediaPlayerPrivateMediaSourceAVFObjC::setShouldPlayToPlaybackTarget(bool shouldPlayToTarget)
{
    if (m_shouldPlayToTarget == shouldPlayToTarget)
        return;

    m_shouldPlayToTarget = shouldPlayToTarget;

    if (!m_player)
        return;
    m_player->currentPlaybackTargetIsWirelessChanged();
}
923
// Playback is wireless only when a target exists, routing to it is enabled,
// and the target currently has an active route.
bool MediaPlayerPrivateMediaSourceAVFObjC::isCurrentPlaybackTargetWireless() const
{
    return m_playbackTarget && m_shouldPlayToTarget && m_playbackTarget->hasActiveRoute();
}
931 #endif
932
933 }
934
935 #endif