Replace WTF::move with WTFMove
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateMediaSourceAVFObjC.mm
1 /*
2  * Copyright (C) 2013, 2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "CDMSessionAVStreamSession.h"
32 #import "CDMSessionMediaSourceAVFObjC.h"
33 #import "FileSystem.h"
34 #import "Logging.h"
35 #import "MediaSourcePrivateAVFObjC.h"
36 #import "MediaSourcePrivateClient.h"
37 #import "MediaTimeAVFoundation.h"
38 #import "PlatformClockCM.h"
39 #import "WebCoreSystemInterface.h"
40 #import <AVFoundation/AVAsset.h>
41 #import <AVFoundation/AVTime.h>
42 #import <QuartzCore/CALayer.h>
#import <objc/runtime.h>
44 #import <wtf/MainThread.h>
45 #import <wtf/NeverDestroyed.h>
46
47 #pragma mark - Soft Linking
48
49 #import "CoreMediaSoftLink.h"
50
51 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
52
53 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVAsset)
54 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVURLAsset)
55 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferAudioRenderer)
56 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
57 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferRenderSynchronizer)
58 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamDataParser)
59 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamSession);
60 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVVideoPerformanceMetrics)
61
62 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
63 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
64
65 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
66 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
67
68 #pragma mark -
69 #pragma mark AVSampleBufferDisplayLayer
70
// Minimal redeclaration of the soft-linked AVSampleBufferDisplayLayer SPI class
// so the compiler accepts it; the real implementation comes from AVFoundation.
@interface AVSampleBufferDisplayLayer : CALayer
@end
73
74 #pragma mark -
75 #pragma mark AVVideoPerformanceMetrics
76
// Redeclaration of the AVVideoPerformanceMetrics SPI used by the frame-statistics
// accessors (totalVideoFrames() et al.) below.
@interface AVVideoPerformanceMetrics : NSObject
- (unsigned long)totalNumberOfVideoFrames;
- (unsigned long)numberOfDroppedVideoFrames;
- (unsigned long)numberOfCorruptedVideoFrames;
- (double)totalFrameDelay;
@end

// SPI accessor on the display layer that vends the metrics object above.
@interface AVSampleBufferDisplayLayer (WebCoreAVSampleBufferDisplayLayerPrivate)
- (AVVideoPerformanceMetrics *)videoPerformanceMetrics;
@end
87
88 #pragma mark -
89 #pragma mark AVSampleBufferAudioRenderer
90
// Redeclaration of the soft-linked AVSampleBufferAudioRenderer SPI: volume/mute
// and time-pitch control used by setVolume()/setMuted()/setPreservesPitch().
@interface AVSampleBufferAudioRenderer : NSObject
- (void)setVolume:(float)volume;
- (void)setMuted:(BOOL)muted;
@property (nonatomic, copy) NSString *audioTimePitchAlgorithm;
@end
96
97 #pragma mark -
98 #pragma mark AVSampleBufferRenderSynchronizer
99
// Redeclaration of the soft-linked AVSampleBufferRenderSynchronizer SPI. This
// object is the master clock for playback: renderers are attached to it, rate
// changes drive play/pause, and time observers drive seek/duration handling.
@interface AVSampleBufferRenderSynchronizer : NSObject
- (CMTimebaseRef)timebase;
- (float)rate;
- (void)setRate:(float)rate;
- (void)setRate:(float)rate time:(CMTime)time;
- (NSArray *)renderers;
- (void)addRenderer:(id)renderer;
- (void)removeRenderer:(id)renderer atTime:(CMTime)time withCompletionHandler:(void (^)(BOOL didRemoveRenderer))completionHandler;
- (id)addPeriodicTimeObserverForInterval:(CMTime)interval queue:(dispatch_queue_t)queue usingBlock:(void (^)(CMTime time))block;
- (id)addBoundaryTimeObserverForTimes:(NSArray *)times queue:(dispatch_queue_t)queue usingBlock:(void (^)(void))block;
- (void)removeTimeObserver:(id)observer;
@end
112
113 #pragma mark - 
114 #pragma mark AVStreamSession
115
// Redeclaration of the soft-linked AVStreamSession SPI initializer used by
// streamSession() (ENCRYPTED_MEDIA_V2 secure-stop storage).
@interface AVStreamSession : NSObject
- (instancetype)initWithStorageDirectoryAtURL:(NSURL *)storageDirectory;
@end
119
120 namespace WebCore {
121
122 #pragma mark -
123 #pragma mark MediaPlayerPrivateMediaSourceAVFObjC
124
// CoreMedia notification callback for kCMTimebaseNotification_EffectiveRateChanged.
// |listener| is the MediaPlayerPrivateMediaSourceAVFObjC registered in the
// constructor; we take a weak pointer before bouncing to the main thread so a
// player destroyed in the interim is simply skipped.
static void CMTimebaseEffectiveRateChangedCallback(CMNotificationCenterRef, const void *listener, CFStringRef, const void *, CFTypeRef)
{
    auto weakPlayer = ((MediaPlayerPrivateMediaSourceAVFObjC*)listener)->createWeakPtr();
    callOnMainThread([weakPlayer] {
        if (weakPlayer)
            weakPlayer.get()->effectiveRateChanged();
    });
}
135
// Wires this player to an AVSampleBufferRenderSynchronizer: registers for
// timebase effective-rate notifications and installs a periodic time observer
// used to detect time jumps (i.e. seek completion).
MediaPlayerPrivateMediaSourceAVFObjC::MediaPlayerPrivateMediaSourceAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_weakPtrFactory(this)
    , m_synchronizer(adoptNS([allocAVSampleBufferRenderSynchronizerInstance() init]))
    , m_seekTimer(*this, &MediaPlayerPrivateMediaSourceAVFObjC::seekInternal)
    , m_session(nullptr)
    , m_networkState(MediaPlayer::Empty)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_rate(1)
    , m_playing(0)
    , m_seeking(false)
    , m_seekCompleted(true)
    , m_loadingProgressed(false)
{
    // Forward effective-rate changes on the synchronizer's timebase to
    // effectiveRateChanged() (via CMTimebaseEffectiveRateChangedCallback, which
    // hops to the main thread). Balanced by RemoveListener in the destructor.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterAddListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase, 0);

    // addPeriodicTimeObserverForInterval: throws an exception if you pass a non-numeric CMTime, so just use
    // an arbitrarily large time value of once an hour:
    __block auto weakThis = createWeakPtr();
    m_timeJumpedObserver = [m_synchronizer addPeriodicTimeObserverForInterval:toCMTime(MediaTime::createWithDouble(3600)) queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
#if LOG_DISABLED
        UNUSED_PARAM(time);
#endif
        // FIXME: Remove the below once <rdar://problem/15798050> is fixed.
        if (!weakThis)
            return;

        // NOTE(review): m_seeking/m_pendingSeek/etc. are reached through the
        // implicitly captured `this`; the weakThis check above is the only
        // guard against a deallocated player — keep it first.
        if (m_seeking && !m_pendingSeek) {
            LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::m_timeJumpedObserver(%p) - time(%s)", weakThis.get(), toString(toMediaTime(time)).utf8().data());
            m_seeking = false;

            // The seek landed: restore the playback rate and notify the client.
            if (shouldBePlaying())
                [m_synchronizer setRate:m_rate];
            if (!seeking())
                m_player->timeChanged();
        }

        // A new seek was requested while the previous one was in flight.
        if (m_pendingSeek)
            seekInternal();
    }];
}
179
// Unregisters the notification listener and time observers installed by the
// constructor and by durationChanged(), then cancels any pending deferred seek.
MediaPlayerPrivateMediaSourceAVFObjC::~MediaPlayerPrivateMediaSourceAVFObjC()
{
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterRemoveListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase);

    if (m_timeJumpedObserver)
        [m_synchronizer removeTimeObserver:m_timeJumpedObserver.get()];
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];

    m_seekTimer.stop();
}
193
194 #pragma mark -
195 #pragma mark MediaPlayer Factory Methods
196
// Registers this engine with the MediaPlayer factory, but only when the
// required AVFoundation/CoreMedia support is present at runtime.
void MediaPlayerPrivateMediaSourceAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateMediaSourceAVFObjC>(player); },
        getSupportedTypes, supportsType, 0, 0, 0, 0);
}
203
// True when the soft-linked frameworks and every class/selector this engine
// depends on are present at runtime.
bool MediaPlayerPrivateMediaSourceAVFObjC::isAvailable()
{
    if (!AVFoundationLibrary() || !isCoreMediaFrameworkAvailable())
        return false;

    if (!getAVStreamDataParserClass() || !getAVSampleBufferAudioRendererClass() || !getAVSampleBufferRenderSynchronizerClass())
        return false;

    // The audio renderer must additionally respond to -setMuted:.
    return class_getInstanceMethod(getAVSampleBufferAudioRendererClass(), @selector(setMuted:));
}
213
// Returns the lazily built set of MIME types AVURLAsset reports as playable.
// The NeverDestroyed cache is filled exactly once, guarded by typeListInitialized.
// Fix: the function's closing brace was missing, leaving the definition unclosed.
static const HashSet<String>& mimeTypeCache()
{
    static NeverDestroyed<HashSet<String>> cache;
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [getAVURLAssetClass() audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.get().add(mimeType);

    return cache;
}
// MediaEngine callback: copies the cached AVFoundation MIME-type set into |types|.
void MediaPlayerPrivateMediaSourceAVFObjC::getSupportedTypes(HashSet<String>& types)
{
    types = mimeTypeCache();
}
234
// MediaEngine callback: classifies whether this engine can play the described
// content. Only MediaSource loads are handled; the final verdict for typed
// content with codecs is delegated to AVURLAsset.
// Fix: removed a stray second semicolon on the final return statement.
MediaPlayer::SupportsType MediaPlayerPrivateMediaSourceAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    // This engine does not support non-media-source sources.
    if (!parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    if (parameters.type.isEmpty() || !mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [getAVURLAssetClass() isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
256
257 #pragma mark -
258 #pragma mark MediaPlayerPrivateInterface Overrides
259
// This media engine only supports MediaSource URLs; a plain-URL load is a
// format error. Uses the setNetworkState() helper (as the MEDIA_STREAM load
// overload does) instead of hand-rolling the state change + notification.
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String&)
{
    setNetworkState(MediaPlayer::FormatError);
}
266
// MediaSource load: creates the MediaSourcePrivate bridge. The URL is unused
// because all media data arrives through the MediaSourcePrivateClient.
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String& url, MediaSourcePrivateClient* client)
{
    UNUSED_PARAM(url);

    m_mediaSourcePrivate = MediaSourcePrivateAVFObjC::create(this, client);
}
273
#if ENABLE(MEDIA_STREAM)
// MediaStream sources are not supported by this engine.
void MediaPlayerPrivateMediaSourceAVFObjC::load(MediaStreamPrivate&)
{
    setNetworkState(MediaPlayer::FormatError);
}
#endif
280
// Nothing to cancel: MediaSource data delivery is driven by the client.
void MediaPlayerPrivateMediaSourceAVFObjC::cancelLoad()
{
}

// No preparation needed; rendering objects are created lazily (see ensureLayer()).
void MediaPlayerPrivateMediaSourceAVFObjC::prepareToPlay()
{
}
288
// Exposes the underlying AVAsset (may be nil) as the engine's platform media.
PlatformMedia MediaPlayerPrivateMediaSourceAVFObjC::platformMedia() const
{
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationAssetType;
    platformMedia.media.avfAsset = m_asset.get();
    return platformMedia;
}
296
// Returns the AVSampleBufferDisplayLayer used for accelerated rendering, or
// null when no layer has been created yet (see ensureLayer()/destroyLayer()).
PlatformLayer* MediaPlayerPrivateMediaSourceAVFObjC::platformLayer() const
{
    return m_sampleBufferDisplayLayer.get();
}
301
// Asynchronously starts playback on the main thread. A weak pointer protects
// against the player being destroyed before the hop completes.
void MediaPlayerPrivateMediaSourceAVFObjC::play()
{
    auto weakThis = createWeakPtr();
    callOnMainThread([weakThis] {
        if (weakThis)
            weakThis.get()->playInternal();
    });
}
311
// Main-thread half of play(). Refuses to start when already at or past the
// duration; otherwise flags playback and spins up the synchronizer when allowed.
// NOTE(review): dereferences m_mediaSourcePrivate without a null check —
// presumably play() can only follow a successful load(..., client); confirm.
void MediaPlayerPrivateMediaSourceAVFObjC::playInternal()
{
    if (currentMediaTime() >= m_mediaSourcePrivate->duration())
        return;

    m_playing = true;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}
321
// Asynchronously pauses playback on the main thread; mirrors play().
void MediaPlayerPrivateMediaSourceAVFObjC::pause()
{
    auto weakThis = createWeakPtr();
    callOnMainThread([weakThis] {
        if (weakThis)
            weakThis.get()->pauseInternal();
    });
}
331
// Main-thread half of pause(): clears the playing flag and halts the
// synchronizer clock.
void MediaPlayerPrivateMediaSourceAVFObjC::pauseInternal()
{
    m_playing = false;
    [m_synchronizer setRate:0];
}

// Paused is defined by the synchronizer's actual rate, not by m_playing.
bool MediaPlayerPrivateMediaSourceAVFObjC::paused() const
{
    return ![m_synchronizer rate];
}
342
// Propagates the player volume to every attached audio renderer.
// Uses a range-based for loop, matching the file's convention (cf. setPreservesPitch()).
void MediaPlayerPrivateMediaSourceAVFObjC::setVolume(float volume)
{
    for (auto& renderer : m_sampleBufferAudioRenderers)
        [renderer setVolume:volume];
}
348
// Fast forward/rewind scanning is supported (rate changes go through setRateDouble()).
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsScanning() const
{
    return true;
}
353
// Propagates the muted state to every attached audio renderer.
// Uses a range-based for loop, matching the file's convention (cf. setPreservesPitch()).
void MediaPlayerPrivateMediaSourceAVFObjC::setMuted(bool muted)
{
    for (auto& renderer : m_sampleBufferAudioRenderers)
        [renderer setMuted:muted];
}
359
// Natural video size as reported by the media source; empty before load.
FloatSize MediaPlayerPrivateMediaSourceAVFObjC::naturalSize() const
{
    return m_mediaSourcePrivate ? m_mediaSourcePrivate->naturalSize() : FloatSize();
}

// True when the media source exists and reports a video track.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasVideo() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->hasVideo();
}

// True when the media source exists and reports an audio track.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasAudio() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->hasAudio();
}
383
// Visibility does not affect this engine's rendering pipeline.
void MediaPlayerPrivateMediaSourceAVFObjC::setVisible(bool)
{
    // No-op.
}

// Duration comes from the media source; zero before load.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::durationMediaTime() const
{
    return m_mediaSourcePrivate ? m_mediaSourcePrivate->duration() : MediaTime::zeroTime();
}
393
// Current playback position, read from the synchronizer's timebase and clamped:
// never negative, and never earlier than the last seek target (the timebase can
// still report a pre-seek time immediately after a seek).
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::currentMediaTime() const
{
    MediaTime clockTime = toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
    if (clockTime < MediaTime::zeroTime())
        return MediaTime::zeroTime();
    return clockTime < m_lastSeekTime ? m_lastSeekTime : clockTime;
}
403
// MediaSource presentations always begin at time zero.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::startTime() const
{
    return MediaTime::zeroTime();
}

// Initial playback position is likewise always zero.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::initialTime() const
{
    return MediaTime::zeroTime();
}
413
// Records a pending seek (target plus allowed thresholds) and schedules
// seekInternal() on a zero-delay one-shot timer, coalescing rapid seek requests.
// Fix: removed an unused local `weakThis` — nothing in this method captured it.
void MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(const MediaTime& time, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold)
{
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(%p) - time(%s), negativeThreshold(%s), positiveThreshold(%s)", this, toString(time).utf8().data(), toString(negativeThreshold).utf8().data(), toString(positiveThreshold).utf8().data());
    m_seeking = true;
    m_pendingSeek = std::make_unique<PendingSeek>(time, negativeThreshold, positiveThreshold);

    // Restart the timer so only the most recent request is serviced.
    if (m_seekTimer.isActive())
        m_seekTimer.stop();
    m_seekTimer.startOneShot(0);
}
425
// Services the pending seek recorded by seekWithTolerance(): resolves the actual
// seek time (exact, or via fastSeekTimeForMediaTime when thresholds allow an
// approximate seek), stops the clock at that time, and tells the source to seek.
// Idiom: take ownership of m_pendingSeek with WTFMove (the file's convention)
// instead of the equivalent but roundabout unique_ptr::swap.
void MediaPlayerPrivateMediaSourceAVFObjC::seekInternal()
{
    auto pendingSeek = WTFMove(m_pendingSeek);

    if (!pendingSeek)
        return;

    if (!m_mediaSourcePrivate)
        return;

    if (!pendingSeek->negativeThreshold && !pendingSeek->positiveThreshold)
        m_lastSeekTime = pendingSeek->targetTime;
    else
        m_lastSeekTime = m_mediaSourcePrivate->fastSeekTimeForMediaTime(pendingSeek->targetTime, pendingSeek->positiveThreshold, pendingSeek->negativeThreshold);

    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekInternal(%p) - seekTime(%s)", this, toString(m_lastSeekTime).utf8().data());

    // Halt the clock at the seek target; the time-jumped observer restores the
    // rate once the seek completes.
    [m_synchronizer setRate:0 time:toCMTime(m_lastSeekTime)];
    m_mediaSourcePrivate->seekToTime(m_lastSeekTime);
}
447
// Marks an in-progress seek as blocked on data (e.g. the source buffer must
// append media covering the seek target before playback can resume).
void MediaPlayerPrivateMediaSourceAVFObjC::waitForSeekCompleted()
{
    if (!m_seeking)
        return;
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::waitForSeekCompleted(%p)", this);
    m_seekCompleted = false;
}

// Counterpart of waitForSeekCompleted(): the data arrived, so resume the rate
// if playback should continue and notify the client when the seek fully ended.
void MediaPlayerPrivateMediaSourceAVFObjC::seekCompleted()
{
    if (m_seekCompleted)
        return;
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekCompleted(%p)", this);
    m_seekCompleted = true;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
    if (!m_seeking)
        m_player->timeChanged();
}

// A seek is outstanding only while both "seek started" and "not yet completed" hold.
bool MediaPlayerPrivateMediaSourceAVFObjC::seeking() const
{
    return m_seeking && !m_seekCompleted;
}
472
// Stores the requested rate; it is applied to the synchronizer only when
// playback is actually allowed (see shouldBePlaying()).
void MediaPlayerPrivateMediaSourceAVFObjC::setRateDouble(double rate)
{
    m_rate = rate;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}

// Selects the time-pitch algorithm on every audio renderer: Spectral preserves
// pitch across rate changes, Varispeed does not.
void MediaPlayerPrivateMediaSourceAVFObjC::setPreservesPitch(bool preservesPitch)
{
    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    for (auto& renderer : m_sampleBufferAudioRenderers)
        [renderer setAudioTimePitchAlgorithm:algorithm];
}
486
// Current network state, updated via setNetworkState().
MediaPlayer::NetworkState MediaPlayerPrivateMediaSourceAVFObjC::networkState() const
{
    return m_networkState;
}

// Current ready state, updated via setReadyState().
MediaPlayer::ReadyState MediaPlayerPrivateMediaSourceAVFObjC::readyState() const
{
    return m_readyState;
}

// Seekable range is the single interval [minMediaTimeSeekable, maxMediaTimeSeekable].
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::seekable() const
{
    return std::make_unique<PlatformTimeRanges>(minMediaTimeSeekable(), maxMediaTimeSeekable());
}

// The whole presentation is seekable, up to the current duration.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::maxMediaTimeSeekable() const
{
    return durationMediaTime();
}

// Seekable range starts at the presentation start (always zero, see startTime()).
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::minMediaTimeSeekable() const
{
    return startTime();
}

// Buffered ranges come from the media source; empty before load.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::buffered() const
{
    return m_mediaSourcePrivate ? m_mediaSourcePrivate->buffered() : std::make_unique<PlatformTimeRanges>();
}
516
// Reads and clears the loading-progress flag set elsewhere (m_loadingProgressed
// is mutable so this const query can reset it).
bool MediaPlayerPrivateMediaSourceAVFObjC::didLoadingProgress() const
{
    if (!m_loadingProgressed)
        return false;
    m_loadingProgressed = false;
    return true;
}
523
// The display layer handles sizing; nothing to do here.
void MediaPlayerPrivateMediaSourceAVFObjC::setSize(const IntSize&)
{
    // No-op.
}

// Software painting is not implemented for this engine.
void MediaPlayerPrivateMediaSourceAVFObjC::paint(GraphicsContext&, const FloatRect&)
{
    // FIXME(125157): Implement painting.
}

// Software painting of the current frame is not implemented either.
void MediaPlayerPrivateMediaSourceAVFObjC::paintCurrentFrameInContext(GraphicsContext&, const FloatRect&)
{
    // FIXME(125157): Implement painting.
}

// Whether at least one video frame has been made available for display.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasAvailableVideoFrame() const
{
    return m_hasAvailableVideoFrame;
}

// Rendering always goes through an AVSampleBufferDisplayLayer, so acceleration
// is always supported.
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsAcceleratedRendering() const
{
    return true;
}
548
// Creates or tears down the display layer to track whether the client can
// composite accelerated content.
void MediaPlayerPrivateMediaSourceAVFObjC::acceleratedRenderingStateChanged()
{
    bool canBeAccelerated = m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player);
    if (!canBeAccelerated) {
        destroyLayer();
        return;
    }
    ensureLayer();
}
556
// MediaSource content is treated as a stored (non-live) stream.
MediaPlayer::MovieLoadType MediaPlayerPrivateMediaSourceAVFObjC::movieLoadType() const
{
    return MediaPlayer::StoredStream;
}

// Rendering objects are created lazily elsewhere; nothing to prepare.
void MediaPlayerPrivateMediaSourceAVFObjC::prepareForRendering()
{
    // No-op.
}

// Human-readable engine name for diagnostics.
String MediaPlayerPrivateMediaSourceAVFObjC::engineDescription() const
{
    static NeverDestroyed<String> description(ASCIILiteral("AVFoundation MediaSource Engine"));
    return description;
}

// Primary-audio-track language is not yet surfaced by this engine.
String MediaPlayerPrivateMediaSourceAVFObjC::languageOfPrimaryAudioTrack() const
{
    // FIXME(125158): implement languageOfPrimaryAudioTrack()
    return emptyString();
}

// No extra memory is reported for this engine.
size_t MediaPlayerPrivateMediaSourceAVFObjC::extraMemoryCost() const
{
    return 0;
}
583
// Frame statistics below come from the display layer's SPI metrics object.
// If no layer exists, messaging nil yields 0 — that is the intended fallback.
unsigned long MediaPlayerPrivateMediaSourceAVFObjC::totalVideoFrames()
{
    return [[m_sampleBufferDisplayLayer videoPerformanceMetrics] totalNumberOfVideoFrames];
}

// Count of frames the layer dropped (could not display in time).
unsigned long MediaPlayerPrivateMediaSourceAVFObjC::droppedVideoFrames()
{
    return [[m_sampleBufferDisplayLayer videoPerformanceMetrics] numberOfDroppedVideoFrames];
}

// Count of frames the layer reported as corrupted.
unsigned long MediaPlayerPrivateMediaSourceAVFObjC::corruptedVideoFrames()
{
    return [[m_sampleBufferDisplayLayer videoPerformanceMetrics] numberOfCorruptedVideoFrames];
}

// Accumulated display delay, converted from seconds (0.0 with no layer).
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::totalFrameDelay()
{
    return MediaTime::createWithDouble([[m_sampleBufferDisplayLayer videoPerformanceMetrics] totalFrameDelay]);
}
603
604 #pragma mark -
605 #pragma mark Utility Methods
606
// Lazily creates the AVSampleBufferDisplayLayer and attaches it to the
// synchronizer so its frames are timed by the shared clock. Idempotent.
void MediaPlayerPrivateMediaSourceAVFObjC::ensureLayer()
{
    if (m_sampleBufferDisplayLayer)
        return;

    m_sampleBufferDisplayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
#ifndef NDEBUG
    // Name the layer in debug builds to aid layer-tree debugging.
    [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaSource AVSampleBufferDisplayLayer"];
#endif

    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
}
619
// Detaches the display layer from the synchronizer (at the current clock time,
// as the removal API requires) and drops our reference. Idempotent.
void MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer()
{
    if (!m_sampleBufferDisplayLayer)
        return;

    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];
    m_sampleBufferDisplayLayer = nullptr;
}
631
// Gate for starting the synchronizer: the client asked to play, no seek is
// outstanding, and enough data is buffered to advance (HaveFutureData or better).
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldBePlaying() const
{
    return m_playing && !seeking() && m_readyState >= MediaPlayer::HaveFutureData;
}
636
// Notifies the client of a duration change and (re)installs a boundary time
// observer at the new duration so playback pauses exactly when it is reached.
// Fix: the "boundary time observer called before duration" LOG passed
// weakThis.get() with no matching conversion in the format string; added %p.
void MediaPlayerPrivateMediaSourceAVFObjC::durationChanged()
{
    m_player->durationChanged();

    // Replace any observer installed for the previous duration.
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];

    if (!m_mediaSourcePrivate)
        return;

    MediaTime duration = m_mediaSourcePrivate->duration();
    auto weakThis = createWeakPtr();
    NSArray* times = @[[NSValue valueWithCMTime:toCMTime(duration)]];

    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::durationChanged(%p) - duration = %s", this, toString(duration).utf8().data());

    m_durationObserver = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[weakThis, duration] {
        if (!weakThis)
            return;

        MediaTime now = weakThis->currentMediaTime();
        LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::durationChanged(%p) - boundary time observer called, now = %s", weakThis.get(), toString(now).utf8().data());

        weakThis->pauseInternal();
        if (now < duration) {
            LOG(MediaSource, "   ERROR: boundary time observer called before duration! (%p)", weakThis.get());
            // Snap the clock to the duration so currentTime reports the end.
            [weakThis->m_synchronizer setRate:0 time:toCMTime(duration)];
        }
        weakThis->m_player->timeChanged();

    }];

    // If the duration shrank to (or below) the current position, stop now.
    if (m_playing && duration <= currentMediaTime())
        pauseInternal();
}
672
// Called (on the main thread) when the synchronizer timebase's effective rate
// changes; forwards to the client.
void MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged()
{
    m_player->rateChanged();
}

// Forwards a natural-size change from the media source to the client.
void MediaPlayerPrivateMediaSourceAVFObjC::sizeChanged()
{
    m_player->sizeChanged();
}
682
683 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Lazily creates the AVStreamSession used for encrypted-media key exchange.
// Returns nil when the SPI class/initializer is unavailable or when no usable
// key-storage directory can be established.
AVStreamSession* MediaPlayerPrivateMediaSourceAVFObjC::streamSession()
{
    if (!getAVStreamSessionClass() || ![getAVStreamSessionClass() instancesRespondToSelector:@selector(initWithStorageDirectoryAtURL:)])
        return nil;

    if (!m_streamSession) {
        String storageDirectory = m_player->mediaKeysStorageDirectory();
        if (storageDirectory.isEmpty())
            return nil;

        // Create the storage directory on first use.
        if (!fileExists(storageDirectory)) {
            if (!makeAllDirectories(storageDirectory))
                return nil;
        }

        // Secure-stop records are kept in a plist inside the storage directory.
        String storagePath = pathByAppendingComponent(storageDirectory, "SecureStop.plist");
        m_streamSession = adoptNS([allocAVStreamSessionInstance() initWithStorageDirectoryAtURL:[NSURL fileURLWithPath:storagePath]]);
    }
    return m_streamSession.get();
}
704
// Associates a CDM session with this player and propagates it to every source
// buffer so incoming encrypted samples can be decrypted.
// Fix: guard m_mediaSourcePrivate before iterating its source buffers — it is
// null until load(..., client) runs, and a CDM session may be set before that.
void MediaPlayerPrivateMediaSourceAVFObjC::setCDMSession(CDMSession* session)
{
    if (session == m_session)
        return;

    m_session = toCDMSessionMediaSourceAVFObjC(session);

    if (CDMSessionAVStreamSession* cdmStreamSession = toCDMSessionAVStreamSession(m_session))
        cdmStreamSession->setStreamSession(m_streamSession.get());

    if (!m_mediaSourcePrivate)
        return;

    for (auto& sourceBuffer : m_mediaSourcePrivate->sourceBuffers())
        sourceBuffer->setCDMSession(m_session);
}
717
// Forwards an encrypted-media "key needed" event (with its init data) to the client.
void MediaPlayerPrivateMediaSourceAVFObjC::keyNeeded(Uint8Array* initData)
{
    m_player->keyNeeded(initData);
}
722 #endif
723
// Updates the ready state and starts/stops the synchronizer accordingly:
// crossing into HaveFutureData (while playing) resumes the clock, dropping
// below it halts playback. Notifies the client on any change.
void MediaPlayerPrivateMediaSourceAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (m_readyState == readyState)
        return;

    m_readyState = readyState;

    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
    else
        [m_synchronizer setRate:0];

    m_player->readyStateChanged();
}

// Updates the network state and notifies the client on change.
void MediaPlayerPrivateMediaSourceAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    if (m_networkState == networkState)
        return;

    m_networkState = networkState;
    m_player->networkStateChanged();
}
747
// Adopts a display layer created elsewhere (e.g. by a source buffer), attaches
// it to the synchronizer, and tells the client the rendering mode changed.
void MediaPlayerPrivateMediaSourceAVFObjC::addDisplayLayer(AVSampleBufferDisplayLayer* displayLayer)
{
    ASSERT(displayLayer);
    if (displayLayer == m_sampleBufferDisplayLayer)
        return;

    m_sampleBufferDisplayLayer = displayLayer;
    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
    m_player->client().mediaPlayerRenderingModeChanged(m_player);

    // FIXME: move this somewhere appropriate:
    m_player->firstVideoFrameAvailable();
}

// Inverse of addDisplayLayer(): detaches the layer from the synchronizer (at
// the current clock time, as the removal API requires) and drops our reference.
void MediaPlayerPrivateMediaSourceAVFObjC::removeDisplayLayer(AVSampleBufferDisplayLayer* displayLayer)
{
    if (displayLayer != m_sampleBufferDisplayLayer)
        return;

    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    m_sampleBufferDisplayLayer = nullptr;
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
775
// Registers an audio renderer: seeds it with the player's current mute/volume/
// pitch settings, attaches it to the synchronizer, and notifies the client.
// Duplicate additions are ignored.
void MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
{
    if (m_sampleBufferAudioRenderers.contains(audioRenderer))
        return;

    m_sampleBufferAudioRenderers.append(audioRenderer);

    [audioRenderer setMuted:m_player->muted()];
    [audioRenderer setVolume:m_player->volume()];
    [audioRenderer setAudioTimePitchAlgorithm:(m_player->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    [m_synchronizer addRenderer:audioRenderer];
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}

// Inverse of addAudioRenderer(): detaches the renderer from the synchronizer
// (at the current clock time) and forgets it. Unknown renderers are ignored.
void MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
{
    size_t pos = m_sampleBufferAudioRenderers.find(audioRenderer);
    if (pos == notFound)
        return;

    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:audioRenderer atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    m_sampleBufferAudioRenderers.remove(pos);
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
805
// Forwards a media-characteristic change (e.g. track configuration) to the client.
void MediaPlayerPrivateMediaSourceAVFObjC::characteristicsChanged()
{
    m_player->characteristicChanged();
}
810
811 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Takes ownership of the wireless playback target (e.g. an AirPlay device).
void MediaPlayerPrivateMediaSourceAVFObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);
}

// Toggles routing to the wireless target and tells the client when the
// effective wireless state may have changed.
void MediaPlayerPrivateMediaSourceAVFObjC::setShouldPlayToPlaybackTarget(bool shouldPlayToTarget)
{
    if (shouldPlayToTarget == m_shouldPlayToTarget)
        return;

    m_shouldPlayToTarget = shouldPlayToTarget;

    if (m_player)
        m_player->currentPlaybackTargetIsWirelessChanged();
}

// Wireless playback is active only when a target exists, routing is enabled,
// and the target reports an active route.
bool MediaPlayerPrivateMediaSourceAVFObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_playbackTarget)
        return false;

    return m_shouldPlayToTarget && m_playbackTarget->hasActiveRoute();
}
835 #endif
836
837 }
838
839 #endif