// Unreviewed, rolling out r244627.
// Source/WebCore/platform/graphics/avfoundation/objc/MediaPlayerPrivateMediaSourceAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "CDMSessionAVStreamSession.h"
34 #import "GraphicsContextCG.h"
35 #import "Logging.h"
36 #import "MediaSourcePrivateAVFObjC.h"
37 #import "MediaSourcePrivateClient.h"
38 #import "PixelBufferConformerCV.h"
39 #import "TextTrackRepresentation.h"
40 #import "TextureCacheCV.h"
41 #import "VideoFullscreenLayerManagerObjC.h"
42 #import "VideoTextureCopierCV.h"
43 #import "WebCoreDecompressionSession.h"
44 #import <AVFoundation/AVAsset.h>
45 #import <AVFoundation/AVTime.h>
46 #import <QuartzCore/CALayer.h>
47 #import <objc_runtime.h>
48 #import <pal/avfoundation/MediaTimeAVFoundation.h>
49 #import <pal/spi/mac/AVFoundationSPI.h>
50 #import <wtf/Deque.h>
51 #import <wtf/FileSystem.h>
52 #import <wtf/MainThread.h>
53 #import <wtf/NeverDestroyed.h>
54
55 #pragma mark - Soft Linking
56
57 #import <pal/cf/CoreMediaSoftLink.h>
58 #import "CoreVideoSoftLink.h"
59
60 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
61
62 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVAsset)
63 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVURLAsset)
64 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
65 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferAudioRenderer)
66 ALLOW_NEW_API_WITHOUT_GUARDS_END
67 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
68 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
69 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferRenderSynchronizer)
70 ALLOW_NEW_API_WITHOUT_GUARDS_END
71 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamDataParser)
72 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamSession);
73 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVVideoPerformanceMetrics)
74
75 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
76 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
77
78 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
79 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
80
81 #pragma mark -
82 #pragma mark AVStreamSession
83
// Minimal forward declaration of the AVStreamSession SPI class (soft-linked
// above); only the initializer this file needs is declared.
@interface AVStreamSession : NSObject
- (instancetype)initWithStorageDirectoryAtURL:(NSURL *)storageDirectory;
@end
87
88 namespace WebCore {
89 using namespace PAL;
90
// Maps a SeekState value to a human-readable name for logging.
// NOTE(review): "WaitingForAvailableFame" mirrors the enumerator's spelling in
// the header (apparently a typo for "Frame"); the string table must stay in
// sync with the enum, so the spelling is preserved here.
String convertEnumerationToString(MediaPlayerPrivateMediaSourceAVFObjC::SeekState enumerationValue)
{
    static const NeverDestroyed<String> values[] = {
        MAKE_STATIC_STRING_IMPL("Seeking"),
        MAKE_STATIC_STRING_IMPL("WaitingForAvailableFame"),
        MAKE_STATIC_STRING_IMPL("SeekCompleted"),
    };
    // Guard the table's ordering against any renumbering of the enum.
    static_assert(static_cast<size_t>(MediaPlayerPrivateMediaSourceAVFObjC::SeekState::Seeking) == 0, "MediaPlayerPrivateMediaSourceAVFObjC::SeekState::Seeking is not 0 as expected");
    static_assert(static_cast<size_t>(MediaPlayerPrivateMediaSourceAVFObjC::SeekState::WaitingForAvailableFame) == 1, "MediaPlayerPrivateMediaSourceAVFObjC::SeekState::WaitingForAvailableFame is not 1 as expected");
    static_assert(static_cast<size_t>(MediaPlayerPrivateMediaSourceAVFObjC::SeekState::SeekCompleted) == 2, "MediaPlayerPrivateMediaSourceAVFObjC::SeekState::SeekCompleted is not 2 as expected");
    ASSERT(static_cast<size_t>(enumerationValue) < WTF_ARRAY_LENGTH(values));
    return values[static_cast<size_t>(enumerationValue)];
}
104     
105 #pragma mark -
106 #pragma mark MediaPlayerPrivateMediaSourceAVFObjC
107
// CMNotificationCenter callback fired when the synchronizer timebase's
// effective rate changes. The listener registered in the constructor is the
// player itself, so recover it from the opaque pointer and bounce to the main
// thread via a WeakPtr in case the player is destroyed in the interim.
static void CMTimebaseEffectiveRateChangedCallback(CMNotificationCenterRef, const void *listener, CFStringRef, const void *, CFTypeRef)
{
    MediaPlayerPrivateMediaSourceAVFObjC* player = (MediaPlayerPrivateMediaSourceAVFObjC*)const_cast<void*>(listener);
    callOnMainThread([weakThis = player->createWeakPtr()] {
        if (!weakThis)
            return;
        weakThis.get()->effectiveRateChanged();
    });
}
117
// Creates the player and wires up the AVSampleBufferRenderSynchronizer that
// drives all audio/video renderers. Also registers for timebase effective-rate
// notifications and installs a periodic time observer used to detect time
// jumps after a seek (see workaround note below).
MediaPlayerPrivateMediaSourceAVFObjC::MediaPlayerPrivateMediaSourceAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_synchronizer(adoptNS([allocAVSampleBufferRenderSynchronizerInstance() init]))
    , m_seekTimer(*this, &MediaPlayerPrivateMediaSourceAVFObjC::seekInternal)
    , m_networkState(MediaPlayer::Empty)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_rate(1)
    , m_playing(false) // Was `0`; use a bool literal for a boolean member.
    , m_seeking(false)
    , m_loadingProgressed(false)
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
#if !RELEASE_LOG_DISABLED
    , m_logger(player->mediaPlayerLogger())
    , m_logIdentifier(player->mediaPlayerLogIdentifier())
#endif
{
    // Listen for effective-rate changes on the synchronizer's timebase; the
    // callback bounces to the main thread and calls effectiveRateChanged().
    // The matching RemoveListener call is in the destructor.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterAddListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase, 0);

    auto logSiteIdentifier = LOGIDENTIFIER;
    ALWAYS_LOG(logSiteIdentifier);
    UNUSED_PARAM(logSiteIdentifier);

    // addPeriodicTimeObserverForInterval: throws an exception if you pass a non-numeric CMTime, so just use
    // an arbitrarily large time value of once an hour:
    __block auto weakThis = createWeakPtr();
    m_timeJumpedObserver = [m_synchronizer addPeriodicTimeObserverForInterval:PAL::toCMTime(MediaTime::createWithDouble(3600)) queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
#if LOG_DISABLED
        UNUSED_PARAM(time);
#endif
        // FIXME: Remove the below once <rdar://problem/15798050> is fixed.
        if (!weakThis)
            return;

        DEBUG_LOG(logSiteIdentifier, "synchronizer fired for ", toMediaTime(time), ", seeking = ", m_seeking, ", pending = ", !!m_pendingSeek);

        // The observer also fires when the synchronizer's time jumps (e.g.
        // when a seek lands); finish an in-flight seek here.
        if (m_seeking && !m_pendingSeek) {
            m_seeking = false;

            if (shouldBePlaying())
                [m_synchronizer setRate:m_rate];
            if (!seeking() && m_seekCompleted == SeekCompleted)
                m_player->timeChanged();
        }

        // A new seek request arrived while the previous one was in flight;
        // service it now.
        if (m_pendingSeek)
            seekInternal();
    }];
}
168
// Tears down everything the constructor and the ensure* helpers registered:
// the timebase rate-change listener, synchronizer time observers, pending
// size-change callbacks, the display layer, and the decompression session.
MediaPlayerPrivateMediaSourceAVFObjC::~MediaPlayerPrivateMediaSourceAVFObjC()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    // Mirror of the AddListener call in the constructor.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterRemoveListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase);

    if (m_timeJumpedObserver)
        [m_synchronizer removeTimeObserver:m_timeJumpedObserver.get()];
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];
    flushPendingSizeChanges();

    destroyLayer();
    destroyDecompressionSession();

    m_seekTimer.stop();
}
188
189 #pragma mark -
190 #pragma mark MediaPlayer Factory Methods
191
// Registers this engine with the MediaPlayer factory. No-op when the required
// AVFoundation/CoreMedia classes cannot be soft-linked at runtime.
void MediaPlayerPrivateMediaSourceAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateMediaSourceAVFObjC>(player); },
        getSupportedTypes, supportsType, 0, 0, 0, 0);
    ASSERT(AVFoundationMIMETypeCache::singleton().isAvailable());
}
201
// Returns true when all soft-linked frameworks and classes this engine depends
// on are present. The -setMuted: check guards against OS versions whose
// AVSampleBufferAudioRenderer predates that selector.
bool MediaPlayerPrivateMediaSourceAVFObjC::isAvailable()
{
    return AVFoundationLibrary()
        && isCoreMediaFrameworkAvailable()
        && getAVStreamDataParserClass()
        && getAVSampleBufferAudioRendererClass()
        && getAVSampleBufferRenderSynchronizerClass()
        && class_getInstanceMethod(getAVSampleBufferAudioRendererClass(), @selector(setMuted:));
}
211
// Reports the MIME types AVFoundation can handle, via the shared type cache.
void MediaPlayerPrivateMediaSourceAVFObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    types = AVFoundationMIMETypeCache::singleton().types();
}
216
// Determines whether this engine can play the given content type. Only
// MediaSource loads are supported; the container type must be decodable by
// AVFoundation and the codecs (if specified) must meet any hardware-decode
// requirements.
MediaPlayer::SupportsType MediaPlayerPrivateMediaSourceAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    // This engine does not support non-media-source sources.
    if (!parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    if (parameters.type.isEmpty() || !AVFoundationMIMETypeCache::singleton().canDecodeType(parameters.type.containerType()))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    auto codecs = parameters.type.parameter(ContentType::codecsParameter());
    if (codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Let AVStreamDataParser translate codec strings it spells differently
    // than the web platform does, when that SPI is available.
    NSString *outputCodecs = codecs;
    if ([getAVStreamDataParserClass() respondsToSelector:@selector(outputMIMECodecParameterForInputMIMECodecParameter:)])
        outputCodecs = [getAVStreamDataParserClass() outputMIMECodecParameterForInputMIMECodecParameter:outputCodecs];

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type.containerType(), (NSString *)outputCodecs];
    // Fixed: the original ended this return with a stray double semicolon.
    return [getAVURLAssetClass() isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
246
247 #pragma mark -
248 #pragma mark MediaPlayerPrivateInterface Overrides
249
// Plain-URL load entry point: always fails, since this engine only plays
// MediaSource-backed content (see the overload below).
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String&)
{
    // This media engine only supports MediaSource URLs.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
}
256
// MediaSource load entry point: creates the private media source and hands it
// whichever rendering path (display layer or decompression session) currently
// exists, then re-evaluates which path should be active.
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String&, MediaSourcePrivateClient* client)
{
    ALWAYS_LOG(LOGIDENTIFIER);

    m_mediaSourcePrivate = MediaSourcePrivateAVFObjC::create(this, client);
    m_mediaSourcePrivate->setVideoLayer(m_sampleBufferDisplayLayer.get());
    m_mediaSourcePrivate->setDecompressionSession(m_decompressionSession.get());

    acceleratedRenderingStateChanged();
}
267
#if ENABLE(MEDIA_STREAM)
// MediaStream loads are not supported by this engine either.
// NOTE(review): this overload goes through setNetworkState() while the
// plain-URL overload assigns m_networkState directly — presumably equivalent;
// confirm against setNetworkState()'s definition elsewhere in the file.
void MediaPlayerPrivateMediaSourceAVFObjC::load(MediaStreamPrivate&)
{
    setNetworkState(MediaPlayer::FormatError);
}
#endif
274
// Nothing to cancel: loading is driven entirely by the MediaSource client.
void MediaPlayerPrivateMediaSourceAVFObjC::cancelLoad()
{
}
278
// No preparation is needed before playback starts.
void MediaPlayerPrivateMediaSourceAVFObjC::prepareToPlay()
{
}
282
// Returns the layer the compositor should use for inline video rendering.
PlatformLayer* MediaPlayerPrivateMediaSourceAVFObjC::platformLayer() const
{
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
287
// Public play entry point. Playback state is only mutated on the main thread,
// so hop there (via a WeakPtr, in case we are destroyed first) and let
// playInternal() do the work.
void MediaPlayerPrivateMediaSourceAVFObjC::play()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    callOnMainThread([weakThis = createWeakPtr()] {
        if (weakThis)
            weakThis->playInternal();
    });
}
297
// Main-thread half of play(): marks the player as playing and starts the
// synchronizer if all renderers are ready.
void MediaPlayerPrivateMediaSourceAVFObjC::playInternal()
{
    // Guard against play() arriving before load(): every other accessor in
    // this class null-checks m_mediaSourcePrivate, and dereferencing it below
    // would crash if no media source has been attached yet.
    if (!m_mediaSourcePrivate)
        return;

    // Ignore play requests at (or past) the end of the media.
    if (currentMediaTime() >= m_mediaSourcePrivate->duration())
        return;

    ALWAYS_LOG(LOGIDENTIFIER);
    m_playing = true;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}
308
// Public pause entry point; mirrors play() by deferring the actual state
// change to pauseInternal() on the main thread.
void MediaPlayerPrivateMediaSourceAVFObjC::pause()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    callOnMainThread([weakThis = createWeakPtr()] {
        if (weakThis)
            weakThis->pauseInternal();
    });
}
318
// Main-thread half of pause(): clears the playing flag and halts the
// synchronizer immediately.
void MediaPlayerPrivateMediaSourceAVFObjC::pauseInternal()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    m_playing = false;
    [m_synchronizer setRate:0];
}
325
// Paused is defined by the synchronizer's actual rate, not by m_playing, so a
// stalled player (rate forced to 0 by updateAllRenderersHaveAvailableSamples)
// also reports paused.
bool MediaPlayerPrivateMediaSourceAVFObjC::paused() const
{
    return ![m_synchronizer rate];
}
330
// Applies the volume to every audio renderer. Map keys are CFTypeRef-erased
// renderer pointers, hence the __bridge cast back to the ObjC type.
void MediaPlayerPrivateMediaSourceAVFObjC::setVolume(float volume)
{
    ALWAYS_LOG(LOGIDENTIFIER, volume);
    for (const auto& key : m_sampleBufferAudioRendererMap.keys())
        [(__bridge AVSampleBufferAudioRenderer *)key.get() setVolume:volume];
}
337
// Fast-forward/rewind scanning is always supported by this engine.
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsScanning() const
{
    return true;
}
342
// Applies the muted state to every audio renderer (same bridging pattern as
// setVolume above).
void MediaPlayerPrivateMediaSourceAVFObjC::setMuted(bool muted)
{
    ALWAYS_LOG(LOGIDENTIFIER, muted);
    for (const auto& key : m_sampleBufferAudioRendererMap.keys())
        [(__bridge AVSampleBufferAudioRenderer *)key.get() setMuted:muted];
}
349
// Returns the cached natural (intrinsic) video size.
FloatSize MediaPlayerPrivateMediaSourceAVFObjC::naturalSize() const
{
    return m_naturalSize;
}
354
// True when a media source is attached and it contains a video track.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasVideo() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->hasVideo();
}
362
// True when a media source is attached and it contains an audio track.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasAudio() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->hasAudio();
}
370
// Tracks element visibility. Becoming visible re-evaluates the rendering path
// (layer vs. decompression session); becoming hidden deliberately does not
// tear anything down.
void MediaPlayerPrivateMediaSourceAVFObjC::setVisible(bool visible)
{
    if (m_visible == visible)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, visible);
    m_visible = visible;
    if (m_visible)
        acceleratedRenderingStateChanged();
}
381
// Duration comes from the attached media source; zero before load().
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::durationMediaTime() const
{
    if (!m_mediaSourcePrivate)
        return MediaTime::zeroTime();
    return m_mediaSourcePrivate->duration();
}
386
// Reports the current playback position, clamped into a sensible range.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::currentMediaTime() const
{
    auto now = PAL::toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));

    // Never report a negative time.
    if (now < MediaTime::zeroTime())
        return MediaTime::zeroTime();

    // While the synchronizer is still catching up to a recent seek, report the
    // seek target rather than a stale earlier time.
    return now < m_lastSeekTime ? m_lastSeekTime : now;
}
396
// MediaSource presentations always start at time zero.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::startTime() const
{
    return MediaTime::zeroTime();
}
401
// No initial-time metadata for MediaSource content; always zero.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::initialTime() const
{
    return MediaTime::zeroTime();
}
406
// Records a seek request and schedules seekInternal() on a zero-delay one-shot
// timer. A request arriving while one is already queued replaces it (the timer
// is restarted), so only the latest target is serviced.
void MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(const MediaTime& time, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold)
{
    INFO_LOG(LOGIDENTIFIER, "time = ", time, ", negativeThreshold = ", negativeThreshold, ", positiveThreshold = ", positiveThreshold);

    m_seeking = true;
    // Removed an unused local (`auto weakThis = createWeakPtr();`) — nothing
    // in this function captured or used it.
    m_pendingSeek = std::make_unique<PendingSeek>(time, negativeThreshold, positiveThreshold);

    if (m_seekTimer.isActive())
        m_seekTimer.stop();
    m_seekTimer.startOneShot(0_s);
}
419
// Services the pending seek recorded by seekWithTolerance(): resolves the
// actual target time (honoring fast-seek tolerances), rewinds the synchronizer
// to it, and tells the media source to seek. Completion is normally signalled
// by the periodic time observer installed in the constructor.
void MediaPlayerPrivateMediaSourceAVFObjC::seekInternal()
{
    // Take ownership of the pending request; a new one may be queued while
    // this seek is in flight.
    std::unique_ptr<PendingSeek> pendingSeek;
    pendingSeek.swap(m_pendingSeek);

    if (!pendingSeek)
        return;

    if (!m_mediaSourcePrivate)
        return;

    // Zero tolerances mean an exact seek; otherwise let the media source pick
    // the cheapest nearby sync sample within the allowed window.
    if (!pendingSeek->negativeThreshold && !pendingSeek->positiveThreshold)
        m_lastSeekTime = pendingSeek->targetTime;
    else
        m_lastSeekTime = m_mediaSourcePrivate->fastSeekTimeForMediaTime(pendingSeek->targetTime, pendingSeek->positiveThreshold, pendingSeek->negativeThreshold);

    // Normalize double-valued times onto the default rational timescale.
    if (m_lastSeekTime.hasDoubleValue())
        m_lastSeekTime = MediaTime::createWithDouble(m_lastSeekTime.toDouble(), MediaTime::DefaultTimeScale);

    MediaTime synchronizerTime = PAL::toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
    INFO_LOG(LOGIDENTIFIER, "seekTime = ", m_lastSeekTime, ", synchronizerTime = ", synchronizerTime);

    bool doesNotRequireSeek = synchronizerTime == m_lastSeekTime;

    m_mediaSourcePrivate->willSeek();
    [m_synchronizer setRate:0 time:PAL::toCMTime(m_lastSeekTime)];
    m_mediaSourcePrivate->seekToTime(m_lastSeekTime);

    // In cases where the destination seek time precisely matches the synchronizer's existing time
    // no time jumped notification will be issued. In this case, just notify the MediaPlayer that
    // the seek completed successfully.
    if (doesNotRequireSeek) {
        m_seeking = false;

        if (shouldBePlaying())
            [m_synchronizer setRate:m_rate];
        // Compare explicitly against SeekCompleted for consistency with the
        // identical check in the constructor's time-observer block. (The
        // original tested the enum's truthiness; behavior is unchanged because
        // seeking() is already true in the WaitingForAvailableFame state.)
        if (!seeking() && m_seekCompleted == SeekCompleted)
            m_player->timeChanged();
    }
}
460
// Called when seek completion must be deferred (e.g. buffers still appending):
// moves the completion state back to Seeking so seeking() stays true.
void MediaPlayerPrivateMediaSourceAVFObjC::waitForSeekCompleted()
{
    if (!m_seeking)
        return;
    ALWAYS_LOG(LOGIDENTIFIER);
    m_seekCompleted = Seeking;
}
468
// Marks the seek as complete. If video exists but no frame is displayable yet,
// park in WaitingForAvailableFame instead; setHasAvailableVideoFrame() will
// call back into this method once a frame arrives.
void MediaPlayerPrivateMediaSourceAVFObjC::seekCompleted()
{
    if (m_seekCompleted == SeekCompleted)
        return;
    if (hasVideo() && !m_hasAvailableVideoFrame) {
        ALWAYS_LOG(LOGIDENTIFIER, "waiting for video frame");
        m_seekCompleted = WaitingForAvailableFame;
        return;
    }
    ALWAYS_LOG(LOGIDENTIFIER);
    m_seekCompleted = SeekCompleted;
    // Resume playback that was suspended for the seek, and notify the player
    // unless another seek has already started.
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
    if (!m_seeking)
        m_player->timeChanged();
}
485
// The player is considered seeking while a seek is in flight (m_seeking) or
// while completion is still pending (m_seekCompleted != SeekCompleted).
bool MediaPlayerPrivateMediaSourceAVFObjC::seeking() const
{
    return m_seeking || m_seekCompleted != SeekCompleted;
}
490
// Stores the requested playback rate, clamped to >= 0, and applies it when the
// player should be actively playing.
void MediaPlayerPrivateMediaSourceAVFObjC::setRateDouble(double rate)
{
    // AVSampleBufferRenderSynchronizer does not support negative rate yet.
    m_rate = std::max<double>(rate, 0);
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}
498
// Switches every audio renderer's time-pitch algorithm: Spectral preserves
// pitch across rate changes, Varispeed does not.
void MediaPlayerPrivateMediaSourceAVFObjC::setPreservesPitch(bool preservesPitch)
{
    ALWAYS_LOG(LOGIDENTIFIER, preservesPitch);
    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    for (const auto& key : m_sampleBufferAudioRendererMap.keys())
        [(__bridge AVSampleBufferAudioRenderer *)key.get() setAudioTimePitchAlgorithm:algorithm];
}
506
// Returns the cached network state.
MediaPlayer::NetworkState MediaPlayerPrivateMediaSourceAVFObjC::networkState() const
{
    return m_networkState;
}
511
// Returns the cached ready state.
MediaPlayer::ReadyState MediaPlayerPrivateMediaSourceAVFObjC::readyState() const
{
    return m_readyState;
}
516
// The seekable range is the single span [minMediaTimeSeekable, maxMediaTimeSeekable].
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::seekable() const
{
    return std::make_unique<PlatformTimeRanges>(minMediaTimeSeekable(), maxMediaTimeSeekable());
}
521
// The whole duration is seekable.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::maxMediaTimeSeekable() const
{
    return durationMediaTime();
}
526
// Seeking begins at the presentation start time (always zero here).
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::minMediaTimeSeekable() const
{
    return startTime();
}
531
// Buffered ranges come from the media source; empty before load().
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::buffered() const
{
    return m_mediaSourcePrivate ? m_mediaSourcePrivate->buffered() : std::make_unique<PlatformTimeRanges>();
}
536
// Reports and clears the loading-progress flag set elsewhere when data is
// appended (read-and-reset, hence the mutable member despite const).
bool MediaPlayerPrivateMediaSourceAVFObjC::didLoadingProgress() const
{
    const bool progressedSinceLastQuery = m_loadingProgressed;
    m_loadingProgressed = false;
    return progressedSinceLastQuery;
}
543
// Element size changes are irrelevant to this engine's rendering paths.
void MediaPlayerPrivateMediaSourceAVFObjC::setSize(const IntSize&)
{
    // No-op.
}
548
// Returns a CGImage for the current frame, refreshing the cached image first.
// May be null when no frame is available (see updateLastImage()).
NativeImagePtr MediaPlayerPrivateMediaSourceAVFObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
554
// Pulls the pixel buffer for the current time from the decompression session
// into m_lastPixelBuffer. Returns true only when a new buffer was obtained.
// Only valid on the decompression-session path; when a display layer exists
// (or no session does), there is nothing to read from.
bool MediaPlayerPrivateMediaSourceAVFObjC::updateLastPixelBuffer()
{
    if (m_sampleBufferDisplayLayer || !m_decompressionSession)
        return false;

    // With no previous frame, accept a later frame rather than nothing;
    // otherwise require an exact-time match.
    auto flags = !m_lastPixelBuffer ? WebCoreDecompressionSession::AllowLater : WebCoreDecompressionSession::ExactTime;
    auto newPixelBuffer = m_decompressionSession->imageForTime(currentMediaTime(), flags);
    if (!newPixelBuffer)
        return false;

    m_lastPixelBuffer = newPixelBuffer;
    return true;
}
568
// Converts the latest pixel buffer into a CGImage (m_lastImage) via a lazily
// created BGRA conformer. Returns false when there is no new pixel buffer.
bool MediaPlayerPrivateMediaSourceAVFObjC::updateLastImage()
{
    if (!updateLastPixelBuffer())
        return false;

    ASSERT(m_lastPixelBuffer);

    if (!m_rgbConformer) {
        // CG wants 32BGRA; conform whatever format the decoder produced.
        auto attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
        m_rgbConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
    }

    m_lastImage = m_rgbConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());
    return true;
}
584
// Generic paint entry point; delegates to paintCurrentFrameInContext().
void MediaPlayerPrivateMediaSourceAVFObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    paintCurrentFrameInContext(context, rect);
}
589
// Software-paints the current video frame into the graphics context, scaling
// the whole frame into outputRect. No-op when painting is disabled or no
// frame is available.
void MediaPlayerPrivateMediaSourceAVFObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& outputRect)
{
    if (context.paintingDisabled())
        return;

    auto image = nativeImageForCurrentTime();
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(image.get()), CGImageGetHeight(image.get()));
    context.drawNativeImage(image, imageRect.size(), outputRect, imageRect);
}
603
// Copies the current video frame into a caller-provided GL texture; called
// when the media element is painted into a WebGL canvas. Returns false when
// no frame is available.
bool MediaPlayerPrivateMediaSourceAVFObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // We have been asked to paint into a WebGL canvas, so take that as a signal to create
    // a decompression session, even if that means the native video can't also be displayed
    // in page.
    if (!m_hasBeenAskedToPaintGL) {
        m_hasBeenAskedToPaintGL = true;
        acceleratedRenderingStateChanged();
    }

    ASSERT(context);

    // Refresh m_lastPixelBuffer, then bail if we still have no frame at all.
    // (The previous code only returned false when updateLastPixelBuffer()
    // succeeded yet left the buffer null — which cannot happen, since success
    // implies the buffer was just assigned — and would otherwise fall through
    // and hand a null CVPixelBufferRef to CVPixelBufferGetWidth() and the
    // texture copier.)
    updateLastPixelBuffer();
    if (!m_lastPixelBuffer)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t height = CVPixelBufferGetHeight(m_lastPixelBuffer.get());

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
629
// Returns whether a displayable video frame has been produced.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasAvailableVideoFrame() const
{
    return m_hasAvailableVideoFrame;
}
634
// This engine always renders via an accelerated path (layer or GL).
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsAcceleratedRendering() const
{
    return true;
}
639
// Selects exactly one rendering path: a WebGL consumer forces the
// decompression-session path; otherwise the AVSampleBufferDisplayLayer path
// is used. The unused path is torn down before the active one is created.
void MediaPlayerPrivateMediaSourceAVFObjC::acceleratedRenderingStateChanged()
{
    if (m_hasBeenAskedToPaintGL) {
        destroyLayer();
        ensureDecompressionSession();
    } else {
        destroyDecompressionSession();
        ensureLayer();
    }
}
650
// Forwards active-source-buffer changes up to the media player client.
void MediaPlayerPrivateMediaSourceAVFObjC::notifyActiveSourceBuffersChanged()
{
    m_player->client().mediaPlayerActiveSourceBuffersChanged(m_player);
}
655
// MediaSource content behaves like a stored (seekable, finite) stream.
MediaPlayer::MovieLoadType MediaPlayerPrivateMediaSourceAVFObjC::movieLoadType() const
{
    return MediaPlayer::StoredStream;
}
660
// Rendering setup happens lazily in ensureLayer()/ensureDecompressionSession().
void MediaPlayerPrivateMediaSourceAVFObjC::prepareForRendering()
{
    // No-op.
}
665
// Human-readable engine name, used in diagnostics.
String MediaPlayerPrivateMediaSourceAVFObjC::engineDescription() const
{
    static NeverDestroyed<String> description(MAKE_STATIC_STRING_IMPL("AVFoundation MediaSource Engine"));
    return description;
}
671
// Not implemented yet; callers get the empty string.
String MediaPlayerPrivateMediaSourceAVFObjC::languageOfPrimaryAudioTrack() const
{
    // FIXME(125158): implement languageOfPrimaryAudioTrack()
    return emptyString();
}
677
// No extra memory is reported to the garbage collector for this engine.
size_t MediaPlayerPrivateMediaSourceAVFObjC::extraMemoryCost() const
{
    return 0;
}
682
// Returns playback-quality statistics, preferring the decompression session's
// counters (the WebGL path) and falling back to the display layer's
// AVVideoPerformanceMetrics. Returns nullopt when neither source has data.
Optional<VideoPlaybackQualityMetrics> MediaPlayerPrivateMediaSourceAVFObjC::videoPlaybackQualityMetrics()
{
    if (m_decompressionSession) {
        return VideoPlaybackQualityMetrics {
            m_decompressionSession->totalVideoFrames(),
            m_decompressionSession->droppedVideoFrames(),
            m_decompressionSession->corruptedVideoFrames(),
            m_decompressionSession->totalFrameDelay().toDouble(),
            0, // Decompression-session path has no display-composited count.
        };
    }

    auto metrics = [m_sampleBufferDisplayLayer videoPerformanceMetrics];
    if (!metrics)
        return WTF::nullopt;

    // numberOfDisplayCompositedVideoFrames is newer SPI; probe before calling.
    uint32_t displayCompositedFrames = 0;
    ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
    if ([metrics respondsToSelector:@selector(numberOfDisplayCompositedVideoFrames)])
        displayCompositedFrames = [metrics numberOfDisplayCompositedVideoFrames];
    ALLOW_NEW_API_WITHOUT_GUARDS_END

    return VideoPlaybackQualityMetrics {
        static_cast<uint32_t>([metrics totalNumberOfVideoFrames]),
        static_cast<uint32_t>([metrics numberOfDroppedVideoFrames]),
        static_cast<uint32_t>([metrics numberOfCorruptedVideoFrames]),
        [metrics totalFrameDelay],
        displayCompositedFrames,
    };
}
713
714 #pragma mark -
715 #pragma mark Utility Methods
716
// Lazily creates the AVSampleBufferDisplayLayer, attaches it to the
// synchronizer, hands it to the media source, and registers it with the
// fullscreen layer manager. Idempotent.
void MediaPlayerPrivateMediaSourceAVFObjC::ensureLayer()
{
    if (m_sampleBufferDisplayLayer)
        return;

    m_sampleBufferDisplayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
#ifndef NDEBUG
    [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaSource AVSampleBufferDisplayLayer"];
#endif

    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setVideoLayer(m_sampleBufferDisplayLayer.get());
    m_videoFullscreenLayerManager->setVideoLayer(m_sampleBufferDisplayLayer.get(), snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size());
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
733
// Tears down the display layer: detaches it from the synchronizer at the
// current time, clears it from the media source and layer manager, and resets
// the available-frame flag. Idempotent.
void MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer()
{
    if (!m_sampleBufferDisplayLayer)
        return;

    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setVideoLayer(nullptr);
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
    m_sampleBufferDisplayLayer = nullptr;
    setHasAvailableVideoFrame(false);
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
751
// Lazily creates the GL-backed decompression session (WebGL path), slaves it
// to the synchronizer's timebase, and hands it to the media source. Idempotent.
void MediaPlayerPrivateMediaSourceAVFObjC::ensureDecompressionSession()
{
    if (m_decompressionSession)
        return;

    m_decompressionSession = WebCoreDecompressionSession::createOpenGL();
    m_decompressionSession->setTimebase([m_synchronizer timebase]);

    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setDecompressionSession(m_decompressionSession.get());

    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
765
// Tears down the decompression session: detaches it from the media source,
// invalidates it, and resets the available-frame flag. Idempotent.
void MediaPlayerPrivateMediaSourceAVFObjC::destroyDecompressionSession()
{
    if (!m_decompressionSession)
        return;

    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setDecompressionSession(nullptr);

    m_decompressionSession->invalidate();
    m_decompressionSession = nullptr;
    setHasAvailableVideoFrame(false);
}
778
// Playback should be active only when the client asked to play, no seek is
// pending, every renderer has samples, and enough data is buffered.
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldBePlaying() const
{
    return m_playing && !seeking() && allRenderersHaveAvailableSamples() && m_readyState >= MediaPlayer::HaveFutureData;
}
783
// Updates the displayable-frame flag. On the first available frame this also
// unblocks a seek parked in WaitingForAvailableFame and a ready-state change
// that was deferred until a frame could be shown.
void MediaPlayerPrivateMediaSourceAVFObjC::setHasAvailableVideoFrame(bool flag)
{
    if (m_hasAvailableVideoFrame == flag)
        return;

    DEBUG_LOG(LOGIDENTIFIER, flag);
    m_hasAvailableVideoFrame = flag;
    updateAllRenderersHaveAvailableSamples();

    // The notifications below only apply when a frame became available.
    if (!m_hasAvailableVideoFrame)
        return;

    m_player->firstVideoFrameAvailable();
    if (m_seekCompleted == WaitingForAvailableFame)
        seekCompleted();

    if (m_readyStateIsWaitingForAvailableFrame) {
        m_readyStateIsWaitingForAvailableFrame = false;
        m_player->readyStateChanged();
    }
}
805
// Records whether the given audio renderer has audible samples queued, then
// re-evaluates whether all renderers are ready. Unknown renderers (already
// removed from the map) are ignored.
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::setHasAvailableAudioSample(AVSampleBufferAudioRenderer* renderer, bool flag)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    auto iter = m_sampleBufferAudioRendererMap.find((__bridge CFTypeRef)renderer);
    if (iter == m_sampleBufferAudioRendererMap.end())
        return;

    auto& properties = iter->value;
    if (properties.hasAudibleSample == flag)
        return;
    DEBUG_LOG(LOGIDENTIFIER, flag);
    properties.hasAudibleSample = flag;
    updateAllRenderersHaveAvailableSamples();
}
821
// Recomputes whether every renderer (video frame + all audio renderers) has
// samples ready, and starts or stalls the synchronizer accordingly. This is
// how the player pauses itself when a renderer underruns.
void MediaPlayerPrivateMediaSourceAVFObjC::updateAllRenderersHaveAvailableSamples()
{
    bool allRenderersHaveAvailableSamples = true;

    // do { } while (0) is used only as a breakable scope.
    do {
        if (hasVideo() && !m_hasAvailableVideoFrame) {
            allRenderersHaveAvailableSamples = false;
            break;
        }

        for (auto& properties : m_sampleBufferAudioRendererMap.values()) {
            if (!properties.hasAudibleSample) {
                allRenderersHaveAvailableSamples = false;
                break;
            }
        }
    } while (0);

    if (m_allRenderersHaveAvailableSamples == allRenderersHaveAvailableSamples)
        return;

    DEBUG_LOG(LOGIDENTIFIER, allRenderersHaveAvailableSamples);
    m_allRenderersHaveAvailableSamples = allRenderersHaveAvailableSamples;

    // Resume if everything is ready; stall if something ran dry mid-playback.
    if (shouldBePlaying() && [m_synchronizer rate] != m_rate)
        [m_synchronizer setRate:m_rate];
    else if (!shouldBePlaying() && [m_synchronizer rate])
        [m_synchronizer setRate:0];
}
851
void MediaPlayerPrivateMediaSourceAVFObjC::durationChanged()
{
    // Notifies the client and re-installs the boundary observer that pauses
    // playback when the synchronizer reaches the (new) duration.
    m_player->durationChanged();

    // Replace any previously installed end-of-media observer.
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];

    if (!m_mediaSourcePrivate)
        return;

    MediaTime duration = m_mediaSourcePrivate->duration();
    auto weakThis = createWeakPtr();
    NSArray* times = @[[NSValue valueWithCMTime:PAL::toCMTime(duration)]];

    auto logSiteIdentifier = LOGIDENTIFIER;
    DEBUG_LOG(logSiteIdentifier, duration);
    UNUSED_PARAM(logSiteIdentifier);

    // The block captures weakThis so it becomes a no-op if this player is destroyed
    // before the observer fires.
    m_durationObserver = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[weakThis, duration, logSiteIdentifier, this] {
        if (!weakThis)
            return;

        MediaTime now = weakThis->currentMediaTime();
        DEBUG_LOG(logSiteIdentifier, "boundary time observer called, now = ", now);

        weakThis->pauseInternal();
        // The observer should only fire at or past the duration; if it fired
        // early, snap the synchronizer to the duration with rate 0.
        if (now < duration) {
            ERROR_LOG(logSiteIdentifier, "ERROR: boundary time observer called before duration");
            [weakThis->m_synchronizer setRate:0 time:PAL::toCMTime(duration)];
        }
        weakThis->m_player->timeChanged();

    }];

    // If the duration shrank to (or below) the current time, stop immediately.
    if (m_playing && duration <= currentMediaTime())
        pauseInternal();
}
889
// Called when the synchronizer's effective playback rate changes; forwards to the client.
void MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged()
{
    m_player->rateChanged();
}
894
void MediaPlayerPrivateMediaSourceAVFObjC::sizeWillChangeAtTime(const MediaTime& time, const FloatSize& size)
{
    // Schedules the natural-size change to take effect when playback reaches |time|.
    // Observers are appended in presentation order, so when a boundary fires the
    // oldest queued observer is the one that fired and can be removed.
    auto weakThis = m_sizeChangeObserverWeakPtrFactory.createWeakPtr(*this);
    NSArray* times = @[[NSValue valueWithCMTime:PAL::toCMTime(time)]];
    RetainPtr<id> observer = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[this, weakThis, size] {
        if (!weakThis)
            return;

        ASSERT(!m_sizeChangeObservers.isEmpty());
        if (!m_sizeChangeObservers.isEmpty()) {
            RetainPtr<id> observer = m_sizeChangeObservers.takeFirst();
            [m_synchronizer removeTimeObserver:observer.get()];
        }
        setNaturalSize(size);
    }];
    m_sizeChangeObservers.append(WTFMove(observer));

    // If playback has already passed the boundary, apply the new size right away.
    if (currentMediaTime() >= time)
        setNaturalSize(size);
}
915
void MediaPlayerPrivateMediaSourceAVFObjC::setNaturalSize(const FloatSize& size)
{
    // Nothing to do unless the size actually changed.
    if (m_naturalSize == size)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, size);

    // Cache the new size, then tell the client so it can relayout.
    m_naturalSize = size;
    m_player->sizeChanged();
}
926
void MediaPlayerPrivateMediaSourceAVFObjC::flushPendingSizeChanges()
{
    // Drop every queued size-change boundary observer without letting it fire.
    while (!m_sizeChangeObservers.isEmpty())
        [m_synchronizer removeTimeObserver:m_sizeChangeObservers.takeFirst().get()];
    // Invalidate weak pointers held by any in-flight observer blocks.
    m_sizeChangeObserverWeakPtrFactory.revokeAll();
}
935
936 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
937 #if HAVE(AVSTREAMSESSION)
AVStreamSession* MediaPlayerPrivateMediaSourceAVFObjC::streamSession()
{
    // Lazily creates the AVStreamSession used for legacy encrypted-media playback.
    // Bail out when the class (or the storage-directory initializer) is unavailable.
    if (!getAVStreamSessionClass() || ![getAVStreamSessionClass() instancesRespondToSelector:@selector(initWithStorageDirectoryAtURL:)])
        return nil;

    if (!m_streamSession) {
        // A persistent storage directory is required; without one configured on
        // the player, no session can be created.
        String storageDirectory = m_player->mediaKeysStorageDirectory();
        if (storageDirectory.isEmpty())
            return nil;

        if (!FileSystem::fileExists(storageDirectory)) {
            if (!FileSystem::makeAllDirectories(storageDirectory))
                return nil;
        }

        String storagePath = FileSystem::pathByAppendingComponent(storageDirectory, "SecureStop.plist");
        m_streamSession = adoptNS([allocAVStreamSessionInstance() initWithStorageDirectoryAtURL:[NSURL fileURLWithPath:storagePath]]);
    }
    return m_streamSession.get();
}
958 #endif
959
void MediaPlayerPrivateMediaSourceAVFObjC::setCDMSession(LegacyCDMSession* session)
{
    if (session == m_session)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    // The session is held weakly; its owner controls its lifetime.
    m_session = makeWeakPtr(toCDMSessionMediaSourceAVFObjC(session));

#if HAVE(AVSTREAMSESSION)
    // Stream-session-backed CDM sessions need our AVStreamSession to decrypt.
    if (CDMSessionAVStreamSession* cdmStreamSession = toCDMSessionAVStreamSession(m_session.get()))
        cdmStreamSession->setStreamSession(streamSession());
#endif

    // Propagate the new session to every source buffer so queued samples can decrypt.
    for (auto& sourceBuffer : m_mediaSourcePrivate->sourceBuffers())
        sourceBuffer->setCDMSession(m_session.get());
}
977 #endif // ENABLE(LEGACY_ENCRYPTED_MEDIA)
978
979 #if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
// Forwards an encrypted-media "key needed" notification, with its init data, to the client.
void MediaPlayerPrivateMediaSourceAVFObjC::keyNeeded(Uint8Array* initData)
{
    m_player->keyNeeded(initData);
}
984 #endif
985
void MediaPlayerPrivateMediaSourceAVFObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool obscured)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // Forward the external-protection (output obscured) state change to the
    // media source, which decides how protected playback should react.
    ALWAYS_LOG(LOGIDENTIFIER, obscured);
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
#else
    UNUSED_PARAM(obscured);
#endif
}
996
997 #if ENABLE(ENCRYPTED_MEDIA)
void MediaPlayerPrivateMediaSourceAVFObjC::cdmInstanceAttached(CDMInstance& instance)
{
    ALWAYS_LOG(LOGIDENTIFIER);
    // Without a media source there is nowhere to attach the CDM instance.
    if (!m_mediaSourcePrivate)
        return;
    m_mediaSourcePrivate->cdmInstanceAttached(instance);
}
1004
void MediaPlayerPrivateMediaSourceAVFObjC::cdmInstanceDetached(CDMInstance& instance)
{
    ALWAYS_LOG(LOGIDENTIFIER);
    // Without a media source there is nothing to detach the CDM instance from.
    if (!m_mediaSourcePrivate)
        return;
    m_mediaSourcePrivate->cdmInstanceDetached(instance);
}
1011
void MediaPlayerPrivateMediaSourceAVFObjC::attemptToDecryptWithInstance(CDMInstance& instance)
{
    ALWAYS_LOG(LOGIDENTIFIER);
    // Decryption attempts are delegated to the media source, if one exists.
    if (!m_mediaSourcePrivate)
        return;
    m_mediaSourcePrivate->attemptToDecryptWithInstance(instance);
}
1018
bool MediaPlayerPrivateMediaSourceAVFObjC::waitingForKey() const
{
    // No media source means no pending key request.
    return m_mediaSourcePrivate && m_mediaSourcePrivate->waitingForKey();
}
1023
// Forwards a change of the waiting-for-key state to the client.
void MediaPlayerPrivateMediaSourceAVFObjC::waitingForKeyChanged()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    m_player->waitingForKeyChanged();
}
1029
// Forwards encrypted-media initialization data (and its type) found in the stream to the client.
void MediaPlayerPrivateMediaSourceAVFObjC::initializationDataEncountered(const String& initDataType, RefPtr<ArrayBuffer>&& initData)
{
    ALWAYS_LOG(LOGIDENTIFIER, initDataType);
    m_player->initializationDataEncountered(initDataType, WTFMove(initData));
}
1035 #endif
1036
// Returns the client's list of content types that must be hardware-decoded.
const Vector<ContentType>& MediaPlayerPrivateMediaSourceAVFObjC::mediaContentTypesRequiringHardwareSupport() const
{
    return m_player->mediaContentTypesRequiringHardwareSupport();
}
1041
// Returns whether the client wants hardware decode support verified for new content.
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldCheckHardwareSupport() const
{
    return m_player->shouldCheckHardwareSupport();
}
1046
void MediaPlayerPrivateMediaSourceAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (readyState == m_readyState)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, readyState);
    m_readyState = readyState;

    // Keep the synchronizer's rate in step with the new ready state.
    [m_synchronizer setRate:shouldBePlaying() ? m_rate : 0];

    // When video is present but no frame has been displayed yet, defer notifying
    // the client until the first frame arrives (see setHasAvailableVideoFrame).
    if (m_readyState >= MediaPlayerEnums::HaveCurrentData && hasVideo() && !m_hasAvailableVideoFrame) {
        m_readyStateIsWaitingForAvailableFrame = true;
        return;
    }

    m_player->readyStateChanged();
}
1067
void MediaPlayerPrivateMediaSourceAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    // Only notify the client on an actual state transition.
    if (networkState == m_networkState)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, networkState);
    m_networkState = networkState;
    m_player->networkStateChanged();
}
1077
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    // The map is keyed by the renderer pointer bridged to CFTypeRef; adding a
    // renderer that is already tracked is a no-op.
    if (!m_sampleBufferAudioRendererMap.add((__bridge CFTypeRef)audioRenderer, AudioRendererProperties()).isNewEntry)
        return;

    // Apply the player's current audio settings to the new renderer.
    [audioRenderer setMuted:m_player->muted()];
    [audioRenderer setVolume:m_player->volume()];
    [audioRenderer setAudioTimePitchAlgorithm:(m_player->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    // Attach the renderer to the synchronizer so it follows the shared timebase.
    [m_synchronizer addRenderer:audioRenderer];
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
1092
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    auto iter = m_sampleBufferAudioRendererMap.find((__bridge CFTypeRef)audioRenderer);
    if (iter == m_sampleBufferAudioRendererMap.end())
        return;

    // Detach the renderer from the synchronizer at the current timebase time.
    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:audioRenderer atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    m_sampleBufferAudioRendererMap.remove(iter);
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
1109
// Track characteristics changed (e.g. audio/video tracks added or removed);
// refresh sample availability and notify the client.
void MediaPlayerPrivateMediaSourceAVFObjC::characteristicsChanged()
{
    updateAllRenderersHaveAvailableSamples();
    m_player->characteristicChanged();
}
1115
void MediaPlayerPrivateMediaSourceAVFObjC::setVideoFullscreenLayer(PlatformLayer *videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    // Capture the latest frame first so the fullscreen layer can display it
    // while the video layer is re-parented.
    updateLastImage();
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
}
1121
// Forwards the fullscreen layer's frame rect to the layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1126
bool MediaPlayerPrivateMediaSourceAVFObjC::requiresTextTrackRepresentation() const
{
    // A text track representation is only needed while a fullscreen layer is active.
    return m_videoFullscreenLayerManager->videoFullscreenLayer() != nullptr;
}
1131     
// Keeps the text track (caption) layer's bounds in sync with the video layer.
void MediaPlayerPrivateMediaSourceAVFObjC::syncTextTrackBounds()
{
    m_videoFullscreenLayerManager->syncTextTrackBounds();
}
1136     
// Forwards the caption-rendering layer to the fullscreen layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
}
1141
1142 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Stores the wireless playback target; actual routing is governed by
// setShouldPlayToPlaybackTarget() and the target's active route.
void MediaPlayerPrivateMediaSourceAVFObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);
}
1147
void MediaPlayerPrivateMediaSourceAVFObjC::setShouldPlayToPlaybackTarget(bool shouldPlayToTarget)
{
    if (shouldPlayToTarget == m_shouldPlayToTarget)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, shouldPlayToTarget);
    m_shouldPlayToTarget = shouldPlayToTarget;

    // The wireless state depends on both this flag and the target's active route
    // (see isCurrentPlaybackTargetWireless), so tell the client it may have changed.
    if (m_player)
        m_player->currentPlaybackTargetIsWirelessChanged();
}
1159
bool MediaPlayerPrivateMediaSourceAVFObjC::isCurrentPlaybackTargetWireless() const
{
    // Without a target object we cannot be playing wirelessly.
    if (!m_playbackTarget)
        return false;

    // Wireless playback requires both the flag and an active route on the target.
    bool wireless = m_shouldPlayToTarget && m_playbackTarget->hasActiveRoute();
    INFO_LOG(LOGIDENTIFIER, wireless);
    return wireless;
}
1169 #endif
1170
bool MediaPlayerPrivateMediaSourceAVFObjC::performTaskAtMediaTime(WTF::Function<void()>&& task, MediaTime time)
{
    // The task is captured __block so the Objective-C block can take ownership
    // of the move-only WTF::Function.
    __block WTF::Function<void()> taskIn = WTFMove(task);

    // Only one pending task is supported; replace any previously scheduled observer.
    if (m_performTaskObserver)
        [m_synchronizer removeTimeObserver:m_performTaskObserver.get()];

    // Qualify the conversion as PAL::toCMTime for consistency with the other
    // MediaTime conversions in this file.
    m_performTaskObserver = [m_synchronizer addBoundaryTimeObserverForTimes:@[[NSValue valueWithCMTime:PAL::toCMTime(time)]] queue:dispatch_get_main_queue() usingBlock:^{
        taskIn();
    }];
    return true;
}
1183
1184 #if !RELEASE_LOG_DISABLED
// Log channel used by the ALWAYS_LOG/DEBUG_LOG/ERROR_LOG macros in this class.
WTFLogChannel& MediaPlayerPrivateMediaSourceAVFObjC::logChannel() const
{
    return LogMediaSource;
}
1189 #endif
1190
1191 }
1192
1193 #endif