[WTF] Add makeUnique<T>, which ensures T is fast-allocated, makeUnique / makeUniqueWi...
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateMediaSourceAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVAssetMIMETypeCache.h"
32 #import "AVAssetTrackUtilities.h"
33 #import "AVStreamDataParserMIMETypeCache.h"
34 #import "CDMSessionAVStreamSession.h"
35 #import "GraphicsContextCG.h"
36 #import "Logging.h"
37 #import "MediaSourcePrivateAVFObjC.h"
38 #import "MediaSourcePrivateClient.h"
39 #import "PixelBufferConformerCV.h"
40 #import "TextTrackRepresentation.h"
41 #import "TextureCacheCV.h"
42 #import "VideoFullscreenLayerManagerObjC.h"
43 #import "VideoTextureCopierCV.h"
44 #import "WebCoreDecompressionSession.h"
45 #import <AVFoundation/AVAsset.h>
46 #import <AVFoundation/AVTime.h>
47 #import <QuartzCore/CALayer.h>
#import <objc/runtime.h>
49 #import <pal/avfoundation/MediaTimeAVFoundation.h>
50 #import <pal/spi/mac/AVFoundationSPI.h>
51 #import <wtf/Deque.h>
52 #import <wtf/FileSystem.h>
53 #import <wtf/MainThread.h>
54 #import <wtf/NeverDestroyed.h>
55 #import <wtf/WeakPtr.h>
56
57 #import "CoreVideoSoftLink.h"
58 #import <pal/cf/CoreMediaSoftLink.h>
59 #import <pal/cocoa/AVFoundationSoftLink.h>
60
61 #pragma mark -
62 #pragma mark AVStreamSession
63
// SPI forward declaration: AVStreamSession is a private AVFoundation class;
// only the initializer this file needs is declared here.
@interface AVStreamSession : NSObject
- (instancetype)initWithStorageDirectoryAtURL:(NSURL *)storageDirectory;
@end
67
// SPI: private property on AVSampleBufferDisplayLayer. Availability is probed
// with respondsToSelector: before use (see ensureLayer below).
@interface AVSampleBufferDisplayLayer (WebCorePrivate)
@property (assign, nonatomic) BOOL preventDisplaySleepForVideoPlayback;
@end
71
72 namespace WebCore {
73 using namespace PAL;
74
// Returns a human-readable name for a SeekState value (logging only). The
// static_asserts pin the enum ordering so the table stays in sync with the
// header. NOTE: "WaitingForAvailableFame" is the enum's existing spelling
// [sic]; it cannot be corrected here without renaming the enum in the header.
String convertEnumerationToString(MediaPlayerPrivateMediaSourceAVFObjC::SeekState enumerationValue)
{
    static const NeverDestroyed<String> values[] = {
        MAKE_STATIC_STRING_IMPL("Seeking"),
        MAKE_STATIC_STRING_IMPL("WaitingForAvailableFame"),
        MAKE_STATIC_STRING_IMPL("SeekCompleted"),
    };
    static_assert(static_cast<size_t>(MediaPlayerPrivateMediaSourceAVFObjC::SeekState::Seeking) == 0, "MediaPlayerPrivateMediaSourceAVFObjC::SeekState::Seeking is not 0 as expected");
    static_assert(static_cast<size_t>(MediaPlayerPrivateMediaSourceAVFObjC::SeekState::WaitingForAvailableFame) == 1, "MediaPlayerPrivateMediaSourceAVFObjC::SeekState::WaitingForAvailableFame is not 1 as expected");
    static_assert(static_cast<size_t>(MediaPlayerPrivateMediaSourceAVFObjC::SeekState::SeekCompleted) == 2, "MediaPlayerPrivateMediaSourceAVFObjC::SeekState::SeekCompleted is not 2 as expected");
    ASSERT(static_cast<size_t>(enumerationValue) < WTF_ARRAY_LENGTH(values));
    return values[static_cast<size_t>(enumerationValue)];
}
88     
89 #pragma mark -
90 #pragma mark MediaPlayerPrivateMediaSourceAVFObjC
91
// Bridges CoreMedia timebase rate-change notifications (which may arrive on an
// arbitrary thread) onto the main thread and forwards them to the player.
// Thread-safe ref counting keeps the listener alive across the thread hop; the
// WeakPtr guards against the player being destroyed in the meantime.
class EffectiveRateChangedListener : public ThreadSafeRefCounted<EffectiveRateChangedListener> {
public:
    static Ref<EffectiveRateChangedListener> create(MediaPlayerPrivateMediaSourceAVFObjC& client, CMTimebaseRef timebase)
    {
        return adoptRef(*new EffectiveRateChangedListener(client, timebase));
    }

    // Invoked from the CoreMedia notification callback; may run off the main thread.
    void effectiveRateChanged()
    {
        callOnMainThread([this, strongThis = makeRef(*this)] {
            if (!m_client)
                return;
            m_client->effectiveRateChanged();
        });
    }

    // Unregisters this listener from the timebase's notification center.
    void stop(CMTimebaseRef);

private:
    EffectiveRateChangedListener(MediaPlayerPrivateMediaSourceAVFObjC&, CMTimebaseRef);

    WeakPtr<MediaPlayerPrivateMediaSourceAVFObjC> m_client;
};
114
// CoreMedia notification trampoline: the registered listener pointer is the
// EffectiveRateChangedListener instance itself (see the constructor below).
static void CMTimebaseEffectiveRateChangedCallback(CMNotificationCenterRef, const void *listener, CFStringRef, const void *, CFTypeRef)
{
    auto* rateChangedListener = static_cast<EffectiveRateChangedListener*>(const_cast<void*>(listener));
    rateChangedListener->effectiveRateChanged();
}
120
void EffectiveRateChangedListener::stop(CMTimebaseRef timebase)
{
    // Must mirror the AddListener call in the constructor exactly, or the
    // registration leaks and the callback can fire on a dead listener.
    CMNotificationCenterRemoveListener(CMNotificationCenterGetDefaultLocalCenter(), this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase);
}
126
EffectiveRateChangedListener::EffectiveRateChangedListener(MediaPlayerPrivateMediaSourceAVFObjC& client, CMTimebaseRef timebase)
    : m_client(makeWeakPtr(client))
{
    // Register for effective-rate changes on the synchronizer's timebase; the
    // raw `this` pointer is round-tripped through the callback's listener arg.
    CMNotificationCenterAddListener(CMNotificationCenterGetDefaultLocalCenter(), this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase, 0);
}
133
// Creates the private player. A single AVSampleBufferRenderSynchronizer drives
// all renderers (video layer and audio renderers share its timebase).
MediaPlayerPrivateMediaSourceAVFObjC::MediaPlayerPrivateMediaSourceAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_synchronizer(adoptNS([PAL::allocAVSampleBufferRenderSynchronizerInstance() init]))
    , m_seekTimer(*this, &MediaPlayerPrivateMediaSourceAVFObjC::seekInternal)
    , m_networkState(MediaPlayer::Empty)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_rate(1)
    , m_playing(0)
    , m_seeking(false)
    , m_loadingProgressed(false)
    , m_videoFullscreenLayerManager(makeUnique<VideoFullscreenLayerManagerObjC>())
    , m_effectiveRateChangedListener(EffectiveRateChangedListener::create(*this, [m_synchronizer timebase]))
#if !RELEASE_LOG_DISABLED
    , m_logger(player->mediaPlayerLogger())
    , m_logIdentifier(player->mediaPlayerLogIdentifier())
#endif
{
    auto logSiteIdentifier = LOGIDENTIFIER;
    ALWAYS_LOG(logSiteIdentifier);
    UNUSED_PARAM(logSiteIdentifier);

    // addPeriodicTimeObserverForInterval: throws an exception if you pass a non-numeric CMTime, so just use
    // an arbitrarily large time value of once an hour:
    __block auto weakThis = makeWeakPtr(*this);
    m_timeJumpedObserver = [m_synchronizer addPeriodicTimeObserverForInterval:PAL::toCMTime(MediaTime::createWithDouble(3600)) queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
#if LOG_DISABLED
        UNUSED_PARAM(time);
#endif
        // FIXME: Remove the below once <rdar://problem/15798050> is fixed.
        if (!weakThis)
            return;

        DEBUG_LOG(logSiteIdentifier, "synchronizer fired for ", toMediaTime(time), ", seeking = ", m_seeking, ", pending = ", !!m_pendingSeek);

        // The observer also fires when the synchronizer's time jumps (e.g.
        // after -setRate:time:); that is how we learn a seek took effect.
        // With no newer pending seek, the in-flight seek is now settled.
        if (m_seeking && !m_pendingSeek) {
            m_seeking = false;

            if (shouldBePlaying())
                [m_synchronizer setRate:m_rate];
            if (!seeking() && m_seekCompleted == SeekCompleted)
                m_player->timeChanged();
        }

        // A new seek arrived while this one was settling; start it now.
        if (m_pendingSeek)
            seekInternal();
    }];
}
181
// Tear-down order matters: stop external notification sources first, then
// detach time observers from the synchronizer, then destroy the render paths.
MediaPlayerPrivateMediaSourceAVFObjC::~MediaPlayerPrivateMediaSourceAVFObjC()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    // Balance the CMNotificationCenter registration made in the listener's
    // constructor; it only holds a WeakPtr back to us, but the registration
    // itself must be removed explicitly.
    m_effectiveRateChangedListener->stop([m_synchronizer timebase]);

    if (m_timeJumpedObserver)
        [m_synchronizer removeTimeObserver:m_timeJumpedObserver.get()];
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];
    flushPendingSizeChanges();

    destroyLayer();
    destroyDecompressionSession();

    m_seekTimer.stop();
}
199
200 #pragma mark -
201 #pragma mark MediaPlayer Factory Methods
202
// Registers this engine's factory and capability callbacks with MediaPlayer.
// No-op when the required AVFoundation/CoreMedia classes are unavailable.
void MediaPlayerPrivateMediaSourceAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return makeUnique<MediaPlayerPrivateMediaSourceAVFObjC>(player); },
        getSupportedTypes, supportsType, nullptr, nullptr, nullptr, nullptr);
    ASSERT(AVAssetMIMETypeCache::singleton().isAvailable());
}
212
// Reports whether every soft-linked framework class this engine needs exists.
// Each check is kept in the original order (checks may trigger soft-linking).
bool MediaPlayerPrivateMediaSourceAVFObjC::isAvailable()
{
    if (!PAL::isAVFoundationFrameworkAvailable())
        return false;
    if (!isCoreMediaFrameworkAvailable())
        return false;
    if (!getAVStreamDataParserClass())
        return false;
    if (!getAVSampleBufferAudioRendererClass())
        return false;
    if (!getAVSampleBufferRenderSynchronizerClass())
        return false;
    // -setMuted: is required; older audio renderer builds lack it.
    return !!class_getInstanceMethod(getAVSampleBufferAudioRendererClass(), @selector(setMuted:));
}
222
// Fills `types` with the MIME types this engine can play. Prefers the
// AVStreamDataParser-derived list; falls back to AVAsset's when the parser
// cache is unavailable; leaves `types` untouched when neither is.
void MediaPlayerPrivateMediaSourceAVFObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    auto& parserCache = AVStreamDataParserMIMETypeCache::singleton();
    if (parserCache.isAvailable()) {
        types = parserCache.types();
        return;
    }

    auto& fallbackCache = AVAssetMIMETypeCache::singleton();
    if (fallbackCache.isAvailable())
        types = fallbackCache.types();
}
235
// Reports whether this engine can play the described content. Returns
// IsSupported only when the codec string can be confidently validated;
// otherwise follows the spec's advice and answers MayBeSupported.
MediaPlayer::SupportsType MediaPlayerPrivateMediaSourceAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    // This engine does not support non-media-source sources.
    if (!parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    if (parameters.type.isEmpty())
        return MediaPlayer::IsNotSupported;

    // Container-type check: prefer the stream-parser cache, fall back to the
    // AVAsset cache, and bail if neither framework class is available.
    if (AVStreamDataParserMIMETypeCache::singleton().isAvailable()) {
        if (!AVStreamDataParserMIMETypeCache::singleton().supportsContentType(parameters.type))
            return MediaPlayer::IsNotSupported;
    } else if (AVAssetMIMETypeCache::singleton().isAvailable()) {
        if (!AVAssetMIMETypeCache::singleton().supportsContentType(parameters.type))
            return MediaPlayer::IsNotSupported;
    } else
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    auto codecs = parameters.type.parameter(ContentType::codecsParameter());
    if (codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // AVStreamDataParser may rewrite codec parameters on output; validate
    // against the form it would actually produce, when the SPI exists.
    String outputCodecs = codecs;
    if ([PAL::getAVStreamDataParserClass() respondsToSelector:@selector(outputMIMECodecParameterForInputMIMECodecParameter:)])
        outputCodecs = [PAL::getAVStreamDataParserClass() outputMIMECodecParameterForInputMIMECodecParameter:outputCodecs];

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    String type = makeString(parameters.type.containerType(), "; codecs=\"", outputCodecs, "\"");
    if (AVStreamDataParserMIMETypeCache::singleton().isAvailable())
        return AVStreamDataParserMIMETypeCache::singleton().canDecodeType(type) ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
    return AVAssetMIMETypeCache::singleton().canDecodeType(type) ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
276
277 #pragma mark -
278 #pragma mark MediaPlayerPrivateInterface Overrides
279
// Direct-URL loading is unsupported; only the MediaSource overload below can
// feed this engine, so report a format error immediately.
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String&)
{
    // This media engine only supports MediaSource URLs.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
}
286
// MSE entry point: attaches a MediaSourcePrivate and hands it whichever
// renderer (display layer and/or decompression session) currently exists.
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String&, MediaSourcePrivateClient* client)
{
    ALWAYS_LOG(LOGIDENTIFIER);

    m_mediaSourcePrivate = MediaSourcePrivateAVFObjC::create(this, client);
    m_mediaSourcePrivate->setVideoLayer(m_sampleBufferDisplayLayer.get());
    m_mediaSourcePrivate->setDecompressionSession(m_decompressionSession.get());

    // Pick (and create) the appropriate rendering path now.
    acceleratedRenderingStateChanged();
}
297
#if ENABLE(MEDIA_STREAM)
// MediaStream sources are not supported by this engine.
void MediaPlayerPrivateMediaSourceAVFObjC::load(MediaStreamPrivate&)
{
    setNetworkState(MediaPlayer::FormatError);
}
#endif
304
// Intentionally empty: loading is driven by the MediaSource object, not by
// this engine, so there is no in-flight network activity to cancel here.
void MediaPlayerPrivateMediaSourceAVFObjC::cancelLoad()
{
}
308
// Intentionally empty: renderers are created lazily via
// acceleratedRenderingStateChanged(); no extra preparation is needed.
void MediaPlayerPrivateMediaSourceAVFObjC::prepareToPlay()
{
}
312
// Returns the layer WebCore composites; the fullscreen manager owns the
// inline container layer that hosts the AVSampleBufferDisplayLayer.
PlatformLayer* MediaPlayerPrivateMediaSourceAVFObjC::platformLayer() const
{
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
317
// Public play request: defer to the main thread, guarding against the player
// being destroyed before the deferred task runs.
void MediaPlayerPrivateMediaSourceAVFObjC::play()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    callOnMainThread([weakThis = makeWeakPtr(*this)] {
        if (weakThis)
            weakThis->playInternal();
    });
}
327
// Actually starts playback on the main thread. Refuses to play at/after the
// presentation's end.
void MediaPlayerPrivateMediaSourceAVFObjC::playInternal()
{
    // play() dispatches here asynchronously, so we can run before a media
    // source is attached (or after it was torn down). The original code
    // dereferenced m_mediaSourcePrivate unconditionally below.
    if (!m_mediaSourcePrivate)
        return;

    if (currentMediaTime() >= m_mediaSourcePrivate->duration()) {
        ALWAYS_LOG(LOGIDENTIFIER, "bailing, current time: ", currentMediaTime(), " greater than duration ", m_mediaSourcePrivate->duration());
        return;
    }

    ALWAYS_LOG(LOGIDENTIFIER);
    m_playing = true;
    // shouldBePlaying() also requires buffered data and no in-flight seek.
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}
340
// Public pause request: defer to the main thread, guarding against the player
// being destroyed before the deferred task runs.
void MediaPlayerPrivateMediaSourceAVFObjC::pause()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    callOnMainThread([weakThis = makeWeakPtr(*this)] {
        if (weakThis)
            weakThis->pauseInternal();
    });
}
350
// Actually pauses on the main thread: a rate of zero halts the synchronizer
// and every renderer attached to it.
void MediaPlayerPrivateMediaSourceAVFObjC::pauseInternal()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    m_playing = false;
    [m_synchronizer setRate:0];
}
357
// Paused is defined purely by the synchronizer's playback rate.
bool MediaPlayerPrivateMediaSourceAVFObjC::paused() const
{
    return [m_synchronizer rate] == 0;
}
362
// Applies the volume to every audio renderer; there is no player-level control.
void MediaPlayerPrivateMediaSourceAVFObjC::setVolume(float volume)
{
    ALWAYS_LOG(LOGIDENTIFIER, volume);
    for (const auto& key : m_sampleBufferAudioRendererMap.keys()) {
        auto *renderer = (__bridge AVSampleBufferAudioRenderer *)key.get();
        [renderer setVolume:volume];
    }
}
369
// Fast-forward/rewind scanning is supported (the synchronizer accepts
// arbitrary non-negative rates; see setRateDouble).
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsScanning() const
{
    return true;
}
374
// Applies the muted state to every audio renderer individually.
void MediaPlayerPrivateMediaSourceAVFObjC::setMuted(bool muted)
{
    ALWAYS_LOG(LOGIDENTIFIER, muted);
    for (const auto& key : m_sampleBufferAudioRendererMap.keys()) {
        auto *renderer = (__bridge AVSampleBufferAudioRenderer *)key.get();
        [renderer setMuted:muted];
    }
}
381
// Returns the cached natural size (updated elsewhere as tracks change).
FloatSize MediaPlayerPrivateMediaSourceAVFObjC::naturalSize() const
{
    return m_naturalSize;
}
386
// A player with no attached media source has no tracks at all.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasVideo() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->hasVideo();
}
394
// A player with no attached media source has no tracks at all.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasAudio() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->hasAudio();
}
402
// Tracks element visibility; becoming visible may require (re)creating the
// display layer, so re-evaluate the rendering path then.
void MediaPlayerPrivateMediaSourceAVFObjC::setVisible(bool visible)
{
    if (visible == m_visible)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, visible);
    m_visible = visible;
    if (visible)
        acceleratedRenderingStateChanged();
}
413
// Duration is owned by the media source; report zero until one is attached.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::durationMediaTime() const
{
    if (!m_mediaSourcePrivate)
        return MediaTime::zeroTime();
    return m_mediaSourcePrivate->duration();
}
418
// Reads the current playback position from the synchronizer's timebase.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::currentMediaTime() const
{
    MediaTime synchronizerTime = PAL::toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
    // Clamp negative timebase readings to zero. NOTE: this check deliberately
    // precedes the seek-time check, so a negative reading reports zero even
    // while a later seek target is pending.
    if (synchronizerTime < MediaTime::zeroTime())
        return MediaTime::zeroTime();
    // While a seek settles, the timebase may still report the pre-seek time;
    // report the seek target instead so currentTime never appears to run
    // backwards across a seek.
    if (synchronizerTime < m_lastSeekTime)
        return m_lastSeekTime;
    return synchronizerTime;
}
428
// MSE presentations always start at time zero.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::startTime() const
{
    return MediaTime::zeroTime();
}
433
// No initial-time metadata for MSE; playback begins at zero.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::initialTime() const
{
    return MediaTime::zeroTime();
}
438
// Records a seek request and coalesces rapid seeks: only the latest pending
// seek survives, and a zero-delay one-shot timer defers the actual work
// (seekInternal) to the next run-loop turn.
void MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(const MediaTime& time, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold)
{
    INFO_LOG(LOGIDENTIFIER, "time = ", time, ", negativeThreshold = ", negativeThreshold, ", positiveThreshold = ", positiveThreshold);

    m_seeking = true;
    m_pendingSeek = makeUnique<PendingSeek>(time, negativeThreshold, positiveThreshold);

    if (m_seekTimer.isActive())
        m_seekTimer.stop();
    m_seekTimer.startOneShot(0_s);
}
450
// Performs the seek recorded by seekWithTolerance(). Runs from the seek timer,
// or directly from the periodic time observer when a newer seek is pending.
void MediaPlayerPrivateMediaSourceAVFObjC::seekInternal()
{
    // Take ownership of the pending request; a seek arriving after this point
    // starts the cycle over.
    std::unique_ptr<PendingSeek> pendingSeek;
    pendingSeek.swap(m_pendingSeek);

    if (!pendingSeek)
        return;

    if (!m_mediaSourcePrivate)
        return;

    // With zero tolerance seek exactly; otherwise let the media source snap to
    // the best time (e.g. a sync sample) inside the allowed window.
    if (!pendingSeek->negativeThreshold && !pendingSeek->positiveThreshold)
        m_lastSeekTime = pendingSeek->targetTime;
    else
        m_lastSeekTime = m_mediaSourcePrivate->fastSeekTimeForMediaTime(pendingSeek->targetTime, pendingSeek->positiveThreshold, pendingSeek->negativeThreshold);

    // Normalize double-backed times onto the default timescale so the equality
    // comparison against the synchronizer's rational time below behaves.
    if (m_lastSeekTime.hasDoubleValue())
        m_lastSeekTime = MediaTime::createWithDouble(m_lastSeekTime.toDouble(), MediaTime::DefaultTimeScale);

    MediaTime synchronizerTime = PAL::toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
    INFO_LOG(LOGIDENTIFIER, "seekTime = ", m_lastSeekTime, ", synchronizerTime = ", synchronizerTime);

    bool doesNotRequireSeek = synchronizerTime == m_lastSeekTime;

    m_mediaSourcePrivate->willSeek();
    // Halt playback and jump the synchronizer to the target time.
    [m_synchronizer setRate:0 time:PAL::toCMTime(m_lastSeekTime)];
    m_mediaSourcePrivate->seekToTime(m_lastSeekTime);

    // In cases where the destination seek time precisely matches the synchronizer's existing time
    // no time jumped notification will be issued. In this case, just notify the MediaPlayer that
    // the seek completed successfully.
    if (doesNotRequireSeek) {
        m_seeking = false;

        if (shouldBePlaying())
            [m_synchronizer setRate:m_rate];
        if (!seeking() && m_seekCompleted)
            m_player->timeChanged();
    }
}
491
// Parks the seek-state machine in Seeking until seekCompleted() is called
// (the media source needs time to enqueue samples at the new position).
void MediaPlayerPrivateMediaSourceAVFObjC::waitForSeekCompleted()
{
    if (!m_seeking)
        return;
    ALWAYS_LOG(LOGIDENTIFIER);
    m_seekCompleted = Seeking;
}
499
// Called once the seeked-to samples are enqueued. May defer completion until
// a video frame is actually displayable, to avoid briefly showing a stale
// frame. ("WaitingForAvailableFame" matches the enum's existing spelling.)
void MediaPlayerPrivateMediaSourceAVFObjC::seekCompleted()
{
    if (m_seekCompleted == SeekCompleted)
        return;
    if (hasVideo() && !m_hasAvailableVideoFrame) {
        ALWAYS_LOG(LOGIDENTIFIER, "waiting for video frame");
        m_seekCompleted = WaitingForAvailableFame;
        return;
    }
    ALWAYS_LOG(LOGIDENTIFIER);
    m_seekCompleted = SeekCompleted;
    // Resume playback if it was requested while the seek was in flight.
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
    if (!m_seeking)
        m_player->timeChanged();
}
516
// A seek is in flight if either the synchronizer hasn't jumped yet (m_seeking)
// or the seek-completion state machine hasn't finished.
bool MediaPlayerPrivateMediaSourceAVFObjC::seeking() const
{
    return m_seeking || m_seekCompleted != SeekCompleted;
}
521
// Stores the requested playback rate and applies it if playback should be
// active. Negative rates are clamped to zero.
void MediaPlayerPrivateMediaSourceAVFObjC::setRateDouble(double rate)
{
    // AVSampleBufferRenderSynchronizer does not support negative rate yet.
    m_rate = rate < 0 ? 0 : rate;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}
529
// Selects the time-pitch algorithm on every audio renderer: Spectral keeps
// pitch constant across rate changes, Varispeed lets it shift with the rate.
void MediaPlayerPrivateMediaSourceAVFObjC::setPreservesPitch(bool preservesPitch)
{
    ALWAYS_LOG(LOGIDENTIFIER, preservesPitch);
    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    for (const auto& key : m_sampleBufferAudioRendererMap.keys()) {
        auto *renderer = (__bridge AVSampleBufferAudioRenderer *)key.get();
        [renderer setAudioTimePitchAlgorithm:algorithm];
    }
}
537
// Returns the cached network state (updated via setNetworkState elsewhere).
MediaPlayer::NetworkState MediaPlayerPrivateMediaSourceAVFObjC::networkState() const
{
    return m_networkState;
}
542
// Returns the cached ready state.
MediaPlayer::ReadyState MediaPlayerPrivateMediaSourceAVFObjC::readyState() const
{
    return m_readyState;
}
547
// The whole presentation [startTime, duration] is seekable for MSE.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::seekable() const
{
    return makeUnique<PlatformTimeRanges>(minMediaTimeSeekable(), maxMediaTimeSeekable());
}
552
// The seekable range ends at the media source's duration.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::maxMediaTimeSeekable() const
{
    return durationMediaTime();
}
557
// The seekable range starts at the presentation start (always zero here).
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::minMediaTimeSeekable() const
{
    return startTime();
}
562
// Buffered ranges come from the media source; empty until one is attached.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::buffered() const
{
    if (!m_mediaSourcePrivate)
        return makeUnique<PlatformTimeRanges>();
    return m_mediaSourcePrivate->buffered();
}
567
// Read-and-clear: reports whether loading progressed since the previous call.
// (m_loadingProgressed is mutable; hence legal in this const method.)
bool MediaPlayerPrivateMediaSourceAVFObjC::didLoadingProgress() const
{
    bool progressed = m_loadingProgressed;
    m_loadingProgressed = false;
    return progressed;
}
574
// Intentionally empty: layer sizing is handled by the fullscreen layer
// manager, not by the element's layout size.
void MediaPlayerPrivateMediaSourceAVFObjC::setSize(const IntSize&)
{
    // No-op.
}
579
// Returns a CG image for the current time, refreshing the cached image from
// the decompression session first if a newer frame exists.
NativeImagePtr MediaPlayerPrivateMediaSourceAVFObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
585
// Refreshes m_lastPixelBuffer from the decompression session. Returns true
// only when a new buffer was fetched; m_lastPixelBuffer is left untouched
// otherwise. Pixel buffers exist only on the decompression-session path —
// when a display layer is rendering there is nothing to fetch here.
bool MediaPlayerPrivateMediaSourceAVFObjC::updateLastPixelBuffer()
{
    if (m_sampleBufferDisplayLayer || !m_decompressionSession)
        return false;

    // For the very first frame accept a sample later than the current time;
    // afterwards require an exact match so stale frames are not re-fetched.
    auto flags = m_lastPixelBuffer ? WebCoreDecompressionSession::ExactTime : WebCoreDecompressionSession::AllowLater;
    auto newPixelBuffer = m_decompressionSession->imageForTime(currentMediaTime(), flags);
    if (!newPixelBuffer)
        return false;

    m_lastPixelBuffer = newPixelBuffer;
    return true;
}
599
// Converts the freshest pixel buffer into a CG image for software painting.
// Returns false when no new frame was available (the cached image stands).
bool MediaPlayerPrivateMediaSourceAVFObjC::updateLastImage()
{
    if (!updateLastPixelBuffer())
        return false;

    ASSERT(m_lastPixelBuffer);

    // Lazily create a conformer that converts whatever format the decoder
    // produced into 32BGRA, which CG drawing expects.
    if (!m_rgbConformer) {
        auto attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
        m_rgbConformer = makeUnique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
    }

    m_lastImage = m_rgbConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());
    return true;
}
615
// Software paint entry point; identical to painting the current frame.
void MediaPlayerPrivateMediaSourceAVFObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    paintCurrentFrameInContext(context, rect);
}
620
// Draws the current video frame scaled into outputRect. Silently does nothing
// when painting is disabled or no frame has been decoded yet.
void MediaPlayerPrivateMediaSourceAVFObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& outputRect)
{
    if (context.paintingDisabled())
        return;

    auto image = nativeImageForCurrentTime();
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(context);
    FloatRect sourceRect(FloatPoint(), FloatSize(CGImageGetWidth(image.get()), CGImageGetHeight(image.get())));
    context.drawNativeImage(image, sourceRect.size(), outputRect, sourceRect);
}
634
// Copies the current video frame into a WebGL texture. The first call also
// switches the player onto the decompression-session rendering path.
bool MediaPlayerPrivateMediaSourceAVFObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // We have been asked to paint into a WebGL canvas, so take that as a signal to create
    // a decompression session, even if that means the native video can't also be displayed
    // in page.
    if (!m_hasBeenAskedToPaintGL) {
        m_hasBeenAskedToPaintGL = true;
        acceleratedRenderingStateChanged();
    }

    ASSERT(context);

    // Refresh the cached pixel buffer, then bail if we still have no frame at
    // all. (Previously the null check only ran when updateLastPixelBuffer()
    // returned true, so a call made before any frame was decoded passed a
    // null CVPixelBufferRef to CVPixelBufferGetWidth() and the copier.)
    updateLastPixelBuffer();
    if (!m_lastPixelBuffer)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t height = CVPixelBufferGetHeight(m_lastPixelBuffer.get());

    // Lazily create the texture copier bound to this GL context.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = makeUnique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
660
// True once a displayable frame exists (see setHasAvailableVideoFrame).
bool MediaPlayerPrivateMediaSourceAVFObjC::hasAvailableVideoFrame() const
{
    return m_hasAvailableVideoFrame;
}
665
// This engine always renders through a compositing layer or GL texture.
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsAcceleratedRendering() const
{
    return true;
}
670
// WebGL painting and native layer display are mutually exclusive: once a
// canvas has asked us to paint (m_hasBeenAskedToPaintGL), decode through a
// decompression session; otherwise render via an AVSampleBufferDisplayLayer.
void MediaPlayerPrivateMediaSourceAVFObjC::acceleratedRenderingStateChanged()
{
    if (m_hasBeenAskedToPaintGL) {
        destroyLayer();
        ensureDecompressionSession();
    } else {
        destroyDecompressionSession();
        ensureLayer();
    }
}
681
// Forwards source-buffer activity changes to the MediaPlayer client.
void MediaPlayerPrivateMediaSourceAVFObjC::notifyActiveSourceBuffersChanged()
{
    m_player->client().mediaPlayerActiveSourceBuffersChanged(m_player);
}
686
// MSE presentations are treated as stored (seekable) streams, not live.
MediaPlayer::MovieLoadType MediaPlayerPrivateMediaSourceAVFObjC::movieLoadType() const
{
    return MediaPlayer::StoredStream;
}
691
// Intentionally empty: rendering paths are created lazily elsewhere.
void MediaPlayerPrivateMediaSourceAVFObjC::prepareForRendering()
{
    // No-op.
}
696
// Human-readable engine name for diagnostics/internals pages.
String MediaPlayerPrivateMediaSourceAVFObjC::engineDescription() const
{
    static NeverDestroyed<String> description(MAKE_STATIC_STRING_IMPL("AVFoundation MediaSource Engine"));
    return description;
}
702
// Not yet implemented; callers receive the empty string.
String MediaPlayerPrivateMediaSourceAVFObjC::languageOfPrimaryAudioTrack() const
{
    // FIXME(125158): implement languageOfPrimaryAudioTrack()
    return emptyString();
}
708
// No extra GC-visible memory is attributed to this engine (buffered media is
// accounted for by the source buffers themselves).
size_t MediaPlayerPrivateMediaSourceAVFObjC::extraMemoryCost() const
{
    return 0;
}
713
// Returns decode-quality counters. Prefers the decompression session's own
// counts (WebGL path); otherwise queries the display layer's performance
// metrics, which may be unavailable (-> nullopt).
Optional<VideoPlaybackQualityMetrics> MediaPlayerPrivateMediaSourceAVFObjC::videoPlaybackQualityMetrics()
{
    if (m_decompressionSession) {
        return VideoPlaybackQualityMetrics {
            m_decompressionSession->totalVideoFrames(),
            m_decompressionSession->droppedVideoFrames(),
            m_decompressionSession->corruptedVideoFrames(),
            m_decompressionSession->totalFrameDelay().toDouble(),
            0, // displayCompositedVideoFrames: not tracked by the session.
        };
    }

    auto metrics = [m_sampleBufferDisplayLayer videoPerformanceMetrics];
    if (!metrics)
        return WTF::nullopt;

    // numberOfDisplayCompositedVideoFrames is newer SPI; probe before calling.
    uint32_t displayCompositedFrames = 0;
    ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
    if ([metrics respondsToSelector:@selector(numberOfDisplayCompositedVideoFrames)])
        displayCompositedFrames = [metrics numberOfDisplayCompositedVideoFrames];
    ALLOW_NEW_API_WITHOUT_GUARDS_END

    return VideoPlaybackQualityMetrics {
        static_cast<uint32_t>([metrics totalNumberOfVideoFrames]),
        static_cast<uint32_t>([metrics numberOfDroppedVideoFrames]),
        static_cast<uint32_t>([metrics numberOfCorruptedVideoFrames]),
        [metrics totalFrameDelay],
        displayCompositedFrames,
    };
}
744
745 #pragma mark -
746 #pragma mark Utility Methods
747
// Creates the AVSampleBufferDisplayLayer rendering path (idempotent) and
// attaches it to the synchronizer, the media source, and the fullscreen
// layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::ensureLayer()
{
    if (m_sampleBufferDisplayLayer)
        return;

    m_sampleBufferDisplayLayer = adoptNS([PAL::allocAVSampleBufferDisplayLayerInstance() init]);
#ifndef NDEBUG
    [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaSource AVSampleBufferDisplayLayer"];
#endif

    // Allocation can fail at runtime; surface it as a decode error rather
    // than crashing below.
    ASSERT(m_sampleBufferDisplayLayer);
    if (!m_sampleBufferDisplayLayer) {
        ERROR_LOG(LOGIDENTIFIER, "Failed to create AVSampleBufferDisplayLayer");
        setNetworkState(MediaPlayer::DecodeError);
        return;
    }

    // SPI property (see category at the top of this file); WebCore manages
    // display sleep itself, so opt the layer out.
    if ([m_sampleBufferDisplayLayer respondsToSelector:@selector(setPreventDisplaySleepForVideoPlayback:)])
        m_sampleBufferDisplayLayer.get().preventDisplaySleepForVideoPlayback = NO;

    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setVideoLayer(m_sampleBufferDisplayLayer.get());
    m_videoFullscreenLayerManager->setVideoLayer(m_sampleBufferDisplayLayer.get(), snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size());
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
774
// Tears down the display-layer rendering path. Safe to call with no layer.
void MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer()
{
    if (!m_sampleBufferDisplayLayer)
        return;

    // Detach the renderer at the current time so the synchronizer removes it
    // cleanly; the completion handler is intentionally a no-op.
    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setVideoLayer(nullptr);
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
    m_sampleBufferDisplayLayer = nullptr;
    setHasAvailableVideoFrame(false);
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
792
// Creates the WebGL-oriented decode path (idempotent): frames are decoded by
// a WebCoreDecompressionSession slaved to the synchronizer's timebase rather
// than displayed by a layer.
void MediaPlayerPrivateMediaSourceAVFObjC::ensureDecompressionSession()
{
    if (m_decompressionSession)
        return;

    m_decompressionSession = WebCoreDecompressionSession::createOpenGL();
    m_decompressionSession->setTimebase([m_synchronizer timebase]);

    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setDecompressionSession(m_decompressionSession.get());

    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
806
// Tears down the decompression-session decode path. Detach it from the media
// source before invalidating so no further samples are enqueued into it.
void MediaPlayerPrivateMediaSourceAVFObjC::destroyDecompressionSession()
{
    if (!m_decompressionSession)
        return;

    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setDecompressionSession(nullptr);

    m_decompressionSession->invalidate();
    m_decompressionSession = nullptr;
    setHasAvailableVideoFrame(false);
}
819
// True only when playback was requested, no seek is in flight, every renderer
// has a sample queued, and enough data is buffered to advance.
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldBePlaying() const
{
    return m_playing && !seeking() && allRenderersHaveAvailableSamples() && m_readyState >= MediaPlayer::HaveFutureData;
}
824
// Records whether a displayable video frame exists and fans the change out:
// renderer readiness, first-frame notification, a seek parked waiting for a
// frame, and a readyState change that was waiting on a frame.
void MediaPlayerPrivateMediaSourceAVFObjC::setHasAvailableVideoFrame(bool flag)
{
    if (m_hasAvailableVideoFrame == flag)
        return;

    DEBUG_LOG(LOGIDENTIFIER, flag);
    m_hasAvailableVideoFrame = flag;
    updateAllRenderersHaveAvailableSamples();

    // The notifications below only apply when a frame became available.
    if (!m_hasAvailableVideoFrame)
        return;

    m_player->firstVideoFrameAvailable();
    // seekCompleted() may have deferred completion until this frame arrived.
    if (m_seekCompleted == WaitingForAvailableFame)
        seekCompleted();

    if (m_readyStateIsWaitingForAvailableFrame) {
        m_readyStateIsWaitingForAvailableFrame = false;
        m_player->readyStateChanged();
    }
}
846
// Records whether the given audio renderer has an audible sample queued, then
// recomputes the aggregate all-renderers-ready state. Unknown renderers are
// ignored.
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::setHasAvailableAudioSample(AVSampleBufferAudioRenderer* renderer, bool flag)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    // The map is keyed by the bridged CF pointer of the renderer.
    auto iter = m_sampleBufferAudioRendererMap.find((__bridge CFTypeRef)renderer);
    if (iter == m_sampleBufferAudioRendererMap.end())
        return;

    auto& properties = iter->value;
    if (properties.hasAudibleSample == flag)
        return;
    DEBUG_LOG(LOGIDENTIFIER, flag);
    properties.hasAudibleSample = flag;
    updateAllRenderersHaveAvailableSamples();
}
862
// Recomputes whether every active renderer (the video path plus each audio
// renderer) has a sample ready, and starts or stops the synchronizer to
// match the new aggregate state.
void MediaPlayerPrivateMediaSourceAVFObjC::updateAllRenderersHaveAvailableSamples()
{
    bool allRenderersHaveAvailableSamples = [&] {
        if (hasVideo() && !m_hasAvailableVideoFrame)
            return false;

        for (auto& properties : m_sampleBufferAudioRendererMap.values()) {
            if (!properties.hasAudibleSample)
                return false;
        }

        return true;
    }();

    if (m_allRenderersHaveAvailableSamples == allRenderersHaveAvailableSamples)
        return;

    DEBUG_LOG(LOGIDENTIFIER, allRenderersHaveAvailableSamples);
    m_allRenderersHaveAvailableSamples = allRenderersHaveAvailableSamples;

    // Only touch the synchronizer's rate when it disagrees with the desired
    // playback state.
    if (shouldBePlaying() && [m_synchronizer rate] != m_rate)
        [m_synchronizer setRate:m_rate];
    else if (!shouldBePlaying() && [m_synchronizer rate])
        [m_synchronizer setRate:0];
}
892
// Propagates a duration change to the client and (re)installs a boundary time
// observer on the synchronizer that pauses playback when the new duration is
// reached.
void MediaPlayerPrivateMediaSourceAVFObjC::durationChanged()
{
    m_player->durationChanged();

    // Only one duration observer is kept; drop any previous one before
    // installing a replacement below.
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];

    if (!m_mediaSourcePrivate)
        return;

    MediaTime duration = m_mediaSourcePrivate->duration();
    NSArray* times = @[[NSValue valueWithCMTime:PAL::toCMTime(duration)]];

    auto logSiteIdentifier = LOGIDENTIFIER;
    DEBUG_LOG(logSiteIdentifier, duration);
    UNUSED_PARAM(logSiteIdentifier);

    // The block captures weakThis because the observer may fire after this
    // object is destroyed; `this` is captured only for the logging macros.
    m_durationObserver = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[weakThis = makeWeakPtr(*this), duration, logSiteIdentifier, this] {
        if (!weakThis)
            return;

        MediaTime now = weakThis->currentMediaTime();
        DEBUG_LOG(logSiteIdentifier, "boundary time observer called, now = ", now);

        weakThis->pauseInternal();
        if (now < duration) {
            // The observer fired before reaching the duration; clamp the
            // synchronizer to the exact end time.
            ERROR_LOG(logSiteIdentifier, "ERROR: boundary time observer called before duration");
            [weakThis->m_synchronizer setRate:0 time:PAL::toCMTime(duration)];
        }
        weakThis->m_player->timeChanged();

    }];

    // If the duration shrank below (or to) the current time, pause right away
    // rather than waiting for the observer.
    if (m_playing && duration <= currentMediaTime())
        pauseInternal();
}
929
// Forwards a synchronizer effective-rate change to the MediaPlayer client.
void MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged()
{
    m_player->rateChanged();
}
934
// Schedules a natural-size change to take effect when playback reaches
// `time`. Observers are kept in a FIFO queue; each fired block retires the
// oldest entry, which is expected to be itself.
void MediaPlayerPrivateMediaSourceAVFObjC::sizeWillChangeAtTime(const MediaTime& time, const FloatSize& size)
{
    // A dedicated weak-pointer factory lets flushPendingSizeChanges() revoke
    // all pending blocks at once without touching other weak references.
    auto weakThis = m_sizeChangeObserverWeakPtrFactory.createWeakPtr(*this);
    NSArray* times = @[[NSValue valueWithCMTime:PAL::toCMTime(time)]];
    RetainPtr<id> observer = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[this, weakThis, size] {
        if (!weakThis)
            return;

        ASSERT(!m_sizeChangeObservers.isEmpty());
        if (!m_sizeChangeObservers.isEmpty()) {
            RetainPtr<id> observer = m_sizeChangeObservers.takeFirst();
            [m_synchronizer removeTimeObserver:observer.get()];
        }
        setNaturalSize(size);
    }];
    m_sizeChangeObservers.append(WTFMove(observer));

    // If playback already passed `time`, apply the size immediately; the
    // queued observer is retired later when it fires or is flushed.
    if (currentMediaTime() >= time)
        setNaturalSize(size);
}
955
// Updates the cached natural (intrinsic) video size and notifies the client,
// skipping the notification when the size is unchanged.
void MediaPlayerPrivateMediaSourceAVFObjC::setNaturalSize(const FloatSize& size)
{
    if (size == m_naturalSize)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, size);

    m_naturalSize = size;
    m_player->sizeChanged();
}
966
// Cancels every size change queued by sizeWillChangeAtTime(): removes each
// pending boundary observer from the synchronizer and revokes the weak
// pointers so already-dispatched blocks become no-ops.
void MediaPlayerPrivateMediaSourceAVFObjC::flushPendingSizeChanges()
{
    while (!m_sizeChangeObservers.isEmpty())
        [m_synchronizer removeTimeObserver:m_sizeChangeObservers.takeFirst().get()];
    m_sizeChangeObserverWeakPtrFactory.revokeAll();
}
975
976 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
977 #if HAVE(AVSTREAMSESSION)
// Lazily creates the AVStreamSession used for legacy EME secure-stop storage.
// Returns nil when the class is unavailable, no storage directory is
// configured, or the directory cannot be created.
AVStreamSession* MediaPlayerPrivateMediaSourceAVFObjC::streamSession()
{
    // Consistently qualify the soft-linked class getter with PAL:: (the bare
    // call on the left previously mixed qualifications on one line; every
    // other soft-link call in this function uses PAL::).
    if (!PAL::getAVStreamSessionClass() || ![PAL::getAVStreamSessionClass() instancesRespondToSelector:@selector(initWithStorageDirectoryAtURL:)])
        return nil;

    if (!m_streamSession) {
        String storageDirectory = m_player->mediaKeysStorageDirectory();
        if (storageDirectory.isEmpty())
            return nil;

        // Create the storage directory on demand; bail if that fails.
        if (!FileSystem::fileExists(storageDirectory)) {
            if (!FileSystem::makeAllDirectories(storageDirectory))
                return nil;
        }

        String storagePath = FileSystem::pathByAppendingComponent(storageDirectory, "SecureStop.plist");
        m_streamSession = adoptNS([PAL::allocAVStreamSessionInstance() initWithStorageDirectoryAtURL:[NSURL fileURLWithPath:storagePath]]);
    }
    return m_streamSession.get();
}
998 #endif
999
// Returns the current legacy CDM session, or null if none is attached (the
// member is a WeakPtr, so this also returns null after the session dies).
CDMSessionMediaSourceAVFObjC* MediaPlayerPrivateMediaSourceAVFObjC::cdmSession() const
{
    return m_session.get();
}
1004
// Attaches a legacy CDM session, wires it to the stream session when
// supported, and propagates it to every source buffer.
void MediaPlayerPrivateMediaSourceAVFObjC::setCDMSession(LegacyCDMSession* session)
{
    if (session == m_session)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    m_session = makeWeakPtr(toCDMSessionMediaSourceAVFObjC(session));

#if HAVE(AVSTREAMSESSION)
    if (CDMSessionAVStreamSession* cdmStreamSession = toCDMSessionAVStreamSession(m_session.get()))
        cdmStreamSession->setStreamSession(streamSession());
#endif

    // m_mediaSourcePrivate may not exist yet (or may already be cleared);
    // every other use of it in this class is null-checked, so guard here too
    // instead of dereferencing unconditionally.
    if (!m_mediaSourcePrivate)
        return;

    for (auto& sourceBuffer : m_mediaSourcePrivate->sourceBuffers())
        sourceBuffer->setCDMSession(m_session.get());
}
1022 #endif // ENABLE(LEGACY_ENCRYPTED_MEDIA)
1023
1024 #if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
// Forwards an encrypted-media "key needed" event to the MediaPlayer client.
void MediaPlayerPrivateMediaSourceAVFObjC::keyNeeded(Uint8Array* initData)
{
    m_player->keyNeeded(initData);
}
1029 #endif
1030
// Relays an output-obscured (insufficient HDCP/external protection) change to
// the media source. A no-op when ENCRYPTED_MEDIA is compiled out.
void MediaPlayerPrivateMediaSourceAVFObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool obscured)
{
#if ENABLE(ENCRYPTED_MEDIA)
    ALWAYS_LOG(LOGIDENTIFIER, obscured);
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
#else
    UNUSED_PARAM(obscured);
#endif
}
1041
1042 #if ENABLE(ENCRYPTED_MEDIA)
// Forwards modern-EME CDM instance attachment to the media source, if any.
void MediaPlayerPrivateMediaSourceAVFObjC::cdmInstanceAttached(CDMInstance& instance)
{
    ALWAYS_LOG(LOGIDENTIFIER);
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->cdmInstanceAttached(instance);
}
1049
// Forwards modern-EME CDM instance detachment to the media source, if any.
void MediaPlayerPrivateMediaSourceAVFObjC::cdmInstanceDetached(CDMInstance& instance)
{
    ALWAYS_LOG(LOGIDENTIFIER);
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->cdmInstanceDetached(instance);
}
1056
// Asks the media source to retry decryption with the given CDM instance.
void MediaPlayerPrivateMediaSourceAVFObjC::attemptToDecryptWithInstance(CDMInstance& instance)
{
    ALWAYS_LOG(LOGIDENTIFIER);
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->attemptToDecryptWithInstance(instance);
}
1063
// True when the media source exists and reports it is blocked on a key;
// false when there is no media source.
bool MediaPlayerPrivateMediaSourceAVFObjC::waitingForKey() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->waitingForKey();
}
1068
// Forwards a waiting-for-key state change to the MediaPlayer client.
void MediaPlayerPrivateMediaSourceAVFObjC::waitingForKeyChanged()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    m_player->waitingForKeyChanged();
}
1074
// Forwards encrypted-media initialization data found in the stream to the
// MediaPlayer client, transferring ownership of the buffer.
void MediaPlayerPrivateMediaSourceAVFObjC::initializationDataEncountered(const String& initDataType, RefPtr<ArrayBuffer>&& initData)
{
    ALWAYS_LOG(LOGIDENTIFIER, initDataType);
    m_player->initializationDataEncountered(initDataType, WTFMove(initData));
}
1080 #endif
1081
// Delegates to the player for the list of content types that must use
// hardware decode on this configuration.
const Vector<ContentType>& MediaPlayerPrivateMediaSourceAVFObjC::mediaContentTypesRequiringHardwareSupport() const
{
    return m_player->mediaContentTypesRequiringHardwareSupport();
}
1086
// Delegates to the player's policy on whether codec hardware support must be
// verified before use.
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldCheckHardwareSupport() const
{
    return m_player->shouldCheckHardwareSupport();
}
1091
// Updates the ready state, adjusts the synchronizer's rate accordingly, and
// notifies the client. When video is present but no frame is displayable yet,
// the client notification is deferred until setHasAvailableVideoFrame(true).
void MediaPlayerPrivateMediaSourceAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (readyState == m_readyState)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, readyState);
    m_readyState = readyState;

    // Drive the synchronizer to the rate implied by the new state.
    [m_synchronizer setRate:shouldBePlaying() ? m_rate : 0];

    bool mustWaitForFrame = m_readyState >= MediaPlayerEnums::HaveCurrentData && hasVideo() && !m_hasAvailableVideoFrame;
    if (mustWaitForFrame) {
        m_readyStateIsWaitingForAvailableFrame = true;
        return;
    }

    m_player->readyStateChanged();
}
1112
// Updates the cached network state and notifies the client, skipping the
// notification when nothing changed.
void MediaPlayerPrivateMediaSourceAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    if (m_networkState == networkState)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, networkState);
    m_networkState = networkState;
    m_player->networkStateChanged();
}
1122
// Registers a new audio renderer: tracks it in the renderer map, applies the
// player's current audio settings, and attaches it to the synchronizer.
// Idempotent for renderers that are already registered.
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    if (!m_sampleBufferAudioRendererMap.add((__bridge CFTypeRef)audioRenderer, AudioRendererProperties()).isNewEntry)
        return;

    // Mirror the player's current audio configuration onto the renderer.
    [audioRenderer setMuted:m_player->muted()];
    [audioRenderer setVolume:m_player->volume()];
    [audioRenderer setAudioTimePitchAlgorithm:(m_player->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    [m_synchronizer addRenderer:audioRenderer];
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
1137
// Unregisters an audio renderer: detaches it from the synchronizer at the
// current timebase time and drops it from the renderer map. A no-op for
// renderers that were never registered.
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    auto iter = m_sampleBufferAudioRendererMap.find((__bridge CFTypeRef)audioRenderer);
    if (iter == m_sampleBufferAudioRendererMap.end())
        return;

    // The completion handler is intentionally a no-op.
    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:audioRenderer atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    m_sampleBufferAudioRendererMap.remove(iter);
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
1154
// Reacts to a change in the media's characteristics (e.g. tracks appearing
// or disappearing): refreshes sample availability and notifies the client.
void MediaPlayerPrivateMediaSourceAVFObjC::characteristicsChanged()
{
    updateAllRenderersHaveAvailableSamples();
    m_player->characteristicChanged();
}
1160
// Hands the fullscreen layer to the layer manager, passing the most recent
// video frame so the transition has content to display immediately.
void MediaPlayerPrivateMediaSourceAVFObjC::setVideoFullscreenLayer(PlatformLayer *videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    updateLastImage();
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
}
1166
// Forwards the fullscreen frame rectangle to the layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1171
// A text track representation is needed only while a fullscreen layer exists.
bool MediaPlayerPrivateMediaSourceAVFObjC::requiresTextTrackRepresentation() const
{
    return m_videoFullscreenLayerManager->videoFullscreenLayer();
}
1176     
// Forwards a text-track bounds sync request to the layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::syncTextTrackBounds()
{
    m_videoFullscreenLayerManager->syncTextTrackBounds();
}
1181     
// Forwards the text track representation to the layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
}
1186
1187 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Stores the wireless (AirPlay) playback target; takes ownership of it.
void MediaPlayerPrivateMediaSourceAVFObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);
}
1192
// Records whether output should be routed to the wireless playback target and
// notifies the player of the wireless-state change.
void MediaPlayerPrivateMediaSourceAVFObjC::setShouldPlayToPlaybackTarget(bool shouldPlayToTarget)
{
    if (shouldPlayToTarget == m_shouldPlayToTarget)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, shouldPlayToTarget);
    m_shouldPlayToTarget = shouldPlayToTarget;

    // m_player is checked here (unlike elsewhere) because this setter can be
    // invoked during teardown paths — keep the guard.
    if (m_player)
        m_player->currentPlaybackTargetIsWirelessChanged();
}
1204
// True when a wireless target is set, routing to it is enabled, and the
// target has an active route. Returns (without logging) when no target exists.
bool MediaPlayerPrivateMediaSourceAVFObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_playbackTarget)
        return false;

    auto hasTarget = m_shouldPlayToTarget && m_playbackTarget->hasActiveRoute();
    INFO_LOG(LOGIDENTIFIER, hasTarget);
    return hasTarget;
}
1214 #endif
1215
// Schedules `task` to run on the main queue when the synchronizer reaches
// `time`. Only one task is supported at once; a new call replaces any
// previously scheduled task. Always returns true (the schedule cannot fail).
bool MediaPlayerPrivateMediaSourceAVFObjC::performTaskAtMediaTime(WTF::Function<void()>&& task, MediaTime time)
{
    // __block moves the task into block storage so the ObjC block owns it.
    __block WTF::Function<void()> taskIn = WTFMove(task);

    if (m_performTaskObserver)
        [m_synchronizer removeTimeObserver:m_performTaskObserver.get()];

    // Qualify toCMTime with PAL:: for consistency with every other call site
    // in this file (previously the only unqualified use).
    m_performTaskObserver = [m_synchronizer addBoundaryTimeObserverForTimes:@[[NSValue valueWithCMTime:PAL::toCMTime(time)]] queue:dispatch_get_main_queue() usingBlock:^{
        taskIn();
    }];
    return true;
}
1228
1229 #if !RELEASE_LOG_DISABLED
// Routes this class's release logging to the MediaSource channel.
WTFLogChannel& MediaPlayerPrivateMediaSourceAVFObjC::logChannel() const
{
    return LogMediaSource;
}
1234 #endif
1235
1236 }
1237
1238 #endif