Unreviewed, rolling out r245857.
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateMediaSourceAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVAssetMIMETypeCache.h"
32 #import "AVAssetTrackUtilities.h"
33 #import "AVStreamDataParserMIMETypeCache.h"
34 #import "CDMSessionAVStreamSession.h"
35 #import "GraphicsContextCG.h"
36 #import "Logging.h"
37 #import "MediaSourcePrivateAVFObjC.h"
38 #import "MediaSourcePrivateClient.h"
39 #import "PixelBufferConformerCV.h"
40 #import "TextTrackRepresentation.h"
41 #import "TextureCacheCV.h"
42 #import "VideoFullscreenLayerManagerObjC.h"
43 #import "VideoTextureCopierCV.h"
44 #import "WebCoreDecompressionSession.h"
45 #import <AVFoundation/AVAsset.h>
46 #import <AVFoundation/AVTime.h>
47 #import <QuartzCore/CALayer.h>
48 #import <objc_runtime.h>
49 #import <pal/avfoundation/MediaTimeAVFoundation.h>
50 #import <pal/spi/mac/AVFoundationSPI.h>
51 #import <wtf/Deque.h>
52 #import <wtf/FileSystem.h>
53 #import <wtf/MainThread.h>
54 #import <wtf/NeverDestroyed.h>
55
56 #import "CoreVideoSoftLink.h"
57 #import <pal/cf/CoreMediaSoftLink.h>
58 #import <pal/cocoa/AVFoundationSoftLink.h>
59
60 #pragma mark -
61 #pragma mark AVStreamSession
62
// Minimal SPI declaration for AVStreamSession, exposing only the initializer
// this file needs. (The full interface lives in AVFoundation SPI headers.)
@interface AVStreamSession : NSObject
- (instancetype)initWithStorageDirectoryAtURL:(NSURL *)storageDirectory;
@end
66
67 namespace WebCore {
68 using namespace PAL;
69
// Maps a SeekState value to a human-readable string for logging.
// NOTE: "WaitingForAvailableFame" deliberately mirrors the (misspelled) enum
// name declared in the header; the static_asserts below keep string order in
// sync with the enum's numeric values.
String convertEnumerationToString(MediaPlayerPrivateMediaSourceAVFObjC::SeekState enumerationValue)
{
    static const NeverDestroyed<String> values[] = {
        MAKE_STATIC_STRING_IMPL("Seeking"),
        MAKE_STATIC_STRING_IMPL("WaitingForAvailableFame"),
        MAKE_STATIC_STRING_IMPL("SeekCompleted"),
    };
    static_assert(static_cast<size_t>(MediaPlayerPrivateMediaSourceAVFObjC::SeekState::Seeking) == 0, "MediaPlayerPrivateMediaSourceAVFObjC::SeekState::Seeking is not 0 as expected");
    static_assert(static_cast<size_t>(MediaPlayerPrivateMediaSourceAVFObjC::SeekState::WaitingForAvailableFame) == 1, "MediaPlayerPrivateMediaSourceAVFObjC::SeekState::WaitingForAvailableFame is not 1 as expected");
    static_assert(static_cast<size_t>(MediaPlayerPrivateMediaSourceAVFObjC::SeekState::SeekCompleted) == 2, "MediaPlayerPrivateMediaSourceAVFObjC::SeekState::SeekCompleted is not 2 as expected");
    ASSERT(static_cast<size_t>(enumerationValue) < WTF_ARRAY_LENGTH(values));
    return values[static_cast<size_t>(enumerationValue)];
}
83     
84 #pragma mark -
85 #pragma mark MediaPlayerPrivateMediaSourceAVFObjC
86
// Core Media notification callback for kCMTimebaseNotification_EffectiveRateChanged.
// May fire on an arbitrary thread, so bounce to the main thread through a
// WeakPtr; a deallocated player is then simply skipped.
static void CMTimebaseEffectiveRateChangedCallback(CMNotificationCenterRef, const void *listener, CFStringRef, const void *, CFTypeRef)
{
    auto* player = static_cast<MediaPlayerPrivateMediaSourceAVFObjC*>(const_cast<void*>(listener));
    callOnMainThread([weakThis = player->createWeakPtr()] {
        if (weakThis)
            weakThis->effectiveRateChanged();
    });
}
96
// Constructs the player around an AVSampleBufferRenderSynchronizer, wires up
// a timebase rate-change listener (removed in the destructor), and installs a
// periodic time observer used to detect when a seek has landed.
MediaPlayerPrivateMediaSourceAVFObjC::MediaPlayerPrivateMediaSourceAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_synchronizer(adoptNS([PAL::allocAVSampleBufferRenderSynchronizerInstance() init]))
    , m_seekTimer(*this, &MediaPlayerPrivateMediaSourceAVFObjC::seekInternal)
    , m_networkState(MediaPlayer::Empty)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_rate(1)
    , m_playing(0)
    , m_seeking(false)
    , m_loadingProgressed(false)
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
#if !RELEASE_LOG_DISABLED
    , m_logger(player->mediaPlayerLogger())
    , m_logIdentifier(player->mediaPlayerLogIdentifier())
#endif
{
    // Register for effective-rate changes on the synchronizer's timebase; the
    // destructor removes this listener with the matching argument tuple.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterAddListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase, 0);

    auto logSiteIdentifier = LOGIDENTIFIER;
    ALWAYS_LOG(logSiteIdentifier);
    UNUSED_PARAM(logSiteIdentifier);

    // addPeriodicTimeObserverForInterval: throws an exception if you pass a non-numeric CMTime, so just use
    // an arbitrarily large time value of once an hour:
    __block auto weakThis = createWeakPtr();
    m_timeJumpedObserver = [m_synchronizer addPeriodicTimeObserverForInterval:PAL::toCMTime(MediaTime::createWithDouble(3600)) queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
#if LOG_DISABLED
        UNUSED_PARAM(time);
#endif
        // FIXME: Remove the below once <rdar://problem/15798050> is fixed.
        if (!weakThis)
            return;

        DEBUG_LOG(logSiteIdentifier, "synchronizer fired for ", toMediaTime(time), ", seeking = ", m_seeking, ", pending = ", !!m_pendingSeek);

        // An observer fire while m_seeking is set (and no newer request is
        // pending) means the in-flight seek landed: resume the rate if
        // appropriate and tell the client that time changed.
        if (m_seeking && !m_pendingSeek) {
            m_seeking = false;

            if (shouldBePlaying())
                [m_synchronizer setRate:m_rate];
            if (!seeking() && m_seekCompleted == SeekCompleted)
                m_player->timeChanged();
        }

        // A newer seek request superseded this one; service it now.
        if (m_pendingSeek)
            seekInternal();
    }];
}
147
// Tears down in reverse order of construction: remove the timebase listener,
// unregister synchronizer observers, then destroy the rendering pipeline.
MediaPlayerPrivateMediaSourceAVFObjC::~MediaPlayerPrivateMediaSourceAVFObjC()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    // Must match the CMNotificationCenterAddListener() call in the constructor.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterRemoveListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase);

    if (m_timeJumpedObserver)
        [m_synchronizer removeTimeObserver:m_timeJumpedObserver.get()];
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];
    flushPendingSizeChanges();

    destroyLayer();
    destroyDecompressionSession();

    m_seekTimer.stop();
}
167
168 #pragma mark -
169 #pragma mark MediaPlayer Factory Methods
170
// Registers this engine with the MediaPlayer factory, supplying the creation
// lambda plus type enumeration and support checking. The trailing zeros are
// optional registrar hooks this engine does not implement (presumably —
// confirm against MediaEngineRegistrar's signature).
void MediaPlayerPrivateMediaSourceAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateMediaSourceAVFObjC>(player); },
        getSupportedTypes, supportsType, 0, 0, 0, 0);
    ASSERT(AVAssetMIMETypeCache::singleton().isAvailable());
}
180
// The engine is usable only when AVFoundation and Core Media are present and
// every required class (parser, audio renderer, render synchronizer) is
// available, including a -setMuted: implementation on the audio renderer.
bool MediaPlayerPrivateMediaSourceAVFObjC::isAvailable()
{
    if (!PAL::isAVFoundationFrameworkAvailable())
        return false;
    if (!isCoreMediaFrameworkAvailable())
        return false;
    if (!getAVStreamDataParserClass())
        return false;
    if (!getAVSampleBufferAudioRendererClass())
        return false;
    if (!getAVSampleBufferRenderSynchronizerClass())
        return false;
    return class_getInstanceMethod(getAVSampleBufferAudioRendererClass(), @selector(setMuted:)) != nullptr;
}
190
// Fills |types| with the supported MIME types, preferring the
// AVStreamDataParser-backed cache and falling back to the AVAsset-backed one.
void MediaPlayerPrivateMediaSourceAVFObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    auto& streamDataParserCache = AVStreamDataParserMIMETypeCache::singleton();
    if (streamDataParserCache.isAvailable()) {
        types = streamDataParserCache.types();
        return;
    }

    auto& assetCache = AVAssetMIMETypeCache::singleton();
    if (assetCache.isAvailable())
        types = assetCache.types();
}
203
// Decides whether this engine can play content described by |parameters|.
// Only MediaSource loads qualify; the container type is vetted against the
// available MIME cache, then (when codecs are specified) the full
// "container; codecs=..." string is checked for decodability.
MediaPlayer::SupportsType MediaPlayerPrivateMediaSourceAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    // This engine does not support non-media-source sources.
    if (!parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    if (parameters.type.isEmpty())
        return MediaPlayer::IsNotSupported;

    auto& parserCache = AVStreamDataParserMIMETypeCache::singleton();
    bool useParserCache = parserCache.isAvailable();
    if (useParserCache) {
        if (!parserCache.supportsContentType(parameters.type))
            return MediaPlayer::IsNotSupported;
    } else {
        auto& assetCache = AVAssetMIMETypeCache::singleton();
        if (!assetCache.isAvailable() || !assetCache.supportsContentType(parameters.type))
            return MediaPlayer::IsNotSupported;
    }

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    auto codecs = parameters.type.parameter(ContentType::codecsParameter());
    if (codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Let the parser translate codec parameters it understands (e.g. aliases)
    // into the form the decode check expects.
    String outputCodecs = codecs;
    if ([PAL::getAVStreamDataParserClass() respondsToSelector:@selector(outputMIMECodecParameterForInputMIMECodecParameter:)])
        outputCodecs = [PAL::getAVStreamDataParserClass() outputMIMECodecParameterForInputMIMECodecParameter:outputCodecs];

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    String type = makeString(parameters.type.containerType(), "; codecs=\"", outputCodecs, "\"");
    if (useParserCache)
        return parserCache.canDecodeType(type) ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
    return AVAssetMIMETypeCache::singleton().canDecodeType(type) ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
244
245 #pragma mark -
246 #pragma mark MediaPlayerPrivateInterface Overrides
247
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String&)
{
    // This media engine only supports MediaSource URLs.
    // NOTE(review): the MediaStream overload uses setNetworkState(); consider
    // unifying the two paths.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
}
254
// Attaches a MediaSource client: creates the private media source and hands
// it whatever rendering pipeline (layer and/or decompression session) already
// exists, then re-evaluates which rendering path should be active.
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String&, MediaSourcePrivateClient* client)
{
    ALWAYS_LOG(LOGIDENTIFIER);

    m_mediaSourcePrivate = MediaSourcePrivateAVFObjC::create(this, client);
    m_mediaSourcePrivate->setVideoLayer(m_sampleBufferDisplayLayer.get());
    m_mediaSourcePrivate->setDecompressionSession(m_decompressionSession.get());

    acceleratedRenderingStateChanged();
}
265
#if ENABLE(MEDIA_STREAM)
// MediaStream sources are not supported by this engine.
void MediaPlayerPrivateMediaSourceAVFObjC::load(MediaStreamPrivate&)
{
    setNetworkState(MediaPlayer::FormatError);
}
#endif

// No-op: there is no network load to cancel for MSE content.
void MediaPlayerPrivateMediaSourceAVFObjC::cancelLoad()
{
}

// No-op.
void MediaPlayerPrivateMediaSourceAVFObjC::prepareToPlay()
{
}
280
// The layer WebCore composites for inline playback, owned by the fullscreen
// layer manager.
PlatformLayer* MediaPlayerPrivateMediaSourceAVFObjC::platformLayer() const
{
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
285
// Schedules playback on the main thread. The hop goes through a WeakPtr so a
// player destroyed before the callback runs is never touched.
void MediaPlayerPrivateMediaSourceAVFObjC::play()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    callOnMainThread([weakThis = createWeakPtr()] {
        if (weakThis)
            weakThis->playInternal();
    });
}
295
// Actually starts playback (main thread). Playing at or past the duration is
// a no-op; otherwise sets the playing flag and starts the synchronizer when
// all gating conditions (see shouldBePlaying()) hold.
void MediaPlayerPrivateMediaSourceAVFObjC::playInternal()
{
    // Robustness fix: play() defers here via callOnMainThread, so this can run
    // before load() attached a media source; bail instead of dereferencing a
    // null m_mediaSourcePrivate.
    if (!m_mediaSourcePrivate)
        return;

    if (currentMediaTime() >= m_mediaSourcePrivate->duration())
        return;

    ALWAYS_LOG(LOGIDENTIFIER);
    m_playing = true;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}
306
// Schedules a pause on the main thread, guarded by a WeakPtr like play().
void MediaPlayerPrivateMediaSourceAVFObjC::pause()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    callOnMainThread([weakThis = createWeakPtr()] {
        if (weakThis)
            weakThis->pauseInternal();
    });
}
316
// Actually pauses (main thread): clears the playing flag and halts the
// synchronizer (rate 0 stops all attached renderers).
void MediaPlayerPrivateMediaSourceAVFObjC::pauseInternal()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    m_playing = false;
    [m_synchronizer setRate:0];
}
323
// Paused whenever the synchronizer's rate is exactly zero.
bool MediaPlayerPrivateMediaSourceAVFObjC::paused() const
{
    return [m_synchronizer rate] == 0;
}
328
// Applies the volume to every attached audio renderer. Map keys are
// type-erased CFTypeRefs, so bridge back to the renderer class to message them.
void MediaPlayerPrivateMediaSourceAVFObjC::setVolume(float volume)
{
    ALWAYS_LOG(LOGIDENTIFIER, volume);
    for (const auto& key : m_sampleBufferAudioRendererMap.keys())
        [(__bridge AVSampleBufferAudioRenderer *)key.get() setVolume:volume];
}
335
// Scanning (fast forward/rewind scrubbing) is supported.
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsScanning() const
{
    return true;
}

// Propagates the mute state to every attached audio renderer.
void MediaPlayerPrivateMediaSourceAVFObjC::setMuted(bool muted)
{
    ALWAYS_LOG(LOGIDENTIFIER, muted);
    for (const auto& key : m_sampleBufferAudioRendererMap.keys())
        [(__bridge AVSampleBufferAudioRenderer *)key.get() setMuted:muted];
}
347
// Last known natural (presentation) size of the video.
FloatSize MediaPlayerPrivateMediaSourceAVFObjC::naturalSize() const
{
    return m_naturalSize;
}

// Track presence is delegated to the media source; no source means no tracks.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasVideo() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->hasVideo();
}

bool MediaPlayerPrivateMediaSourceAVFObjC::hasAudio() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->hasAudio();
}
368
// Records visibility. Becoming visible re-evaluates the rendering path;
// becoming hidden deliberately leaves the current pipeline in place.
void MediaPlayerPrivateMediaSourceAVFObjC::setVisible(bool visible)
{
    if (m_visible == visible)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, visible);
    m_visible = visible;
    if (m_visible)
        acceleratedRenderingStateChanged();
}
379
// Duration comes from the media source; zero before one is attached.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::durationMediaTime() const
{
    if (!m_mediaSourcePrivate)
        return MediaTime::zeroTime();
    return m_mediaSourcePrivate->duration();
}

// Current time as reported by the synchronizer's timebase, clamped so it
// never goes negative and never reports a time before the last seek target
// (the timebase may briefly lag behind a just-issued seek).
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::currentMediaTime() const
{
    MediaTime timebaseTime = PAL::toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
    if (timebaseTime < MediaTime::zeroTime())
        return MediaTime::zeroTime();
    return timebaseTime < m_lastSeekTime ? m_lastSeekTime : timebaseTime;
}
394
// Media sources always start at time zero.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::startTime() const
{
    return MediaTime::zeroTime();
}

MediaTime MediaPlayerPrivateMediaSourceAVFObjC::initialTime() const
{
    return MediaTime::zeroTime();
}
404
// Records a seek request and schedules seekInternal() asynchronously via a
// zero-delay one-shot timer, coalescing rapid successive seeks into the most
// recent request.
void MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(const MediaTime& time, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold)
{
    INFO_LOG(LOGIDENTIFIER, "time = ", time, ", negativeThreshold = ", negativeThreshold, ", positiveThreshold = ", positiveThreshold);

    m_seeking = true;
    // (Removed an unused `weakThis` local that was created but never captured.)
    m_pendingSeek = std::make_unique<PendingSeek>(time, negativeThreshold, positiveThreshold);

    if (m_seekTimer.isActive())
        m_seekTimer.stop();
    m_seekTimer.startOneShot(0_s);
}
417
// Services the most recent pending seek: resolves the target time (exact or
// tolerance-based), rewinds the synchronizer to it, and tells the media
// source to seek.
void MediaPlayerPrivateMediaSourceAVFObjC::seekInternal()
{
    // Take ownership of the pending request so a re-entrant seek starts fresh.
    std::unique_ptr<PendingSeek> pendingSeek;
    pendingSeek.swap(m_pendingSeek);

    if (!pendingSeek)
        return;

    if (!m_mediaSourcePrivate)
        return;

    // Zero tolerance means seek exactly; otherwise let the media source pick
    // the cheapest time within the allowed window around the target.
    if (!pendingSeek->negativeThreshold && !pendingSeek->positiveThreshold)
        m_lastSeekTime = pendingSeek->targetTime;
    else
        m_lastSeekTime = m_mediaSourcePrivate->fastSeekTimeForMediaTime(pendingSeek->targetTime, pendingSeek->positiveThreshold, pendingSeek->negativeThreshold);

    // Normalize double-backed times onto the default timescale so the equality
    // comparison against the synchronizer time below is meaningful.
    if (m_lastSeekTime.hasDoubleValue())
        m_lastSeekTime = MediaTime::createWithDouble(m_lastSeekTime.toDouble(), MediaTime::DefaultTimeScale);

    MediaTime synchronizerTime = PAL::toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
    INFO_LOG(LOGIDENTIFIER, "seekTime = ", m_lastSeekTime, ", synchronizerTime = ", synchronizerTime);

    bool doesNotRequireSeek = synchronizerTime == m_lastSeekTime;

    m_mediaSourcePrivate->willSeek();
    [m_synchronizer setRate:0 time:PAL::toCMTime(m_lastSeekTime)];
    m_mediaSourcePrivate->seekToTime(m_lastSeekTime);

    // In cases where the destination seek time precisely matches the synchronizer's existing time
    // no time jumped notification will be issued. In this case, just notify the MediaPlayer that
    // the seek completed successfully.
    if (doesNotRequireSeek) {
        m_seeking = false;

        if (shouldBePlaying())
            [m_synchronizer setRate:m_rate];
        if (!seeking() && m_seekCompleted)
            m_player->timeChanged();
    }
}
458
// Called when the media source needs more time to service the in-flight seek;
// moves the completion state back to Seeking so seeking() stays true.
void MediaPlayerPrivateMediaSourceAVFObjC::waitForSeekCompleted()
{
    if (!m_seeking)
        return;
    ALWAYS_LOG(LOGIDENTIFIER);
    m_seekCompleted = Seeking;
}

// Finalizes a seek. If video is present but no frame for the new time has
// been decoded yet, completion is deferred (setHasAvailableVideoFrame()
// re-invokes this once a frame arrives) so a stale frame is never shown.
void MediaPlayerPrivateMediaSourceAVFObjC::seekCompleted()
{
    if (m_seekCompleted == SeekCompleted)
        return;
    if (hasVideo() && !m_hasAvailableVideoFrame) {
        ALWAYS_LOG(LOGIDENTIFIER, "waiting for video frame");
        m_seekCompleted = WaitingForAvailableFame;
        return;
    }
    ALWAYS_LOG(LOGIDENTIFIER);
    m_seekCompleted = SeekCompleted;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
    if (!m_seeking)
        m_player->timeChanged();
}

// Still seeking while a seek is in flight or its completion is being held back.
bool MediaPlayerPrivateMediaSourceAVFObjC::seeking() const
{
    return m_seeking || m_seekCompleted != SeekCompleted;
}
488
// Stores the playback rate (clamped to non-negative) and applies it
// immediately when playback conditions allow.
void MediaPlayerPrivateMediaSourceAVFObjC::setRateDouble(double rate)
{
    // AVSampleBufferRenderSynchronizer does not support negative rate yet.
    m_rate = std::max<double>(rate, 0);
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}
496
// Selects the time-pitch algorithm for all audio renderers: Spectral keeps
// pitch constant across rate changes, Varispeed lets it shift with rate.
void MediaPlayerPrivateMediaSourceAVFObjC::setPreservesPitch(bool preservesPitch)
{
    ALWAYS_LOG(LOGIDENTIFIER, preservesPitch);
    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    for (const auto& key : m_sampleBufferAudioRendererMap.keys())
        [(__bridge AVSampleBufferAudioRenderer *)key.get() setAudioTimePitchAlgorithm:algorithm];
}
504
MediaPlayer::NetworkState MediaPlayerPrivateMediaSourceAVFObjC::networkState() const
{
    return m_networkState;
}

MediaPlayer::ReadyState MediaPlayerPrivateMediaSourceAVFObjC::readyState() const
{
    return m_readyState;
}

// The seekable range spans from the start time to the current duration.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::seekable() const
{
    return std::make_unique<PlatformTimeRanges>(minMediaTimeSeekable(), maxMediaTimeSeekable());
}

MediaTime MediaPlayerPrivateMediaSourceAVFObjC::maxMediaTimeSeekable() const
{
    return durationMediaTime();
}

MediaTime MediaPlayerPrivateMediaSourceAVFObjC::minMediaTimeSeekable() const
{
    return startTime();
}

// Buffered ranges come from the media source; empty before one is attached.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::buffered() const
{
    return m_mediaSourcePrivate ? m_mediaSourcePrivate->buffered() : std::make_unique<PlatformTimeRanges>();
}
534
// Read-and-clear: reports loading progress at most once per query.
bool MediaPlayerPrivateMediaSourceAVFObjC::didLoadingProgress() const
{
    bool progressed = m_loadingProgressed;
    m_loadingProgressed = false;
    return progressed;
}
541
void MediaPlayerPrivateMediaSourceAVFObjC::setSize(const IntSize&)
{
    // No-op.
}

// Refreshes m_lastImage from the decompression session when possible, then
// returns it; may return the previous image (or null) if no newer frame exists.
NativeImagePtr MediaPlayerPrivateMediaSourceAVFObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
552
// Pulls the pixel buffer for the current time from the decompression session
// into m_lastPixelBuffer. Returns true only when a new buffer was fetched.
// Bails when a display layer is active or no decompression session exists.
bool MediaPlayerPrivateMediaSourceAVFObjC::updateLastPixelBuffer()
{
    if (m_sampleBufferDisplayLayer || !m_decompressionSession)
        return false;

    // With no prior buffer, accept a slightly-later frame; afterwards require
    // an exact time match.
    auto flags = !m_lastPixelBuffer ? WebCoreDecompressionSession::AllowLater : WebCoreDecompressionSession::ExactTime;
    auto newPixelBuffer = m_decompressionSession->imageForTime(currentMediaTime(), flags);
    if (!newPixelBuffer)
        return false;

    m_lastPixelBuffer = newPixelBuffer;
    return true;
}
566
// Converts the freshest pixel buffer into a CG-consumable image in
// m_lastImage. Returns false when no new pixel buffer was available.
bool MediaPlayerPrivateMediaSourceAVFObjC::updateLastImage()
{
    if (!updateLastPixelBuffer())
        return false;

    ASSERT(m_lastPixelBuffer);

    // Lazily create a conformer that converts decoder output to 32BGRA.
    if (!m_rgbConformer) {
        auto attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
        m_rgbConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
    }

    m_lastImage = m_rgbConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());
    return true;
}
582
void MediaPlayerPrivateMediaSourceAVFObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    paintCurrentFrameInContext(context, rect);
}

// Software-paints the current frame, scaling the full image into outputRect.
// Does nothing when painting is disabled or no frame is available.
void MediaPlayerPrivateMediaSourceAVFObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& outputRect)
{
    if (context.paintingDisabled())
        return;

    auto image = nativeImageForCurrentTime();
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(image.get()), CGImageGetHeight(image.get()));
    context.drawNativeImage(image, imageRect.size(), outputRect, imageRect);
}
601
// Copies the current video frame into a WebGL texture. The first call flips
// the player into decompression-session rendering (see comment below).
bool MediaPlayerPrivateMediaSourceAVFObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // We have been asked to paint into a WebGL canvas, so take that as a signal to create
    // a decompression session, even if that means the native video can't also be displayed
    // in page.
    if (!m_hasBeenAskedToPaintGL) {
        m_hasBeenAskedToPaintGL = true;
        acceleratedRenderingStateChanged();
    }

    ASSERT(context);

    // NOTE(review): the inner null check looks unreachable — updateLastPixelBuffer()
    // only returns true after storing a non-null buffer. Conversely, when it
    // returns false and no buffer was ever fetched, execution continues with a
    // null m_lastPixelBuffer; confirm the copier tolerates that.
    if (updateLastPixelBuffer()) {
        if (!m_lastPixelBuffer)
            return false;
    }

    size_t width = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t height = CVPixelBufferGetHeight(m_lastPixelBuffer.get());

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
627
bool MediaPlayerPrivateMediaSourceAVFObjC::hasAvailableVideoFrame() const
{
    return m_hasAvailableVideoFrame;
}

// Rendering is always layer- or session-backed, so accelerated compositing is
// always supported.
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsAcceleratedRendering() const
{
    return true;
}
637
// Selects the active rendering path. WebGL painting and native layer display
// are mutually exclusive: once a GL paint has been requested, frames go
// through the decompression session; otherwise through a display layer.
void MediaPlayerPrivateMediaSourceAVFObjC::acceleratedRenderingStateChanged()
{
    if (m_hasBeenAskedToPaintGL) {
        destroyLayer();
        ensureDecompressionSession();
        return;
    }
    destroyDecompressionSession();
    ensureLayer();
}
648
// Forwards source-buffer activity changes to the player's client.
void MediaPlayerPrivateMediaSourceAVFObjC::notifyActiveSourceBuffersChanged()
{
    m_player->client().mediaPlayerActiveSourceBuffersChanged(m_player);
}

// MSE content is treated as a stored (fully seekable) stream.
MediaPlayer::MovieLoadType MediaPlayerPrivateMediaSourceAVFObjC::movieLoadType() const
{
    return MediaPlayer::StoredStream;
}

void MediaPlayerPrivateMediaSourceAVFObjC::prepareForRendering()
{
    // No-op.
}
663
String MediaPlayerPrivateMediaSourceAVFObjC::engineDescription() const
{
    static NeverDestroyed<String> description(MAKE_STATIC_STRING_IMPL("AVFoundation MediaSource Engine"));
    return description;
}

String MediaPlayerPrivateMediaSourceAVFObjC::languageOfPrimaryAudioTrack() const
{
    // FIXME(125158): implement languageOfPrimaryAudioTrack()
    return emptyString();
}

// No out-of-band memory is reported for this engine.
size_t MediaPlayerPrivateMediaSourceAVFObjC::extraMemoryCost() const
{
    return 0;
}
680
// Returns playback quality counters. The decompression-session path uses the
// session's own bookkeeping (which has no display-composited count, hence 0);
// the layer path queries AVFoundation's performance metrics object.
Optional<VideoPlaybackQualityMetrics> MediaPlayerPrivateMediaSourceAVFObjC::videoPlaybackQualityMetrics()
{
    if (m_decompressionSession) {
        return VideoPlaybackQualityMetrics {
            m_decompressionSession->totalVideoFrames(),
            m_decompressionSession->droppedVideoFrames(),
            m_decompressionSession->corruptedVideoFrames(),
            m_decompressionSession->totalFrameDelay().toDouble(),
            0,
        };
    }

    auto metrics = [m_sampleBufferDisplayLayer videoPerformanceMetrics];
    if (!metrics)
        return WTF::nullopt;

    // -numberOfDisplayCompositedVideoFrames is newer API; probe before calling.
    uint32_t displayCompositedFrames = 0;
    ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
    if ([metrics respondsToSelector:@selector(numberOfDisplayCompositedVideoFrames)])
        displayCompositedFrames = [metrics numberOfDisplayCompositedVideoFrames];
    ALLOW_NEW_API_WITHOUT_GUARDS_END

    return VideoPlaybackQualityMetrics {
        static_cast<uint32_t>([metrics totalNumberOfVideoFrames]),
        static_cast<uint32_t>([metrics numberOfDroppedVideoFrames]),
        static_cast<uint32_t>([metrics numberOfCorruptedVideoFrames]),
        [metrics totalFrameDelay],
        displayCompositedFrames,
    };
}
711
712 #pragma mark -
713 #pragma mark Utility Methods
714
// Lazily creates the AVSampleBufferDisplayLayer, attaches it to the
// synchronizer so it renders in lockstep with audio, and hands it to the
// media source and fullscreen layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::ensureLayer()
{
    if (m_sampleBufferDisplayLayer)
        return;

    m_sampleBufferDisplayLayer = adoptNS([PAL::allocAVSampleBufferDisplayLayerInstance() init]);
#ifndef NDEBUG
    [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaSource AVSampleBufferDisplayLayer"];
#endif

    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setVideoLayer(m_sampleBufferDisplayLayer.get());
    m_videoFullscreenLayerManager->setVideoLayer(m_sampleBufferDisplayLayer.get(), snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size());
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
731
// Detaches and drops the display layer, notifying the media source and the
// fullscreen layer manager, and clearing the available-frame flag.
void MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer()
{
    if (!m_sampleBufferDisplayLayer)
        return;

    // Remove the renderer at the current time; the completion handler is
    // intentionally empty.
    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setVideoLayer(nullptr);
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
    m_sampleBufferDisplayLayer = nullptr;
    setHasAvailableVideoFrame(false);
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
749
// Lazily creates the off-layer decompression session (used for WebGL/canvas
// painting), slaved to the synchronizer's timebase so timing matches audio.
void MediaPlayerPrivateMediaSourceAVFObjC::ensureDecompressionSession()
{
    if (m_decompressionSession)
        return;

    m_decompressionSession = WebCoreDecompressionSession::createOpenGL();
    m_decompressionSession->setTimebase([m_synchronizer timebase]);

    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setDecompressionSession(m_decompressionSession.get());

    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
763
// Tears down the decompression session: detach from the media source first so
// no new samples arrive, then invalidate and release the session.
void MediaPlayerPrivateMediaSourceAVFObjC::destroyDecompressionSession()
{
    if (!m_decompressionSession)
        return;

    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setDecompressionSession(nullptr);

    m_decompressionSession->invalidate();
    m_decompressionSession = nullptr;
    setHasAvailableVideoFrame(false);
}
776
// True when the synchronizer's rate should be non-zero: the client requested
// playback, no seek is in flight, every renderer has samples to present, and
// enough data is buffered (HaveFutureData or better).
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldBePlaying() const
{
    return m_playing && !seeking() && allRenderersHaveAvailableSamples() && m_readyState >= MediaPlayer::HaveFutureData;
}
781
// Records whether a displayable video frame exists. When a frame becomes
// available, releases anything gated on it: client notification, a held-back
// seek completion, and a pending ready-state change.
void MediaPlayerPrivateMediaSourceAVFObjC::setHasAvailableVideoFrame(bool flag)
{
    if (m_hasAvailableVideoFrame == flag)
        return;

    DEBUG_LOG(LOGIDENTIFIER, flag);
    m_hasAvailableVideoFrame = flag;
    updateAllRenderersHaveAvailableSamples();

    if (!m_hasAvailableVideoFrame)
        return;

    m_player->firstVideoFrameAvailable();
    if (m_seekCompleted == WaitingForAvailableFame)
        seekCompleted();

    if (m_readyStateIsWaitingForAvailableFrame) {
        m_readyStateIsWaitingForAvailableFrame = false;
        m_player->readyStateChanged();
    }
}
803
// Records, per audio renderer, whether it has a sample ready; recomputes the
// aggregate availability state when the value changes.
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::setHasAvailableAudioSample(AVSampleBufferAudioRenderer* renderer, bool flag)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    // Renderers are keyed by their type-erased (CFTypeRef) identity.
    auto iter = m_sampleBufferAudioRendererMap.find((__bridge CFTypeRef)renderer);
    if (iter == m_sampleBufferAudioRendererMap.end())
        return;

    auto& properties = iter->value;
    if (properties.hasAudibleSample == flag)
        return;
    DEBUG_LOG(LOGIDENTIFIER, flag);
    properties.hasAudibleSample = flag;
    updateAllRenderersHaveAvailableSamples();
}
819
// Recomputes whether every renderer (the video frame, when video is present,
// plus each audio renderer) has a sample to present, then starts or stops the
// synchronizer to match the new aggregate state.
void MediaPlayerPrivateMediaSourceAVFObjC::updateAllRenderersHaveAvailableSamples()
{
    bool allHaveSamples = !(hasVideo() && !m_hasAvailableVideoFrame);
    if (allHaveSamples) {
        for (auto& properties : m_sampleBufferAudioRendererMap.values()) {
            if (!properties.hasAudibleSample) {
                allHaveSamples = false;
                break;
            }
        }
    }

    if (m_allRenderersHaveAvailableSamples == allHaveSamples)
        return;

    DEBUG_LOG(LOGIDENTIFIER, allHaveSamples);
    m_allRenderersHaveAvailableSamples = allHaveSamples;

    if (shouldBePlaying() && [m_synchronizer rate] != m_rate)
        [m_synchronizer setRate:m_rate];
    else if (!shouldBePlaying() && [m_synchronizer rate])
        [m_synchronizer setRate:0];
}
849
// Propagates a media-source duration change to the player and re-arms a
// boundary time observer that pauses playback when the new duration is reached.
void MediaPlayerPrivateMediaSourceAVFObjC::durationChanged()
{
    m_player->durationChanged();

    // Drop the observer armed for the previous duration before installing a new one.
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];

    if (!m_mediaSourcePrivate)
        return;

    MediaTime duration = m_mediaSourcePrivate->duration();
    auto weakThis = createWeakPtr();
    NSArray* times = @[[NSValue valueWithCMTime:PAL::toCMTime(duration)]];

    auto logSiteIdentifier = LOGIDENTIFIER;
    DEBUG_LOG(logSiteIdentifier, duration);
    UNUSED_PARAM(logSiteIdentifier);

    // NOTE(review): the block captures raw `this` (used by the logging macros)
    // alongside `weakThis`; this is safe only because `weakThis` is null-checked
    // before anything else runs — confirm no macro touches `this` earlier.
    m_durationObserver = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[weakThis, duration, logSiteIdentifier, this] {
        if (!weakThis)
            return;

        MediaTime now = weakThis->currentMediaTime();
        DEBUG_LOG(logSiteIdentifier, "boundary time observer called, now = ", now);

        weakThis->pauseInternal();
        // The observer is expected to fire at (or past) `duration`; if it fires
        // early, snap the synchronizer to the duration boundary.
        if (now < duration) {
            ERROR_LOG(logSiteIdentifier, "ERROR: boundary time observer called before duration");
            [weakThis->m_synchronizer setRate:0 time:PAL::toCMTime(duration)];
        }
        weakThis->m_player->timeChanged();

    }];

    // The new duration may already be at or behind the playhead; pause now.
    if (m_playing && duration <= currentMediaTime())
        pauseInternal();
}
887
// Forwards an effective-playback-rate change notification to the player.
void MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged()
{
    m_player->rateChanged();
}
892
// Defers applying a new natural size until the synchronizer's playhead reaches
// `time`, queuing a boundary time observer in m_sizeChangeObservers. If the
// playhead is already at or past `time`, the size is applied immediately.
void MediaPlayerPrivateMediaSourceAVFObjC::sizeWillChangeAtTime(const MediaTime& time, const FloatSize& size)
{
    // Uses a dedicated weak-ptr factory so flushPendingSizeChanges() can revoke
    // all pending observers at once.
    auto weakThis = m_sizeChangeObserverWeakPtrFactory.createWeakPtr(*this);
    NSArray* times = @[[NSValue valueWithCMTime:PAL::toCMTime(time)]];
    RetainPtr<id> observer = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[this, weakThis, size] {
        if (!weakThis)
            return;

        // Observers fire in queue order, so the front of the deque should be
        // the one that just fired; remove it from the synchronizer.
        ASSERT(!m_sizeChangeObservers.isEmpty());
        if (!m_sizeChangeObservers.isEmpty()) {
            RetainPtr<id> observer = m_sizeChangeObservers.takeFirst();
            [m_synchronizer removeTimeObserver:observer.get()];
        }
        setNaturalSize(size);
    }];
    m_sizeChangeObservers.append(WTFMove(observer));

    if (currentMediaTime() >= time)
        setNaturalSize(size);
}
913
// Updates the cached natural size and notifies the player when it changes.
void MediaPlayerPrivateMediaSourceAVFObjC::setNaturalSize(const FloatSize& size)
{
    if (size == m_naturalSize)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, size);

    m_naturalSize = size;
    m_player->sizeChanged();
}
924
// Cancels every queued size-change boundary observer without firing it, and
// revokes the weak pointers so in-flight observer blocks become no-ops.
void MediaPlayerPrivateMediaSourceAVFObjC::flushPendingSizeChanges()
{
    while (!m_sizeChangeObservers.isEmpty())
        [m_synchronizer removeTimeObserver:m_sizeChangeObservers.takeFirst().get()];
    m_sizeChangeObserverWeakPtrFactory.revokeAll();
}
933
934 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
935 #if HAVE(AVSTREAMSESSION)
// Lazily creates the AVStreamSession used for legacy EME secure-stop storage.
// Returns nil when the class or its storage-directory initializer is
// unavailable, when no media-keys storage directory is configured, or when the
// directory cannot be created.
AVStreamSession* MediaPlayerPrivateMediaSourceAVFObjC::streamSession()
{
    // Consistency fix: qualify the first class lookup with PAL:: to match the
    // second call on this line (and the PAL:: usage elsewhere in this file).
    if (!PAL::getAVStreamSessionClass() || ![PAL::getAVStreamSessionClass() instancesRespondToSelector:@selector(initWithStorageDirectoryAtURL:)])
        return nil;

    if (!m_streamSession) {
        String storageDirectory = m_player->mediaKeysStorageDirectory();
        if (storageDirectory.isEmpty())
            return nil;

        // Ensure the directory exists before handing a path inside it to AVFoundation.
        if (!FileSystem::fileExists(storageDirectory)) {
            if (!FileSystem::makeAllDirectories(storageDirectory))
                return nil;
        }

        String storagePath = FileSystem::pathByAppendingComponent(storageDirectory, "SecureStop.plist");
        m_streamSession = adoptNS([PAL::allocAVStreamSessionInstance() initWithStorageDirectoryAtURL:[NSURL fileURLWithPath:storagePath]]);
    }
    return m_streamSession.get();
}
956 #endif
957
// Associates a legacy CDM session with this player, the stream session (when
// available), and every current source buffer.
void MediaPlayerPrivateMediaSourceAVFObjC::setCDMSession(LegacyCDMSession* session)
{
    if (session == m_session)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    m_session = makeWeakPtr(toCDMSessionMediaSourceAVFObjC(session));

#if HAVE(AVSTREAMSESSION)
    if (CDMSessionAVStreamSession* cdmStreamSession = toCDMSessionAVStreamSession(m_session.get()))
        cdmStreamSession->setStreamSession(streamSession());
#endif

    // Guard against a null media source, matching the null checks used by the
    // other methods in this class (durationChanged(), cdmInstanceAttached(), ...).
    if (!m_mediaSourcePrivate)
        return;

    for (auto& sourceBuffer : m_mediaSourcePrivate->sourceBuffers())
        sourceBuffer->setCDMSession(m_session.get());
}
975 #endif // ENABLE(LEGACY_ENCRYPTED_MEDIA)
976
977 #if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
// Forwards a "key needed" event, with its initialization data, to the player.
void MediaPlayerPrivateMediaSourceAVFObjC::keyNeeded(Uint8Array* initData)
{
    m_player->keyNeeded(initData);
}
982 #endif
983
// Relays an output-obscured (insufficient external protection) change to the
// media source. Compiled to a no-op when ENCRYPTED_MEDIA is disabled.
void MediaPlayerPrivateMediaSourceAVFObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool obscured)
{
#if ENABLE(ENCRYPTED_MEDIA)
    ALWAYS_LOG(LOGIDENTIFIER, obscured);
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
#else
    UNUSED_PARAM(obscured);
#endif
}
994
995 #if ENABLE(ENCRYPTED_MEDIA)
// Forwards CDM instance attachment to the media source, if one exists.
void MediaPlayerPrivateMediaSourceAVFObjC::cdmInstanceAttached(CDMInstance& instance)
{
    ALWAYS_LOG(LOGIDENTIFIER);
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->cdmInstanceAttached(instance);
}
1002
// Forwards CDM instance detachment to the media source, if one exists.
void MediaPlayerPrivateMediaSourceAVFObjC::cdmInstanceDetached(CDMInstance& instance)
{
    ALWAYS_LOG(LOGIDENTIFIER);
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->cdmInstanceDetached(instance);
}
1009
// Asks the media source to retry decryption with the given CDM instance.
void MediaPlayerPrivateMediaSourceAVFObjC::attemptToDecryptWithInstance(CDMInstance& instance)
{
    ALWAYS_LOG(LOGIDENTIFIER);
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->attemptToDecryptWithInstance(instance);
}
1016
// Reports whether the media source is blocked waiting on a decryption key.
// Without a media source there is nothing waiting, so the answer is false.
bool MediaPlayerPrivateMediaSourceAVFObjC::waitingForKey() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->waitingForKey();
}
1021
// Forwards a waiting-for-key state change notification to the player.
void MediaPlayerPrivateMediaSourceAVFObjC::waitingForKeyChanged()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    m_player->waitingForKeyChanged();
}
1027
// Forwards encountered EME initialization data (and its type) to the player.
void MediaPlayerPrivateMediaSourceAVFObjC::initializationDataEncountered(const String& initDataType, RefPtr<ArrayBuffer>&& initData)
{
    ALWAYS_LOG(LOGIDENTIFIER, initDataType);
    m_player->initializationDataEncountered(initDataType, WTFMove(initData));
}
1033 #endif
1034
// Returns the player's list of content types that must use hardware decoding.
const Vector<ContentType>& MediaPlayerPrivateMediaSourceAVFObjC::mediaContentTypesRequiringHardwareSupport() const
{
    return m_player->mediaContentTypesRequiringHardwareSupport();
}
1039
// Returns whether the player requires hardware-support checks for decoding.
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldCheckHardwareSupport() const
{
    return m_player->shouldCheckHardwareSupport();
}
1044
// Updates the ready state, re-syncs the synchronizer's rate with whether
// playback should proceed, and notifies the player — unless video is present
// but no frame is displayable yet, in which case the notification is deferred
// until setHasAvailableVideoFrame() delivers it.
void MediaPlayerPrivateMediaSourceAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (readyState == m_readyState)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, readyState);
    m_readyState = readyState;

    [m_synchronizer setRate:shouldBePlaying() ? m_rate : 0.];

    bool mustWaitForVideoFrame = m_readyState >= MediaPlayerEnums::HaveCurrentData && hasVideo() && !m_hasAvailableVideoFrame;
    if (mustWaitForVideoFrame) {
        m_readyStateIsWaitingForAvailableFrame = true;
        return;
    }

    m_player->readyStateChanged();
}
1065
// Updates the network state and notifies the player when it changes.
void MediaPlayerPrivateMediaSourceAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    if (m_networkState == networkState)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, networkState);
    m_networkState = networkState;
    m_player->networkStateChanged();
}
1075
// Registers an audio renderer with the synchronizer, seeding it with the
// player's current mute, volume, and pitch-preservation settings.
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    // Renderers are keyed by their bridged CF pointer; bail if already tracked.
    if (!m_sampleBufferAudioRendererMap.add((__bridge CFTypeRef)audioRenderer, AudioRendererProperties()).isNewEntry)
        return;

    [audioRenderer setMuted:m_player->muted()];
    [audioRenderer setVolume:m_player->volume()];
    // Spectral when pitch must be preserved across rate changes; Varispeed otherwise.
    [audioRenderer setAudioTimePitchAlgorithm:(m_player->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    [m_synchronizer addRenderer:audioRenderer];
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
1090
// Detaches an audio renderer from the synchronizer and stops tracking it.
// No-op if the renderer was never registered.
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    auto iter = m_sampleBufferAudioRendererMap.find((__bridge CFTypeRef)audioRenderer);
    if (iter == m_sampleBufferAudioRendererMap.end())
        return;

    // Detach at the synchronizer's current timebase time.
    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:audioRenderer atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    m_sampleBufferAudioRendererMap.remove(iter);
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
1107
// Refreshes aggregate renderer readiness and forwards the characteristic
// change notification to the player.
void MediaPlayerPrivateMediaSourceAVFObjC::characteristicsChanged()
{
    updateAllRenderersHaveAvailableSamples();
    m_player->characteristicChanged();
}
1113
// Hands the fullscreen layer to the layer manager, refreshing m_lastImage
// first so the manager can show the most recent frame during the transition.
void MediaPlayerPrivateMediaSourceAVFObjC::setVideoFullscreenLayer(PlatformLayer *videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    updateLastImage();
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
}
1119
// Forwards the fullscreen frame rectangle to the layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1124
// A text track representation is needed only while a fullscreen layer exists.
bool MediaPlayerPrivateMediaSourceAVFObjC::requiresTextTrackRepresentation() const
{
    return m_videoFullscreenLayerManager->videoFullscreenLayer();
}
1129     
// Forwards the text-track bounds synchronization request to the layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::syncTextTrackBounds()
{
    m_videoFullscreenLayerManager->syncTextTrackBounds();
}
1134     
// Forwards the text track representation to the layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
}
1139
1140 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Stores the wireless playback target for later routing decisions.
void MediaPlayerPrivateMediaSourceAVFObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);
}
1145
// Records whether media should route to the wireless playback target and
// notifies the player that the wireless state may have changed.
void MediaPlayerPrivateMediaSourceAVFObjC::setShouldPlayToPlaybackTarget(bool shouldPlayToTarget)
{
    if (shouldPlayToTarget == m_shouldPlayToTarget)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, shouldPlayToTarget);
    m_shouldPlayToTarget = shouldPlayToTarget;

    if (m_player)
        m_player->currentPlaybackTargetIsWirelessChanged();
}
1157
// Wireless playback is active only when a target is set, routing to it is
// enabled, and the target reports an active route. Logs only when a target
// exists, preserving the original early-return behavior.
bool MediaPlayerPrivateMediaSourceAVFObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_playbackTarget)
        return false;

    auto hasTarget = m_shouldPlayToTarget && m_playbackTarget->hasActiveRoute();
    INFO_LOG(LOGIDENTIFIER, hasTarget);
    return hasTarget;
}
1167 #endif
1168
// Schedules `task` to run on the main queue when the synchronizer's playhead
// reaches `time`. Only one task may be pending at once: a newer request
// replaces the previously armed observer. Always returns true.
bool MediaPlayerPrivateMediaSourceAVFObjC::performTaskAtMediaTime(WTF::Function<void()>&& task, MediaTime time)
{
    // __block so the ObjC block takes ownership of the moved-in Function.
    __block WTF::Function<void()> taskIn = WTFMove(task);

    if (m_performTaskObserver)
        [m_synchronizer removeTimeObserver:m_performTaskObserver.get()];

    // Consistency fix: qualify toCMTime with PAL:: as every other call site in
    // this file does.
    m_performTaskObserver = [m_synchronizer addBoundaryTimeObserverForTimes:@[[NSValue valueWithCMTime:PAL::toCMTime(time)]] queue:dispatch_get_main_queue() usingBlock:^{
        taskIn();
    }];
    return true;
}
1181
1182 #if !RELEASE_LOG_DISABLED
// Identifies the log channel used by this class's logging macros.
WTFLogChannel& MediaPlayerPrivateMediaSourceAVFObjC::logChannel() const
{
    return LogMediaSource;
}
1187 #endif
1188
1189 }
1190
1191 #endif