7fb1a5ec305a854f66ee207579814c6618d1bec9
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateMediaSourceAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "CDMSessionAVStreamSession.h"
34 #import "GraphicsContextCG.h"
35 #import "Logging.h"
36 #import "MediaSourcePrivateAVFObjC.h"
37 #import "MediaSourcePrivateClient.h"
38 #import "PixelBufferConformerCV.h"
39 #import "TextTrackRepresentation.h"
40 #import "TextureCacheCV.h"
41 #import "VideoFullscreenLayerManagerObjC.h"
42 #import "VideoTextureCopierCV.h"
43 #import "WebCoreDecompressionSession.h"
44 #import <AVFoundation/AVAsset.h>
45 #import <AVFoundation/AVTime.h>
46 #import <QuartzCore/CALayer.h>
47 #import <objc_runtime.h>
48 #import <pal/avfoundation/MediaTimeAVFoundation.h>
49 #import <pal/spi/mac/AVFoundationSPI.h>
50 #import <wtf/Deque.h>
51 #import <wtf/FileSystem.h>
52 #import <wtf/MainThread.h>
53 #import <wtf/NeverDestroyed.h>
54
55 #import "CoreVideoSoftLink.h"
56 #import <pal/cf/CoreMediaSoftLink.h>
57 #import <pal/cocoa/AVFoundationSoftLink.h>
58
59 #pragma mark -
60 #pragma mark AVStreamSession
61
// Minimal forward declaration of the AVStreamSession SPI class so it can be
// messaged without importing the private AVFoundation header; only the
// initializer this file needs is declared.
@interface AVStreamSession : NSObject
- (instancetype)initWithStorageDirectoryAtURL:(NSURL *)storageDirectory;
@end
65
66 namespace WebCore {
67 using namespace PAL;
68
// Returns a human-readable name for a SeekState value, for logging. The table is
// indexed by the enum's underlying integer value; the static_asserts pin the
// expected ordering so the table and enum cannot silently drift apart.
// NOTE(review): "WaitingForAvailableFame" (missing 'r') mirrors the enum
// spelling in the header; fixing the typo would require changing both.
String convertEnumerationToString(MediaPlayerPrivateMediaSourceAVFObjC::SeekState enumerationValue)
{
    static const NeverDestroyed<String> values[] = {
        MAKE_STATIC_STRING_IMPL("Seeking"),
        MAKE_STATIC_STRING_IMPL("WaitingForAvailableFame"),
        MAKE_STATIC_STRING_IMPL("SeekCompleted"),
    };
    static_assert(static_cast<size_t>(MediaPlayerPrivateMediaSourceAVFObjC::SeekState::Seeking) == 0, "MediaPlayerPrivateMediaSourceAVFObjC::SeekState::Seeking is not 0 as expected");
    static_assert(static_cast<size_t>(MediaPlayerPrivateMediaSourceAVFObjC::SeekState::WaitingForAvailableFame) == 1, "MediaPlayerPrivateMediaSourceAVFObjC::SeekState::WaitingForAvailableFame is not 1 as expected");
    static_assert(static_cast<size_t>(MediaPlayerPrivateMediaSourceAVFObjC::SeekState::SeekCompleted) == 2, "MediaPlayerPrivateMediaSourceAVFObjC::SeekState::SeekCompleted is not 2 as expected");
    ASSERT(static_cast<size_t>(enumerationValue) < WTF_ARRAY_LENGTH(values));
    return values[static_cast<size_t>(enumerationValue)];
}
82     
83 #pragma mark -
84 #pragma mark MediaPlayerPrivateMediaSourceAVFObjC
85
// CoreMedia notification callback invoked when the synchronizer timebase's
// effective rate changes. `listener` is the player registered in the
// constructor. The notification may arrive off the main thread, so hop to the
// main thread and use a WeakPtr so a player destroyed in the interim is ignored.
static void CMTimebaseEffectiveRateChangedCallback(CMNotificationCenterRef, const void *listener, CFStringRef, const void *, CFTypeRef)
{
    // Prefer checked C++ casts over a single C-style cast.
    auto* player = static_cast<MediaPlayerPrivateMediaSourceAVFObjC*>(const_cast<void*>(listener));
    callOnMainThread([weakThis = player->createWeakPtr()] {
        if (!weakThis)
            return;
        weakThis.get()->effectiveRateChanged();
    });
}
95
// Constructor. The AVSampleBufferRenderSynchronizer acts as the master clock:
// renderers attach to it and its timebase backs currentMediaTime(). We register
// for effective-rate changes on that timebase and install a long-period time
// observer whose real purpose is to detect the time jump that follows a seek.
MediaPlayerPrivateMediaSourceAVFObjC::MediaPlayerPrivateMediaSourceAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_synchronizer(adoptNS([PAL::allocAVSampleBufferRenderSynchronizerInstance() init]))
    , m_seekTimer(*this, &MediaPlayerPrivateMediaSourceAVFObjC::seekInternal)
    , m_networkState(MediaPlayer::Empty)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_rate(1)
    , m_playing(false)
    , m_seeking(false)
    , m_loadingProgressed(false)
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
#if !RELEASE_LOG_DISABLED
    , m_logger(player->mediaPlayerLogger())
    , m_logIdentifier(player->mediaPlayerLogIdentifier())
#endif
{
    // Listen for effective-rate changes on the synchronizer's timebase; the
    // listener (this) is removed again in the destructor.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterAddListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase, 0);

    auto logSiteIdentifier = LOGIDENTIFIER;
    ALWAYS_LOG(logSiteIdentifier);
    UNUSED_PARAM(logSiteIdentifier);

    // addPeriodicTimeObserverForInterval: throws an exception if you pass a non-numeric CMTime, so just use
    // an arbitrarily large time value of once an hour:
    __block auto weakThis = createWeakPtr();
    m_timeJumpedObserver = [m_synchronizer addPeriodicTimeObserverForInterval:PAL::toCMTime(MediaTime::createWithDouble(3600)) queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
#if LOG_DISABLED
        UNUSED_PARAM(time);
#endif
        // FIXME: Remove the below once <rdar://problem/15798050> is fixed.
        if (!weakThis)
            return;

        DEBUG_LOG(logSiteIdentifier, "synchronizer fired for ", toMediaTime(time), ", seeking = ", m_seeking, ", pending = ", !!m_pendingSeek);

        // The observer fires when the synchronizer's time jumps (e.g. right after
        // a seek); if no further seek is pending, finish the in-flight seek here.
        if (m_seeking && !m_pendingSeek) {
            m_seeking = false;

            if (shouldBePlaying())
                [m_synchronizer setRate:m_rate];
            if (!seeking() && m_seekCompleted == SeekCompleted)
                m_player->timeChanged();
        }

        if (m_pendingSeek)
            seekInternal();
    }];
}
146
// Destructor: unregisters the timebase rate-change listener, removes all time
// observers added in the constructor and durationChanged()/sizeWillChangeAtTime(),
// and tears down the video layer and decompression session.
MediaPlayerPrivateMediaSourceAVFObjC::~MediaPlayerPrivateMediaSourceAVFObjC()
{
    ALWAYS_LOG(LOGIDENTIFIER);

    // Balance the CMNotificationCenterAddListener call made in the constructor.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterRemoveListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase);

    if (m_timeJumpedObserver)
        [m_synchronizer removeTimeObserver:m_timeJumpedObserver.get()];
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];
    // Remove any outstanding size-change boundary observers as well.
    flushPendingSizeChanges();

    destroyLayer();
    destroyDecompressionSession();

    m_seekTimer.stop();
}
166
167 #pragma mark -
168 #pragma mark MediaPlayer Factory Methods
169
// Registers this engine with the MediaPlayer factory, if the required
// AVFoundation/CoreMedia classes are available at runtime.
void MediaPlayerPrivateMediaSourceAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateMediaSourceAVFObjC>(player); },
        getSupportedTypes, supportsType, 0, 0, 0, 0);
    ASSERT(AVFoundationMIMETypeCache::singleton().isAvailable());
}
179
// True when the soft-linked AVFoundation/CoreMedia classes this engine depends
// on are present, including an AVSampleBufferAudioRenderer that responds to
// -setMuted: (an SPI requirement checked via the runtime).
bool MediaPlayerPrivateMediaSourceAVFObjC::isAvailable()
{
    return PAL::AVFoundationLibrary()
        && isCoreMediaFrameworkAvailable()
        && getAVStreamDataParserClass()
        && getAVSampleBufferAudioRendererClass()
        && getAVSampleBufferRenderSynchronizerClass()
        && class_getInstanceMethod(getAVSampleBufferAudioRendererClass(), @selector(setMuted:));
}
189
// Reports the MIME types AVFoundation can decode, via the shared type cache.
void MediaPlayerPrivateMediaSourceAVFObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    types = AVFoundationMIMETypeCache::singleton().types();
}
194
// Determines whether this engine can play the given content type. Only
// MediaSource loads are handled; the container type is checked against the
// AVFoundation MIME cache and the codecs parameter (translated through
// AVStreamDataParser when available) against AVURLAsset.
MediaPlayer::SupportsType MediaPlayerPrivateMediaSourceAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    // This engine does not support non-media-source sources.
    if (!parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    if (parameters.type.isEmpty() || !AVFoundationMIMETypeCache::singleton().canDecodeType(parameters.type.containerType()))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    auto codecs = parameters.type.parameter(ContentType::codecsParameter());
    if (codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Let AVStreamDataParser translate the codec string (e.g. to the form
    // AVFoundation expects) when the SPI is available.
    NSString *outputCodecs = codecs;
    if ([PAL::getAVStreamDataParserClass() respondsToSelector:@selector(outputMIMECodecParameterForInputMIMECodecParameter:)])
        outputCodecs = [PAL::getAVStreamDataParserClass() outputMIMECodecParameterForInputMIMECodecParameter:outputCodecs];

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type.containerType(), (NSString *)outputCodecs];
    return [PAL::getAVURLAssetClass() isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
224
225 #pragma mark -
226 #pragma mark MediaPlayerPrivateInterface Overrides
227
// Plain-URL load entry point: always fails, since this engine only plays
// MediaSource-backed content.
// NOTE(review): this assigns m_networkState directly while the MediaStream
// overload calls setNetworkState(); confirm whether they should match.
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String&)
{
    // This media engine only supports MediaSource URLs.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
}
234
// MediaSource load entry point: creates the private media source, hands it the
// current video layer and decompression session (either may be null at this
// point), and picks the rendering path.
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String&, MediaSourcePrivateClient* client)
{
    ALWAYS_LOG(LOGIDENTIFIER);

    m_mediaSourcePrivate = MediaSourcePrivateAVFObjC::create(this, client);
    m_mediaSourcePrivate->setVideoLayer(m_sampleBufferDisplayLayer.get());
    m_mediaSourcePrivate->setDecompressionSession(m_decompressionSession.get());

    acceleratedRenderingStateChanged();
}
245
#if ENABLE(MEDIA_STREAM)
// MediaStream load entry point: not supported by this engine.
void MediaPlayerPrivateMediaSourceAVFObjC::load(MediaStreamPrivate&)
{
    setNetworkState(MediaPlayer::FormatError);
}
#endif
252
// Intentionally a no-op: there is no in-flight network load to cancel for a
// MediaSource-backed player.
void MediaPlayerPrivateMediaSourceAVFObjC::cancelLoad()
{
}
256
// Intentionally a no-op: no preflight work is needed before playback starts.
void MediaPlayerPrivateMediaSourceAVFObjC::prepareToPlay()
{
}
260
// Returns the layer the compositor should use for inline video rendering.
PlatformLayer* MediaPlayerPrivateMediaSourceAVFObjC::platformLayer() const
{
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
265
// Asynchronously begins playback. The actual state change happens on the main
// thread via playInternal(); a WeakPtr guards against the player being
// destroyed before the queued task runs.
void MediaPlayerPrivateMediaSourceAVFObjC::play()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    auto weakThis = createWeakPtr();
    callOnMainThread([weakThis] {
        if (weakThis)
            weakThis.get()->playInternal();
    });
}
275
// Main-thread playback start. Marks the player as playing and spins up the
// synchronizer if every renderer is ready (see shouldBePlaying()).
void MediaPlayerPrivateMediaSourceAVFObjC::playInternal()
{
    // Robustness: load() may not have created the media source yet; other
    // accessors (durationMediaTime(), hasVideo(), ...) null-check it too.
    if (!m_mediaSourcePrivate)
        return;

    // Starting at (or past) the duration would end playback immediately; refuse.
    if (currentMediaTime() >= m_mediaSourcePrivate->duration())
        return;

    ALWAYS_LOG(LOGIDENTIFIER);
    m_playing = true;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}
286
// Asynchronously pauses playback; mirrors play() by deferring the state change
// to the main thread through a WeakPtr-guarded task.
void MediaPlayerPrivateMediaSourceAVFObjC::pause()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    auto weakThis = createWeakPtr();
    callOnMainThread([weakThis] {
        if (weakThis)
            weakThis.get()->pauseInternal();
    });
}
296
// Main-thread playback stop: clears the playing flag and halts the synchronizer.
void MediaPlayerPrivateMediaSourceAVFObjC::pauseInternal()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    m_playing = false;
    [m_synchronizer setRate:0];
}
303
// Playback is considered paused whenever the synchronizer's rate is zero.
bool MediaPlayerPrivateMediaSourceAVFObjC::paused() const
{
    return [m_synchronizer rate] == 0;
}
308
// Applies the volume to every attached audio renderer. The map keys are the
// renderers stored as CFTypeRef, hence the bridge cast back.
void MediaPlayerPrivateMediaSourceAVFObjC::setVolume(float volume)
{
    ALWAYS_LOG(LOGIDENTIFIER, volume);
    for (const auto& key : m_sampleBufferAudioRendererMap.keys())
        [(__bridge AVSampleBufferAudioRenderer *)key.get() setVolume:volume];
}
315
// Fast-forward/rewind scanning is supported by this engine.
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsScanning() const
{
    return true;
}
320
// Applies the muted state to every attached audio renderer (same key/bridge
// scheme as setVolume()).
void MediaPlayerPrivateMediaSourceAVFObjC::setMuted(bool muted)
{
    ALWAYS_LOG(LOGIDENTIFIER, muted);
    for (const auto& key : m_sampleBufferAudioRendererMap.keys())
        [(__bridge AVSampleBufferAudioRenderer *)key.get() setMuted:muted];
}
327
// Returns the cached natural video size (updated via setNaturalSize()).
FloatSize MediaPlayerPrivateMediaSourceAVFObjC::naturalSize() const
{
    return m_naturalSize;
}
332
// True when a media source exists and it currently exposes a video track.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasVideo() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->hasVideo();
}
340
// True when a media source exists and it currently exposes an audio track.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasAudio() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->hasAudio();
}
348
// Tracks page visibility. Becoming visible re-evaluates the rendering path
// (layer vs. decompression session); becoming hidden changes nothing here.
void MediaPlayerPrivateMediaSourceAVFObjC::setVisible(bool visible)
{
    if (m_visible == visible)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, visible);
    m_visible = visible;
    if (m_visible)
        acceleratedRenderingStateChanged();
}
359
// Duration as reported by the media source, or zero before load() runs.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::durationMediaTime() const
{
    if (!m_mediaSourcePrivate)
        return MediaTime::zeroTime();
    return m_mediaSourcePrivate->duration();
}
364
// Current playback position, read from the synchronizer's timebase. Negative
// times are clamped to zero; while the synchronizer has not yet caught up to a
// just-issued seek, the seek target is reported instead so time never appears
// to move backwards across a seek. (Check order matters: a negative
// synchronizer time reports zero even when a later seek target exists.)
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::currentMediaTime() const
{
    MediaTime synchronizerTime = PAL::toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
    if (synchronizerTime < MediaTime::zeroTime())
        return MediaTime::zeroTime();
    if (synchronizerTime < m_lastSeekTime)
        return m_lastSeekTime;
    return synchronizerTime;
}
374
// MediaSource presentations always start at time zero.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::startTime() const
{
    return MediaTime::zeroTime();
}
379
// No initial-time offset for MediaSource presentations.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::initialTime() const
{
    return MediaTime::zeroTime();
}
384
// Records a pending seek and schedules seekInternal() on a zero-delay timer;
// a newer request replaces any seek still waiting on the timer.
void MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(const MediaTime& time, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold)
{
    INFO_LOG(LOGIDENTIFIER, "time = ", time, ", negativeThreshold = ", negativeThreshold, ", positiveThreshold = ", positiveThreshold);

    m_seeking = true;
    // Removed an unused local WeakPtr that was created here but never captured.
    m_pendingSeek = std::make_unique<PendingSeek>(time, negativeThreshold, positiveThreshold);

    if (m_seekTimer.isActive())
        m_seekTimer.stop();
    m_seekTimer.startOneShot(0_s);
}
397
// Performs the pending seek: resolves the target time (exact, or via the media
// source's fast-seek when tolerances were given), rebases it onto the default
// timescale, stops the synchronizer at the target, and tells the media source
// to seek. Completion is normally signalled by the time-jumped observer set up
// in the constructor.
void MediaPlayerPrivateMediaSourceAVFObjC::seekInternal()
{
    // Take ownership of the pending request so a re-entrant seek can queue a new one.
    std::unique_ptr<PendingSeek> pendingSeek;
    pendingSeek.swap(m_pendingSeek);

    if (!pendingSeek)
        return;

    if (!m_mediaSourcePrivate)
        return;

    if (!pendingSeek->negativeThreshold && !pendingSeek->positiveThreshold)
        m_lastSeekTime = pendingSeek->targetTime;
    else
        m_lastSeekTime = m_mediaSourcePrivate->fastSeekTimeForMediaTime(pendingSeek->targetTime, pendingSeek->positiveThreshold, pendingSeek->negativeThreshold);

    // Normalize double-backed times onto the default timescale so comparisons
    // against the synchronizer's rational time are exact.
    if (m_lastSeekTime.hasDoubleValue())
        m_lastSeekTime = MediaTime::createWithDouble(m_lastSeekTime.toDouble(), MediaTime::DefaultTimeScale);

    MediaTime synchronizerTime = PAL::toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
    INFO_LOG(LOGIDENTIFIER, "seekTime = ", m_lastSeekTime, ", synchronizerTime = ", synchronizerTime);

    bool doesNotRequireSeek = synchronizerTime == m_lastSeekTime;

    m_mediaSourcePrivate->willSeek();
    [m_synchronizer setRate:0 time:PAL::toCMTime(m_lastSeekTime)];
    m_mediaSourcePrivate->seekToTime(m_lastSeekTime);

    // In cases where the destination seek time precisely matches the synchronizer's existing time
    // no time jumped notification will be issued. In this case, just notify the MediaPlayer that
    // the seek completed successfully.
    if (doesNotRequireSeek) {
        m_seeking = false;

        if (shouldBePlaying())
            [m_synchronizer setRate:m_rate];
        // Explicit comparison for consistency with the time-jumped observer;
        // equivalent to the previous truthiness check since !seeking() already
        // implies m_seekCompleted == SeekCompleted.
        if (!seeking() && m_seekCompleted == SeekCompleted)
            m_player->timeChanged();
    }
}
438
// Called when a seek will complete asynchronously (e.g. data must be appended
// first); parks the completion state machine in Seeking until seekCompleted().
void MediaPlayerPrivateMediaSourceAVFObjC::waitForSeekCompleted()
{
    if (!m_seeking)
        return;
    ALWAYS_LOG(LOGIDENTIFIER);
    m_seekCompleted = Seeking;
}
446
// Finishes an asynchronous seek. If video is present but no frame has been
// decoded yet, defer completion until setHasAvailableVideoFrame(true) re-enters
// this method; otherwise resume playback (if appropriate) and notify the player.
void MediaPlayerPrivateMediaSourceAVFObjC::seekCompleted()
{
    if (m_seekCompleted == SeekCompleted)
        return;
    if (hasVideo() && !m_hasAvailableVideoFrame) {
        ALWAYS_LOG(LOGIDENTIFIER, "waiting for video frame");
        m_seekCompleted = WaitingForAvailableFame;
        return;
    }
    ALWAYS_LOG(LOGIDENTIFIER);
    m_seekCompleted = SeekCompleted;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
    if (!m_seeking)
        m_player->timeChanged();
}
463
// A seek is in progress while either the seek itself is pending (m_seeking) or
// its asynchronous completion has not reached SeekCompleted.
bool MediaPlayerPrivateMediaSourceAVFObjC::seeking() const
{
    return m_seeking || m_seekCompleted != SeekCompleted;
}
468
// Stores the requested playback rate, clamped to >= 0, and applies it if
// playback should currently be running.
void MediaPlayerPrivateMediaSourceAVFObjC::setRateDouble(double rate)
{
    // AVSampleBufferRenderSynchronizer does not support negative rate yet.
    m_rate = std::max<double>(rate, 0);
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}
476
// Selects the time-pitch algorithm on every audio renderer: Spectral preserves
// pitch at non-1x rates, Varispeed lets pitch follow the rate.
void MediaPlayerPrivateMediaSourceAVFObjC::setPreservesPitch(bool preservesPitch)
{
    ALWAYS_LOG(LOGIDENTIFIER, preservesPitch);
    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    for (const auto& key : m_sampleBufferAudioRendererMap.keys())
        [(__bridge AVSampleBufferAudioRenderer *)key.get() setAudioTimePitchAlgorithm:algorithm];
}
484
// Returns the cached network state.
MediaPlayer::NetworkState MediaPlayerPrivateMediaSourceAVFObjC::networkState() const
{
    return m_networkState;
}
489
// Returns the cached ready state.
MediaPlayer::ReadyState MediaPlayerPrivateMediaSourceAVFObjC::readyState() const
{
    return m_readyState;
}
494
// The seekable range spans the whole presentation: [startTime, duration].
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::seekable() const
{
    return std::make_unique<PlatformTimeRanges>(minMediaTimeSeekable(), maxMediaTimeSeekable());
}
499
// The seekable range extends to the full duration.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::maxMediaTimeSeekable() const
{
    return durationMediaTime();
}
504
// The seekable range begins at the presentation start (zero).
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::minMediaTimeSeekable() const
{
    return startTime();
}
509
// Buffered ranges as reported by the media source, or an empty range set
// before load() has created one.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::buffered() const
{
    if (!m_mediaSourcePrivate)
        return std::make_unique<PlatformTimeRanges>();
    return m_mediaSourcePrivate->buffered();
}
514
// Reports and clears the "loading progressed" flag (set elsewhere when data is
// appended); the mutable member makes this read-and-reset legal in a const method.
bool MediaPlayerPrivateMediaSourceAVFObjC::didLoadingProgress() const
{
    bool progressed = m_loadingProgressed;
    m_loadingProgressed = false;
    return progressed;
}
521
// Intentionally ignored: presentation size does not affect this engine.
void MediaPlayerPrivateMediaSourceAVFObjC::setSize(const IntSize&)
{
    // No-op.
}
526
// Returns a CGImage snapshot of the current frame, refreshing the cached image
// first; may be null if no new pixel buffer could be produced and none is cached.
NativeImagePtr MediaPlayerPrivateMediaSourceAVFObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
532
// Refreshes m_lastPixelBuffer from the decompression session. Only applicable
// on the decompression-session path (returns false if a display layer is in
// use or there is no session). For the first frame any later frame is accepted
// (AllowLater); afterwards only a frame for the exact current time replaces
// the cached one. Returns true only when a new buffer was stored.
bool MediaPlayerPrivateMediaSourceAVFObjC::updateLastPixelBuffer()
{
    if (m_sampleBufferDisplayLayer || !m_decompressionSession)
        return false;

    auto flags = !m_lastPixelBuffer ? WebCoreDecompressionSession::AllowLater : WebCoreDecompressionSession::ExactTime;
    auto newPixelBuffer = m_decompressionSession->imageForTime(currentMediaTime(), flags);
    if (!newPixelBuffer)
        return false;

    m_lastPixelBuffer = newPixelBuffer;
    return true;
}
546
// Converts the freshly updated pixel buffer into a CGImage (via a lazily
// created BGRA conformer) and caches it in m_lastImage. Returns false when no
// new pixel buffer was available, leaving any previously cached image intact.
bool MediaPlayerPrivateMediaSourceAVFObjC::updateLastImage()
{
    if (!updateLastPixelBuffer())
        return false;

    ASSERT(m_lastPixelBuffer);

    // Lazily create the conformer that converts decoded buffers to 32BGRA.
    if (!m_rgbConformer) {
        auto attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
        m_rgbConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
    }

    m_lastImage = m_rgbConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());
    return true;
}
562
// Forwards to paintCurrentFrameInContext(); this engine has no separate paint path.
void MediaPlayerPrivateMediaSourceAVFObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    paintCurrentFrameInContext(context, rect);
}
567
// Draws the current frame into the graphics context, scaling the whole image
// into outputRect. Silently returns when painting is disabled or no frame
// image is available.
void MediaPlayerPrivateMediaSourceAVFObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& outputRect)
{
    if (context.paintingDisabled())
        return;

    auto image = nativeImageForCurrentTime();
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(image.get()), CGImageGetHeight(image.get()));
    context.drawNativeImage(image, imageRect.size(), outputRect, imageRect);
}
581
// Copies the current video frame into a WebGL texture. The first call flips the
// player onto the decompression-session rendering path (WebGL needs raw pixel
// buffers, not a display layer). Returns false when no frame is available.
bool MediaPlayerPrivateMediaSourceAVFObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // We have been asked to paint into a WebGL canvas, so take that as a signal to create
    // a decompression session, even if that means the native video can't also be displayed
    // in page.
    if (!m_hasBeenAskedToPaintGL) {
        m_hasBeenAskedToPaintGL = true;
        acceleratedRenderingStateChanged();
    }

    ASSERT(context);

    // Try to fetch a fresh frame; if that fails we fall back to the previously
    // cached buffer. Guard unconditionally: previously a failed update with no
    // cached buffer fell through to the CoreVideo calls with a null buffer.
    updateLastPixelBuffer();
    if (!m_lastPixelBuffer)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t height = CVPixelBufferGetHeight(m_lastPixelBuffer.get());

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
607
// True once at least one video frame has been decoded/enqueued for display.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasAvailableVideoFrame() const
{
    return m_hasAvailableVideoFrame;
}
612
// This engine always renders through a compositing layer or GL path.
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsAcceleratedRendering() const
{
    return true;
}
617
// Chooses the rendering path: once WebGL painting has been requested, frames
// must be produced via a decompression session; otherwise the normal
// AVSampleBufferDisplayLayer path is used. The two paths are mutually
// exclusive, so the unused one is torn down.
void MediaPlayerPrivateMediaSourceAVFObjC::acceleratedRenderingStateChanged()
{
    if (m_hasBeenAskedToPaintGL) {
        destroyLayer();
        ensureDecompressionSession();
    } else {
        destroyDecompressionSession();
        ensureLayer();
    }
}
628
// Forwards active-source-buffer changes from the media source to the client.
void MediaPlayerPrivateMediaSourceAVFObjC::notifyActiveSourceBuffersChanged()
{
    m_player->client().mediaPlayerActiveSourceBuffersChanged(m_player);
}
633
// MediaSource content is treated as a stored (seekable) stream.
MediaPlayer::MovieLoadType MediaPlayerPrivateMediaSourceAVFObjC::movieLoadType() const
{
    return MediaPlayer::StoredStream;
}
638
// Intentionally a no-op: rendering setup happens in acceleratedRenderingStateChanged().
void MediaPlayerPrivateMediaSourceAVFObjC::prepareForRendering()
{
    // No-op.
}
643
// Human-readable engine name for diagnostics.
String MediaPlayerPrivateMediaSourceAVFObjC::engineDescription() const
{
    static NeverDestroyed<String> description(MAKE_STATIC_STRING_IMPL("AVFoundation MediaSource Engine"));
    return description;
}
649
// Not implemented yet; see the referenced bug.
String MediaPlayerPrivateMediaSourceAVFObjC::languageOfPrimaryAudioTrack() const
{
    // FIXME(125158): implement languageOfPrimaryAudioTrack()
    return emptyString();
}
655
// No extra GC-visible memory cost is reported by this engine.
size_t MediaPlayerPrivateMediaSourceAVFObjC::extraMemoryCost() const
{
    return 0;
}
660
// Returns playback-quality counters. On the decompression-session path the
// session tracks them directly (no display-composited count available, hence 0).
// On the layer path they come from AVSampleBufferDisplayLayer's performance
// metrics SPI; the composited-frame count is fetched only when the SPI object
// supports it.
Optional<VideoPlaybackQualityMetrics> MediaPlayerPrivateMediaSourceAVFObjC::videoPlaybackQualityMetrics()
{
    if (m_decompressionSession) {
        return VideoPlaybackQualityMetrics {
            m_decompressionSession->totalVideoFrames(),
            m_decompressionSession->droppedVideoFrames(),
            m_decompressionSession->corruptedVideoFrames(),
            m_decompressionSession->totalFrameDelay().toDouble(),
            0,
        };
    }

    auto metrics = [m_sampleBufferDisplayLayer videoPerformanceMetrics];
    if (!metrics)
        return WTF::nullopt;

    uint32_t displayCompositedFrames = 0;
    ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
    if ([metrics respondsToSelector:@selector(numberOfDisplayCompositedVideoFrames)])
        displayCompositedFrames = [metrics numberOfDisplayCompositedVideoFrames];
    ALLOW_NEW_API_WITHOUT_GUARDS_END

    return VideoPlaybackQualityMetrics {
        static_cast<uint32_t>([metrics totalNumberOfVideoFrames]),
        static_cast<uint32_t>([metrics numberOfDroppedVideoFrames]),
        static_cast<uint32_t>([metrics numberOfCorruptedVideoFrames]),
        [metrics totalFrameDelay],
        displayCompositedFrames,
    };
}
691
692 #pragma mark -
693 #pragma mark Utility Methods
694
// Lazily creates the AVSampleBufferDisplayLayer, attaches it to the
// synchronizer and the media source, hands it to the fullscreen layer manager,
// and tells the client the rendering mode changed.
void MediaPlayerPrivateMediaSourceAVFObjC::ensureLayer()
{
    if (m_sampleBufferDisplayLayer)
        return;

    m_sampleBufferDisplayLayer = adoptNS([PAL::allocAVSampleBufferDisplayLayerInstance() init]);
#ifndef NDEBUG
    [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaSource AVSampleBufferDisplayLayer"];
#endif

    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setVideoLayer(m_sampleBufferDisplayLayer.get());
    m_videoFullscreenLayerManager->setVideoLayer(m_sampleBufferDisplayLayer.get(), snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size());
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
711
// Tears down the display layer: detaches it from the synchronizer at the
// current time, clears it from the media source and layer manager, and resets
// the available-frame state.
void MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer()
{
    if (!m_sampleBufferDisplayLayer)
        return;

    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setVideoLayer(nullptr);
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
    m_sampleBufferDisplayLayer = nullptr;
    setHasAvailableVideoFrame(false);
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
729
// Lazily creates the OpenGL decompression session (the WebGL/painting path),
// slaves it to the synchronizer's timebase, and connects it to the media source.
void MediaPlayerPrivateMediaSourceAVFObjC::ensureDecompressionSession()
{
    if (m_decompressionSession)
        return;

    m_decompressionSession = WebCoreDecompressionSession::createOpenGL();
    m_decompressionSession->setTimebase([m_synchronizer timebase]);

    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setDecompressionSession(m_decompressionSession.get());

    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
743
// Tears down the decompression session and resets the available-frame state.
void MediaPlayerPrivateMediaSourceAVFObjC::destroyDecompressionSession()
{
    if (!m_decompressionSession)
        return;

    // Detach from the media source before invalidating so no further samples arrive.
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setDecompressionSession(nullptr);

    m_decompressionSession->invalidate();
    m_decompressionSession = nullptr;
    setHasAvailableVideoFrame(false);
}
756
// The synchronizer should run only when playback was requested, no seek is in
// flight, every renderer has a sample ready, and enough data is buffered.
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldBePlaying() const
{
    return m_playing && !seeking() && allRenderersHaveAvailableSamples() && m_readyState >= MediaPlayer::HaveFutureData;
}
761
// Records whether a decoded video frame is ready for display. On the
// false->true transition this also: notifies the player of the first frame,
// completes a seek that was waiting on a frame, and releases a ready-state
// change that was deferred until a frame existed.
void MediaPlayerPrivateMediaSourceAVFObjC::setHasAvailableVideoFrame(bool flag)
{
    if (m_hasAvailableVideoFrame == flag)
        return;

    DEBUG_LOG(LOGIDENTIFIER, flag);
    m_hasAvailableVideoFrame = flag;
    updateAllRenderersHaveAvailableSamples();

    if (!m_hasAvailableVideoFrame)
        return;

    m_player->firstVideoFrameAvailable();
    if (m_seekCompleted == WaitingForAvailableFame)
        seekCompleted();

    if (m_readyStateIsWaitingForAvailableFrame) {
        m_readyStateIsWaitingForAvailableFrame = false;
        m_player->readyStateChanged();
    }
}
783
// Records whether a given audio renderer has an audible sample queued, then
// re-evaluates the combined renderer readiness. Unknown renderers are ignored.
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::setHasAvailableAudioSample(AVSampleBufferAudioRenderer* renderer, bool flag)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    auto iter = m_sampleBufferAudioRendererMap.find((__bridge CFTypeRef)renderer);
    if (iter == m_sampleBufferAudioRendererMap.end())
        return;

    auto& properties = iter->value;
    if (properties.hasAudibleSample == flag)
        return;
    DEBUG_LOG(LOGIDENTIFIER, flag);
    properties.hasAudibleSample = flag;
    updateAllRenderersHaveAvailableSamples();
}
799
// Recomputes whether every active renderer (video, when present, plus all
// audio renderers) currently has a sample to present, and starts or stops the
// synchronizer accordingly when the combined state changes.
void MediaPlayerPrivateMediaSourceAVFObjC::updateAllRenderersHaveAvailableSamples()
{
    bool allRenderersHaveAvailableSamples = true;
    if (hasVideo() && !m_hasAvailableVideoFrame)
        allRenderersHaveAvailableSamples = false;
    else {
        for (auto& properties : m_sampleBufferAudioRendererMap.values()) {
            if (!properties.hasAudibleSample) {
                allRenderersHaveAvailableSamples = false;
                break;
            }
        }
    }

    if (m_allRenderersHaveAvailableSamples == allRenderersHaveAvailableSamples)
        return;

    DEBUG_LOG(LOGIDENTIFIER, allRenderersHaveAvailableSamples);
    m_allRenderersHaveAvailableSamples = allRenderersHaveAvailableSamples;

    // Keep the synchronizer's rate in sync with the new readiness state.
    if (shouldBePlaying() && [m_synchronizer rate] != m_rate)
        [m_synchronizer setRate:m_rate];
    else if (!shouldBePlaying() && [m_synchronizer rate])
        [m_synchronizer setRate:0];
}
829
// Reacts to a duration change from the media source: notifies the player and
// (re)installs a boundary time observer at the new duration so playback pauses
// exactly at the end. If playback has already passed the new duration, pause
// immediately.
void MediaPlayerPrivateMediaSourceAVFObjC::durationChanged()
{
    m_player->durationChanged();

    // Drop the observer set for the previous duration before installing a new one.
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];

    if (!m_mediaSourcePrivate)
        return;

    MediaTime duration = m_mediaSourcePrivate->duration();
    auto weakThis = createWeakPtr();
    NSArray* times = @[[NSValue valueWithCMTime:PAL::toCMTime(duration)]];

    auto logSiteIdentifier = LOGIDENTIFIER;
    DEBUG_LOG(logSiteIdentifier, duration);
    UNUSED_PARAM(logSiteIdentifier);

    m_durationObserver = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[weakThis, duration, logSiteIdentifier, this] {
        if (!weakThis)
            return;

        MediaTime now = weakThis->currentMediaTime();
        DEBUG_LOG(logSiteIdentifier, "boundary time observer called, now = ", now);

        weakThis->pauseInternal();
        // The observer should only fire at (or past) the duration; snap the
        // synchronizer to the exact end if it fired early.
        if (now < duration) {
            ERROR_LOG(logSiteIdentifier, "ERROR: boundary time observer called before duration");
            [weakThis->m_synchronizer setRate:0 time:PAL::toCMTime(duration)];
        }
        weakThis->m_player->timeChanged();

    }];

    if (m_playing && duration <= currentMediaTime())
        pauseInternal();
}
867
// Called when the synchronizer's effective playback rate changes; simply
// forwards the notification to the MediaPlayer client.
void MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged()
{
    m_player->rateChanged();
}
872
// Schedules a natural-size change to take effect when playback reaches `time`.
// Observers are queued FIFO in m_sizeChangeObservers so each firing removes
// the oldest one; if the playhead is already at or past `time`, the size is
// applied immediately as well.
void MediaPlayerPrivateMediaSourceAVFObjC::sizeWillChangeAtTime(const MediaTime& time, const FloatSize& size)
{
    auto weakThis = m_sizeChangeObserverWeakPtrFactory.createWeakPtr(*this);
    NSArray* times = @[[NSValue valueWithCMTime:PAL::toCMTime(time)]];
    RetainPtr<id> observer = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[this, weakThis, size] {
        // weakThis is revoked by flushPendingSizeChanges(), so a stale
        // observer that still fires becomes a no-op.
        if (!weakThis)
            return;

        ASSERT(!m_sizeChangeObservers.isEmpty());
        if (!m_sizeChangeObservers.isEmpty()) {
            RetainPtr<id> observer = m_sizeChangeObservers.takeFirst();
            [m_synchronizer removeTimeObserver:observer.get()];
        }
        setNaturalSize(size);
    }];
    m_sizeChangeObservers.append(WTFMove(observer));

    if (currentMediaTime() >= time)
        setNaturalSize(size);
}
893
void MediaPlayerPrivateMediaSourceAVFObjC::setNaturalSize(const FloatSize& size)
{
    // Skip redundant updates so the player is not notified spuriously.
    if (m_naturalSize == size)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, size);

    m_naturalSize = size;
    m_player->sizeChanged();
}
904
void MediaPlayerPrivateMediaSourceAVFObjC::flushPendingSizeChanges()
{
    // Tear down every queued boundary-time observer, then revoke the weak
    // pointers so any observer block already in flight becomes a no-op.
    while (!m_sizeChangeObservers.isEmpty())
        [m_synchronizer removeTimeObserver:m_sizeChangeObservers.takeFirst().get()];
    m_sizeChangeObserverWeakPtrFactory.revokeAll();
}
913
914 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
915 #if HAVE(AVSTREAMSESSION)
// Lazily creates the AVStreamSession used for legacy-EME secure-stop storage.
// Returns nil when the class is unavailable, lacks persistent-storage support,
// or no storage directory can be provided/created.
AVStreamSession* MediaPlayerPrivateMediaSourceAVFObjC::streamSession()
{
    // Consistently qualify the soft-linked accessor with PAL:: (the second
    // call and the alloc below already were).
    if (!PAL::getAVStreamSessionClass() || ![PAL::getAVStreamSessionClass() instancesRespondToSelector:@selector(initWithStorageDirectoryAtURL:)])
        return nil;

    if (!m_streamSession) {
        // Secure-stop records need a client-supplied storage directory.
        String storageDirectory = m_player->mediaKeysStorageDirectory();
        if (storageDirectory.isEmpty())
            return nil;

        if (!FileSystem::fileExists(storageDirectory)) {
            if (!FileSystem::makeAllDirectories(storageDirectory))
                return nil;
        }

        String storagePath = FileSystem::pathByAppendingComponent(storageDirectory, "SecureStop.plist");
        m_streamSession = adoptNS([PAL::allocAVStreamSessionInstance() initWithStorageDirectoryAtURL:[NSURL fileURLWithPath:storagePath]]);
    }
    return m_streamSession.get();
}
936 #endif
937
// Attaches (or detaches, when `session` is null) a legacy CDM session and
// propagates it to the stream session and every source buffer.
void MediaPlayerPrivateMediaSourceAVFObjC::setCDMSession(LegacyCDMSession* session)
{
    if (session == m_session)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    m_session = makeWeakPtr(toCDMSessionMediaSourceAVFObjC(session));

#if HAVE(AVSTREAMSESSION)
    if (CDMSessionAVStreamSession* cdmStreamSession = toCDMSessionAVStreamSession(m_session.get()))
        cdmStreamSession->setStreamSession(streamSession());
#endif

    // A session can be set before a media source is attached; guard against a
    // null m_mediaSourcePrivate as the other CDM entry points in this file do.
    if (!m_mediaSourcePrivate)
        return;

    for (auto& sourceBuffer : m_mediaSourcePrivate->sourceBuffers())
        sourceBuffer->setCDMSession(m_session.get());
}
955 #endif // ENABLE(LEGACY_ENCRYPTED_MEDIA)
956
957 #if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
// Forwards an encrypted-media "key needed" event, with its initialization
// data, to the MediaPlayer client.
void MediaPlayerPrivateMediaSourceAVFObjC::keyNeeded(Uint8Array* initData)
{
    m_player->keyNeeded(initData);
}
962 #endif
963
// Relays a change in output obscuring (insufficient external link protection)
// to the media source, when ENCRYPTED_MEDIA is enabled and a source exists.
void MediaPlayerPrivateMediaSourceAVFObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool obscured)
{
#if ENABLE(ENCRYPTED_MEDIA)
    ALWAYS_LOG(LOGIDENTIFIER, obscured);
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->outputObscuredDueToInsufficientExternalProtectionChanged(obscured);
#else
    UNUSED_PARAM(obscured);
#endif
}
974
975 #if ENABLE(ENCRYPTED_MEDIA)
// Forwards CDM instance attachment to the media source, if one exists.
void MediaPlayerPrivateMediaSourceAVFObjC::cdmInstanceAttached(CDMInstance& instance)
{
    ALWAYS_LOG(LOGIDENTIFIER);
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->cdmInstanceAttached(instance);
}
982
// Forwards CDM instance detachment to the media source, if one exists.
void MediaPlayerPrivateMediaSourceAVFObjC::cdmInstanceDetached(CDMInstance& instance)
{
    ALWAYS_LOG(LOGIDENTIFIER);
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->cdmInstanceDetached(instance);
}
989
// Asks the media source to retry decryption with the given CDM instance.
void MediaPlayerPrivateMediaSourceAVFObjC::attemptToDecryptWithInstance(CDMInstance& instance)
{
    ALWAYS_LOG(LOGIDENTIFIER);
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->attemptToDecryptWithInstance(instance);
}
996
bool MediaPlayerPrivateMediaSourceAVFObjC::waitingForKey() const
{
    // Without a media source nothing can be blocked waiting on a key.
    return m_mediaSourcePrivate && m_mediaSourcePrivate->waitingForKey();
}
1001
// Notifies the MediaPlayer client that the waiting-for-key state changed.
void MediaPlayerPrivateMediaSourceAVFObjC::waitingForKeyChanged()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    m_player->waitingForKeyChanged();
}
1007
// Forwards encrypted-media initialization data (and its type) to the player.
void MediaPlayerPrivateMediaSourceAVFObjC::initializationDataEncountered(const String& initDataType, RefPtr<ArrayBuffer>&& initData)
{
    ALWAYS_LOG(LOGIDENTIFIER, initDataType);
    m_player->initializationDataEncountered(initDataType, WTFMove(initData));
}
1013 #endif
1014
// Returns the player's list of content types that must use hardware decode.
const Vector<ContentType>& MediaPlayerPrivateMediaSourceAVFObjC::mediaContentTypesRequiringHardwareSupport() const
{
    return m_player->mediaContentTypesRequiringHardwareSupport();
}
1019
// Returns whether the player wants hardware decode support to be verified.
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldCheckHardwareSupport() const
{
    return m_player->shouldCheckHardwareSupport();
}
1024
void MediaPlayerPrivateMediaSourceAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (readyState == m_readyState)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, readyState);
    m_readyState = readyState;

    // Keep the synchronizer's rate in step with whether playback should run.
    [m_synchronizer setRate:shouldBePlaying() ? m_rate : 0];

    // With video present but no decoded frame yet, defer the
    // readyStateChanged() notification until a frame becomes available.
    if (m_readyState >= MediaPlayerEnums::HaveCurrentData && hasVideo() && !m_hasAvailableVideoFrame) {
        m_readyStateIsWaitingForAvailableFrame = true;
        return;
    }

    m_player->readyStateChanged();
}
1045
void MediaPlayerPrivateMediaSourceAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    // Ignore redundant transitions to avoid spurious client notifications.
    if (networkState == m_networkState)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, networkState);
    m_networkState = networkState;
    m_player->networkStateChanged();
}
1055
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    // Renderers are tracked by CF identity; ignore ones we already know about.
    auto addResult = m_sampleBufferAudioRendererMap.add((__bridge CFTypeRef)audioRenderer, AudioRendererProperties());
    if (!addResult.isNewEntry)
        return;

    // Mirror the player's current audio settings onto the new renderer.
    [audioRenderer setMuted:m_player->muted()];
    [audioRenderer setVolume:m_player->volume()];
    [audioRenderer setAudioTimePitchAlgorithm:(m_player->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    [m_synchronizer addRenderer:audioRenderer];
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
1070
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    auto it = m_sampleBufferAudioRendererMap.find((__bridge CFTypeRef)audioRenderer);
    if (it == m_sampleBufferAudioRendererMap.end())
        return;

    // Detach the renderer from the synchronizer as of "now"; nothing needs to
    // happen on completion, so the handler is intentionally empty.
    CMTime now = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:audioRenderer atTime:now withCompletionHandler:^(BOOL) {
        // No-op.
    }];

    m_sampleBufferAudioRendererMap.remove(it);
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
1087
// Track characteristics affect renderer readiness, so recompute it before
// notifying the player of the change.
void MediaPlayerPrivateMediaSourceAVFObjC::characteristicsChanged()
{
    updateAllRenderersHaveAvailableSamples();
    m_player->characteristicChanged();
}
1093
// Installs (or clears) the fullscreen hosting layer. The current frame image
// is refreshed first so the manager can show it during the layer transition.
void MediaPlayerPrivateMediaSourceAVFObjC::setVideoFullscreenLayer(PlatformLayer *videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    updateLastImage();
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
}
1099
// Forwards the fullscreen layer's frame rect to the layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1104
bool MediaPlayerPrivateMediaSourceAVFObjC::requiresTextTrackRepresentation() const
{
    // A dedicated text-track representation is needed only while a fullscreen
    // layer is hosting the video.
    return m_videoFullscreenLayerManager->videoFullscreenLayer() != nil;
}
1109     
// Asks the layer manager to re-align text-track bounds with the video layer.
void MediaPlayerPrivateMediaSourceAVFObjC::syncTextTrackBounds()
{
    m_videoFullscreenLayerManager->syncTextTrackBounds();
}
1114     
// Forwards the (possibly null) text-track representation to the layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
}
1119
1120 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Stores the wireless playback target; playback routing is controlled
// separately via setShouldPlayToPlaybackTarget().
void MediaPlayerPrivateMediaSourceAVFObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);
}
1125
void MediaPlayerPrivateMediaSourceAVFObjC::setShouldPlayToPlaybackTarget(bool shouldPlayToTarget)
{
    if (m_shouldPlayToTarget == shouldPlayToTarget)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, shouldPlayToTarget);
    m_shouldPlayToTarget = shouldPlayToTarget;

    // The player may already have been cleared during teardown.
    if (m_player)
        m_player->currentPlaybackTargetIsWirelessChanged();
}
1137
bool MediaPlayerPrivateMediaSourceAVFObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_playbackTarget)
        return false;

    // Wireless playback counts only when we both intend to play to the target
    // and the target currently has an active route.
    bool hasActiveTarget = m_shouldPlayToTarget && m_playbackTarget->hasActiveRoute();
    INFO_LOG(LOGIDENTIFIER, hasActiveTarget);
    return hasActiveTarget;
}
1147 #endif
1148
// Schedules `task` to run on the main queue when playback reaches `time`.
// Only one pending task is supported: scheduling a new one cancels any prior
// observer. Always returns true.
bool MediaPlayerPrivateMediaSourceAVFObjC::performTaskAtMediaTime(WTF::Function<void()>&& task, MediaTime time)
{
    // __block lets the ObjC block take ownership of the move-only Function.
    __block WTF::Function<void()> taskIn = WTFMove(task);

    if (m_performTaskObserver)
        [m_synchronizer removeTimeObserver:m_performTaskObserver.get()];

    // Qualify toCMTime with PAL:: for consistency with the other call sites in
    // this file.
    m_performTaskObserver = [m_synchronizer addBoundaryTimeObserverForTimes:@[[NSValue valueWithCMTime:PAL::toCMTime(time)]] queue:dispatch_get_main_queue() usingBlock:^{
        taskIn();
    }];
    return true;
}
1161
1162 #if !RELEASE_LOG_DISABLED
// All release logging from this player goes to the MediaSource channel.
WTFLogChannel& MediaPlayerPrivateMediaSourceAVFObjC::logChannel() const
{
    return LogMediaSource;
}
1167 #endif
1168
1169 }
1170
1171 #endif