Remove WebCoreSystemInterface
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateMediaSourceAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "CDMSessionAVStreamSession.h"
34 #import "CDMSessionMediaSourceAVFObjC.h"
35 #import "FileSystem.h"
36 #import "GraphicsContextCG.h"
37 #import "Logging.h"
38 #import "MediaSourcePrivateAVFObjC.h"
39 #import "MediaSourcePrivateClient.h"
40 #import "PixelBufferConformerCV.h"
41 #import "TextTrackRepresentation.h"
42 #import "TextureCacheCV.h"
43 #import "VideoTextureCopierCV.h"
44 #import "WebCoreDecompressionSession.h"
45 #import <AVFoundation/AVAsset.h>
46 #import <AVFoundation/AVTime.h>
47 #import <QuartzCore/CALayer.h>
48 #import <objc_runtime.h>
49 #import <pal/avfoundation/MediaTimeAVFoundation.h>
50 #import <pal/spi/mac/AVFoundationSPI.h>
51 #import <wtf/Deque.h>
52 #import <wtf/MainThread.h>
53 #import <wtf/NeverDestroyed.h>
54
55 #if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
56 #import "VideoFullscreenLayerManager.h"
57 #endif
58
59 #pragma mark - Soft Linking
60
61 #import "CoreMediaSoftLink.h"
62
63 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
64
65 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVAsset)
66 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVURLAsset)
67 #pragma clang diagnostic push
68 #pragma clang diagnostic ignored "-Wunknown-pragmas"
69 #pragma clang diagnostic ignored "-Wunguarded-availability-new"
70 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferAudioRenderer)
71 #pragma clang diagnostic pop
72 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
73 #pragma clang diagnostic push
74 #pragma clang diagnostic ignored "-Wunknown-pragmas"
75 #pragma clang diagnostic ignored "-Wunguarded-availability-new"
76 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferRenderSynchronizer)
77 #pragma clang diagnostic pop
78 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamDataParser)
79 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamSession);
80 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVVideoPerformanceMetrics)
81
82 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
83 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
84
85 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
86 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
87
#pragma mark -
#pragma mark AVVideoPerformanceMetrics

// SPI declaration: AVVideoPerformanceMetrics is a private AVFoundation class
// (soft-linked above) whose accessors feed videoPlaybackQualityMetrics().
@interface AVVideoPerformanceMetrics : NSObject
- (unsigned long)totalNumberOfVideoFrames;
- (unsigned long)numberOfDroppedVideoFrames;
- (unsigned long)numberOfCorruptedVideoFrames;
- (double)totalFrameDelay;
@end

// SPI category: exposes the private -videoPerformanceMetrics accessor on
// AVSampleBufferDisplayLayer. NOTE(review): private API; may change across OS releases.
@interface AVSampleBufferDisplayLayer (WebCoreAVSampleBufferDisplayLayerPrivate)
- (AVVideoPerformanceMetrics *)videoPerformanceMetrics;
@end

#pragma mark -
#pragma mark AVStreamSession

// SPI declaration: AVStreamSession backs encrypted-media (CDM) sessions.
@interface AVStreamSession : NSObject
- (instancetype)initWithStorageDirectoryAtURL:(NSURL *)storageDirectory;
@end
108
109 namespace WebCore {
110
111 #pragma mark -
112 #pragma mark MediaPlayerPrivateMediaSourceAVFObjC
113
// CoreMedia notification callback, invoked when the synchronizer's timebase
// changes its effective rate. It may fire on an arbitrary thread, so bounce to
// the main thread — holding only a weak reference — before touching the player.
static void CMTimebaseEffectiveRateChangedCallback(CMNotificationCenterRef, const void *listener, CFStringRef, const void *, CFTypeRef)
{
    // The listener was registered as the player instance (see the constructor);
    // recover it with a checked C++ cast instead of the original C-style cast.
    auto* player = static_cast<MediaPlayerPrivateMediaSourceAVFObjC*>(const_cast<void*>(listener));
    callOnMainThread([weakThis = player->createWeakPtr()] {
        if (!weakThis)
            return;
        weakThis.get()->effectiveRateChanged();
    });
}
123
MediaPlayerPrivateMediaSourceAVFObjC::MediaPlayerPrivateMediaSourceAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_synchronizer(adoptNS([allocAVSampleBufferRenderSynchronizerInstance() init]))
    , m_seekTimer(*this, &MediaPlayerPrivateMediaSourceAVFObjC::seekInternal)
    , m_session(nullptr)
    , m_networkState(MediaPlayer::Empty)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_rate(1)
    , m_playing(0)
    , m_seeking(false)
    , m_loadingProgressed(false)
#if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
    , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
#endif
{
    // Listen for effective-rate changes on the synchronizer's timebase; the
    // listener is removed with identical arguments in the destructor.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterAddListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase, 0);

    // addPeriodicTimeObserverForInterval: throws an exception if you pass a non-numeric CMTime, so just use
    // an arbitrarily large time value of once an hour:
    __block auto weakThis = createWeakPtr();
    m_timeJumpedObserver = [m_synchronizer addPeriodicTimeObserverForInterval:PAL::toCMTime(MediaTime::createWithDouble(3600)) queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
#if LOG_DISABLED
        UNUSED_PARAM(time);
#endif
        // FIXME: Remove the below once <rdar://problem/15798050> is fixed.
        if (!weakThis)
            return;

        // NOTE(review): the bare m_* accesses below implicitly capture `this`;
        // they are only safe because the weakThis check above guards them and
        // the observer is removed in the destructor — confirm nothing can fire
        // the block between destruction phases.
        if (m_seeking && !m_pendingSeek) {
            LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::m_timeJumpedObserver(%p) - time(%s)", weakThis.get(), toString(PAL::toMediaTime(time)).utf8().data());
            m_seeking = false;

            // Resume playback that was suspended for the seek, then tell the
            // client the current time jumped.
            if (shouldBePlaying())
                [m_synchronizer setRate:m_rate];
            if (!seeking() && m_seekCompleted == SeekCompleted)
                m_player->timeChanged();
        }

        // A seek was requested while the previous one was in flight; service it.
        if (m_pendingSeek)
            seekInternal();
    }];
}
168
MediaPlayerPrivateMediaSourceAVFObjC::~MediaPlayerPrivateMediaSourceAVFObjC()
{
    // Unregister the timebase rate-change listener added in the constructor.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterRemoveListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase);

    // Drop outstanding synchronizer time observers so their blocks can never
    // fire against a destroyed player.
    if (m_timeJumpedObserver)
        [m_synchronizer removeTimeObserver:m_timeJumpedObserver.get()];
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];
    flushPendingSizeChanges();

    // Tear down whichever rendering path (layer or decompression session) exists.
    destroyLayer();
    destroyDecompressionSession();

    m_seekTimer.stop();
}
186
187 #pragma mark -
188 #pragma mark MediaPlayer Factory Methods
189
void MediaPlayerPrivateMediaSourceAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    // Register a factory for this engine; only getSupportedTypes/supportsType
    // are supplied — the remaining engine callbacks are unused (0).
    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateMediaSourceAVFObjC>(player); },
        getSupportedTypes, supportsType, 0, 0, 0, 0);
    // Warm the MIME-type cache so supportsType() has data to consult.
    AVFoundationMIMETypeCache::singleton().loadTypes();
}

// Availability requires every soft-linked AVFoundation/CoreMedia class this
// engine depends on, including the -setMuted: SPI on the audio renderer.
bool MediaPlayerPrivateMediaSourceAVFObjC::isAvailable()
{
    return AVFoundationLibrary()
        && isCoreMediaFrameworkAvailable()
        && getAVStreamDataParserClass()
        && getAVSampleBufferAudioRendererClass()
        && getAVSampleBufferRenderSynchronizerClass()
        && class_getInstanceMethod(getAVSampleBufferAudioRendererClass(), @selector(setMuted:));
}

void MediaPlayerPrivateMediaSourceAVFObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    // The supported container types are exactly those AVFoundation reports.
    types = AVFoundationMIMETypeCache::singleton().types();
}
214
MediaPlayer::SupportsType MediaPlayerPrivateMediaSourceAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    // This engine does not support non-media-source sources.
    if (!parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    // Reject unknown or empty container types outright.
    if (parameters.type.isEmpty() || !AVFoundationMIMETypeCache::singleton().types().contains(parameters.type.containerType()))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    auto codecs = parameters.type.parameter(ContentType::codecsParameter());
    if (codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Let AVStreamDataParser translate codec strings it names differently,
    // when that SPI is present.
    NSString *outputCodecs = codecs;
    if ([getAVStreamDataParserClass() respondsToSelector:@selector(outputMIMECodecParameterForInputMIMECodecParameter:)])
        outputCodecs = [getAVStreamDataParserClass() outputMIMECodecParameterForInputMIMECodecParameter:outputCodecs];

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // Ask AVFoundation whether the full "type; codecs" string is playable.
    // (Fixed: removed a stray double semicolon on the return statement.)
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type.containerType(), (NSString *)outputCodecs];
    return [getAVURLAssetClass() isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
244
245 #pragma mark -
246 #pragma mark MediaPlayerPrivateInterface Overrides
247
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String&)
{
    // This media engine only supports MediaSource URLs.
    // NOTE(review): this sets state + notifies directly while the MediaStream
    // overload calls setNetworkState() — confirm the two paths are equivalent.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
}

void MediaPlayerPrivateMediaSourceAVFObjC::load(const String& url, MediaSourcePrivateClient* client)
{
    UNUSED_PARAM(url);

    // Create the media-source backend and hand it whichever renderer
    // (display layer or decompression session) currently exists.
    m_mediaSourcePrivate = MediaSourcePrivateAVFObjC::create(this, client);
    m_mediaSourcePrivate->setVideoLayer(m_sampleBufferDisplayLayer.get());
    m_mediaSourcePrivate->setDecompressionSession(m_decompressionSession.get());

    // Choose between layer- and session-based rendering for the new source.
    acceleratedRenderingStateChanged();
}

#if ENABLE(MEDIA_STREAM)
void MediaPlayerPrivateMediaSourceAVFObjC::load(MediaStreamPrivate&)
{
    // MediaStream sources are not supported by this engine.
    setNetworkState(MediaPlayer::FormatError);
}
#endif

void MediaPlayerPrivateMediaSourceAVFObjC::cancelLoad()
{
    // No-op: there is no asynchronous load to cancel for MediaSource playback.
}

void MediaPlayerPrivateMediaSourceAVFObjC::prepareToPlay()
{
    // No-op.
}

PlatformMedia MediaPlayerPrivateMediaSourceAVFObjC::platformMedia() const
{
    // Expose the underlying AVAsset (may be nil) to platform clients.
    PlatformMedia pm;
    pm.type = PlatformMedia::AVFoundationAssetType;
    pm.media.avfAsset = m_asset.get();
    return pm;
}

PlatformLayer* MediaPlayerPrivateMediaSourceAVFObjC::platformLayer() const
{
#if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
    // The fullscreen manager wraps the display layer so it can be reparented.
    return m_videoFullscreenLayerManager->videoInlineLayer();
#else
    return m_sampleBufferDisplayLayer.get();
#endif
}
297
void MediaPlayerPrivateMediaSourceAVFObjC::play()
{
    // Defer to the main thread; the player may be destroyed before the task
    // runs, so capture only a weak reference.
    auto weakThis = createWeakPtr();
    callOnMainThread([weakThis] {
        if (weakThis)
            weakThis.get()->playInternal();
    });
}
306
void MediaPlayerPrivateMediaSourceAVFObjC::playInternal()
{
    // Guard against play() racing ahead of load(): without a media source there
    // is no duration to compare against, and dereferencing it would crash.
    // (Every other accessor in this file null-checks m_mediaSourcePrivate.)
    if (!m_mediaSourcePrivate)
        return;

    // Ignore play requests at (or past) the end of the stream.
    if (currentMediaTime() >= m_mediaSourcePrivate->duration())
        return;

    m_playing = true;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}
316
void MediaPlayerPrivateMediaSourceAVFObjC::pause()
{
    // Hop to the main thread holding only a weak reference; the player may be
    // gone by the time the task runs.
    callOnMainThread([weakThis = createWeakPtr()] {
        if (!weakThis)
            return;
        weakThis.get()->pauseInternal();
    });
}

void MediaPlayerPrivateMediaSourceAVFObjC::pauseInternal()
{
    m_playing = false;
    // Rate 0 halts the synchronizer (and with it all attached renderers).
    [m_synchronizer setRate:0];
}

bool MediaPlayerPrivateMediaSourceAVFObjC::paused() const
{
    // "Paused" is defined by the synchronizer actually being stopped,
    // not by the m_playing request flag.
    return ![m_synchronizer rate];
}
336
void MediaPlayerPrivateMediaSourceAVFObjC::setVolume(float volume)
{
    // Propagate the new volume to every audio renderer we manage.
    for (auto& keyValuePair : m_sampleBufferAudioRendererMap)
        [keyValuePair.key setVolume:volume];
}
342
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsScanning() const
{
    // Fast-forward/rewind scanning is supported via setRateDouble().
    return true;
}

void MediaPlayerPrivateMediaSourceAVFObjC::setMuted(bool muted)
{
    // Uses the -setMuted: SPI whose presence is verified in isAvailable().
    for (auto pair : m_sampleBufferAudioRendererMap)
        [pair.key setMuted:muted];
}
353
FloatSize MediaPlayerPrivateMediaSourceAVFObjC::naturalSize() const
{
    // m_naturalSize is updated asynchronously via sizeWillChangeAtTime().
    return m_naturalSize;
}

bool MediaPlayerPrivateMediaSourceAVFObjC::hasVideo() const
{
    if (!m_mediaSourcePrivate)
        return false;

    return m_mediaSourcePrivate->hasVideo();
}

bool MediaPlayerPrivateMediaSourceAVFObjC::hasAudio() const
{
    if (!m_mediaSourcePrivate)
        return false;

    return m_mediaSourcePrivate->hasAudio();
}

void MediaPlayerPrivateMediaSourceAVFObjC::setVisible(bool)
{
    // Visibility influences whether we render via the layer or the
    // decompression session, so re-evaluate the rendering path.
    acceleratedRenderingStateChanged();
}

MediaTime MediaPlayerPrivateMediaSourceAVFObjC::durationMediaTime() const
{
    return m_mediaSourcePrivate ? m_mediaSourcePrivate->duration() : MediaTime::zeroTime();
}

MediaTime MediaPlayerPrivateMediaSourceAVFObjC::currentMediaTime() const
{
    // Clamp the timebase's time so it never reads below zero or behind the
    // last seek target (the timebase may briefly lag after a seek).
    MediaTime synchronizerTime = PAL::toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
    if (synchronizerTime < MediaTime::zeroTime())
        return MediaTime::zeroTime();
    if (synchronizerTime < m_lastSeekTime)
        return m_lastSeekTime;
    return synchronizerTime;
}

MediaTime MediaPlayerPrivateMediaSourceAVFObjC::startTime() const
{
    // MediaSource streams always start at zero.
    return MediaTime::zeroTime();
}

MediaTime MediaPlayerPrivateMediaSourceAVFObjC::initialTime() const
{
    return MediaTime::zeroTime();
}
404
void MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(const MediaTime& time, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold)
{
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(%p) - time(%s), negativeThreshold(%s), positiveThreshold(%s)", this, toString(time).utf8().data(), toString(negativeThreshold).utf8().data(), toString(positiveThreshold).utf8().data());
    m_seeking = true;
    // Record the request; the actual seek happens asynchronously in seekInternal().
    // (Fixed: removed an unused local weak pointer that was created here but
    // never captured by anything.)
    m_pendingSeek = std::make_unique<PendingSeek>(time, negativeThreshold, positiveThreshold);

    // Coalesce rapid seek requests: restart the zero-delay one-shot timer so
    // only the latest pending seek is serviced.
    if (m_seekTimer.isActive())
        m_seekTimer.stop();
    m_seekTimer.startOneShot(0_s);
}
416
void MediaPlayerPrivateMediaSourceAVFObjC::seekInternal()
{
    // Take ownership of the pending request so a re-entrant seek can queue a new one.
    std::unique_ptr<PendingSeek> pendingSeek;
    pendingSeek.swap(m_pendingSeek);

    if (!pendingSeek)
        return;

    if (!m_mediaSourcePrivate)
        return;

    // With zero tolerance, seek exactly; otherwise let the source pick the
    // cheapest time inside [target - negative, target + positive].
    if (!pendingSeek->negativeThreshold && !pendingSeek->positiveThreshold)
        m_lastSeekTime = pendingSeek->targetTime;
    else
        m_lastSeekTime = m_mediaSourcePrivate->fastSeekTimeForMediaTime(pendingSeek->targetTime, pendingSeek->positiveThreshold, pendingSeek->negativeThreshold);

    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekInternal(%p) - seekTime(%s)", this, toString(m_lastSeekTime).utf8().data());

    MediaTime synchronizerTime = PAL::toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
    bool doesNotRequireSeek = synchronizerTime == m_lastSeekTime;

    m_mediaSourcePrivate->willSeek();
    // Stop playback and jump the synchronizer to the seek target.
    [m_synchronizer setRate:0 time:PAL::toCMTime(m_lastSeekTime)];
    m_mediaSourcePrivate->seekToTime(m_lastSeekTime);

    // In cases where the destination seek time precisely matches the synchronizer's existing time
    // no time jumped notification will be issued. In this case, just notify the MediaPlayer that
    // the seek completed successfully.
    if (doesNotRequireSeek) {
        m_seeking = false;

        if (shouldBePlaying())
            [m_synchronizer setRate:m_rate];
        // NOTE(review): this tests m_seekCompleted for truthiness while other
        // call sites compare against SeekCompleted explicitly — confirm the
        // enum's values make the two forms equivalent.
        if (!seeking() && m_seekCompleted)
            m_player->timeChanged();
    }
}
454
void MediaPlayerPrivateMediaSourceAVFObjC::waitForSeekCompleted()
{
    if (!m_seeking)
        return;
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::waitForSeekCompleted(%p)", this);
    // Hold the "seek in progress" state until seekCompleted() is called.
    m_seekCompleted = Seeking;
}

void MediaPlayerPrivateMediaSourceAVFObjC::seekCompleted()
{
    if (m_seekCompleted == SeekCompleted)
        return;
    // With video, defer completion until a frame for the new time is ready so
    // a stale frame is never shown. (The "Fame" spelling matches the enum
    // declared in the header and cannot be fixed from this file alone.)
    if (hasVideo() && !m_hasAvailableVideoFrame) {
        m_seekCompleted = WaitingForAvailableFame;
        return;
    }
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekCompleted(%p)", this);
    m_seekCompleted = SeekCompleted;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
    if (!m_seeking)
        m_player->timeChanged();
}

bool MediaPlayerPrivateMediaSourceAVFObjC::seeking() const
{
    // Seeking covers both the pending-seek phase and the completion wait.
    return m_seeking || m_seekCompleted != SeekCompleted;
}
483
void MediaPlayerPrivateMediaSourceAVFObjC::setRateDouble(double rate)
{
    // AVSampleBufferRenderSynchronizer does not support negative rate yet.
    m_rate = rate < 0 ? 0 : rate;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}

void MediaPlayerPrivateMediaSourceAVFObjC::setPreservesPitch(bool preservesPitch)
{
    // Spectral preserves pitch across rate changes; Varispeed does not.
    NSString *pitchAlgorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    for (auto& keyValuePair : m_sampleBufferAudioRendererMap)
        [keyValuePair.key setAudioTimePitchAlgorithm:pitchAlgorithm];
}
498
MediaPlayer::NetworkState MediaPlayerPrivateMediaSourceAVFObjC::networkState() const
{
    return m_networkState;
}

MediaPlayer::ReadyState MediaPlayerPrivateMediaSourceAVFObjC::readyState() const
{
    return m_readyState;
}

std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::seekable() const
{
    // Everything between start time and duration is seekable for MediaSource.
    return std::make_unique<PlatformTimeRanges>(minMediaTimeSeekable(), maxMediaTimeSeekable());
}

MediaTime MediaPlayerPrivateMediaSourceAVFObjC::maxMediaTimeSeekable() const
{
    return durationMediaTime();
}

MediaTime MediaPlayerPrivateMediaSourceAVFObjC::minMediaTimeSeekable() const
{
    return startTime();
}

std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::buffered() const
{
    // Before load() there is no source; report an empty range set.
    return m_mediaSourcePrivate ? m_mediaSourcePrivate->buffered() : std::make_unique<PlatformTimeRanges>();
}

bool MediaPlayerPrivateMediaSourceAVFObjC::didLoadingProgress() const
{
    // Read-and-reset: each call reports progress since the previous call.
    bool loadingProgressed = m_loadingProgressed;
    m_loadingProgressed = false;
    return loadingProgressed;
}
535
void MediaPlayerPrivateMediaSourceAVFObjC::setSize(const IntSize&)
{
    // No-op.
}

NativeImagePtr MediaPlayerPrivateMediaSourceAVFObjC::nativeImageForCurrentTime()
{
    // Refresh m_lastImage from the decompression session (when possible), then
    // return whatever image we have — possibly a previous frame, possibly null.
    updateLastImage();
    return m_lastImage.get();
}
546
bool MediaPlayerPrivateMediaSourceAVFObjC::updateLastPixelBuffer()
{
    // Only the decompression-session path produces pixel buffers; when a
    // display layer is active (or no session exists) there is nothing to fetch.
    if (m_sampleBufferDisplayLayer || !m_decompressionSession)
        return false;

    // The very first fetch tolerates a later frame; afterwards require an
    // exact match for the current time.
    auto flags = m_lastPixelBuffer ? WebCoreDecompressionSession::ExactTime : WebCoreDecompressionSession::AllowLater;
    auto pixelBuffer = m_decompressionSession->imageForTime(currentMediaTime(), flags);
    if (!pixelBuffer)
        return false;

    m_lastPixelBuffer = pixelBuffer;
    return true;
}
560
bool MediaPlayerPrivateMediaSourceAVFObjC::updateLastImage()
{
    // Only regenerate the CGImage when a new pixel buffer was produced.
    if (!updateLastPixelBuffer())
        return false;

    ASSERT(m_lastPixelBuffer);

    // Lazily create a conformer that converts pixel buffers to 32BGRA for CG.
    if (!m_rgbConformer) {
        NSDictionary *attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
        m_rgbConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)attributes);
    }

    m_lastImage = m_rgbConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());
    return true;
}
576
void MediaPlayerPrivateMediaSourceAVFObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    // Painting and painting-the-current-frame are the same operation here.
    paintCurrentFrameInContext(context, rect);
}

void MediaPlayerPrivateMediaSourceAVFObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& outputRect)
{
    if (context.paintingDisabled())
        return;

    auto nativeImage = nativeImageForCurrentTime();
    if (!nativeImage)
        return;

    // Draw the frame scaled from its natural bounds into the requested rect,
    // restoring the context state afterwards.
    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(nativeImage.get()), CGImageGetHeight(nativeImage.get()));
    context.drawNativeImage(nativeImage, imageRect.size(), outputRect, imageRect);
}
595
bool MediaPlayerPrivateMediaSourceAVFObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // We have been asked to paint into a WebGL canvas, so take that as a signal to create
    // a decompression session, even if that means the native video can't also be displayed
    // in page.
    if (!m_hasBeenAskedToPaintGL) {
        m_hasBeenAskedToPaintGL = true;
        acceleratedRenderingStateChanged();
    }

    ASSERT(context);

    // Refresh the cached GL texture only when a new pixel buffer arrived;
    // otherwise the previously-cached m_lastTexture is reused below.
    if (updateLastPixelBuffer()) {
        if (!m_lastPixelBuffer)
            return false;

        // Lazily create the texture cache bound to this GL context.
        if (!m_textureCache) {
            m_textureCache = TextureCacheCV::create(*context);
            if (!m_textureCache)
                return false;
        }

        m_lastTexture = m_textureCache->textureFromImage(m_lastPixelBuffer.get(), outputTarget, level, internalFormat, format, type);
    }

    // NOTE(review): if no pixel buffer has ever been produced these calls see a
    // null buffer/texture — presumably the copier tolerates that; confirm.
    size_t width = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t height = CVPixelBufferGetHeight(m_lastPixelBuffer.get());

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(m_lastTexture.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
629
bool MediaPlayerPrivateMediaSourceAVFObjC::hasAvailableVideoFrame() const
{
    // Set via setHasAvailableVideoFrame() when a frame for the current time exists.
    return m_hasAvailableVideoFrame;
}

bool MediaPlayerPrivateMediaSourceAVFObjC::supportsAcceleratedRendering() const
{
    return true;
}
639
void MediaPlayerPrivateMediaSourceAVFObjC::acceleratedRenderingStateChanged()
{
    // Prefer the AVSampleBufferDisplayLayer when compositing can accelerate it,
    // unless WebGL painting has forced us onto the decompression-session path.
    // Exactly one of the two rendering paths exists at any time.
    if (!m_hasBeenAskedToPaintGL && m_player->client().mediaPlayerRenderingCanBeAccelerated(m_player)) {
        destroyDecompressionSession();
        ensureLayer();
    } else {
        destroyLayer();
        ensureDecompressionSession();
    }
}

void MediaPlayerPrivateMediaSourceAVFObjC::notifyActiveSourceBuffersChanged()
{
    // Forward to the client; the set of active source buffers lives in the source.
    m_player->client().mediaPlayerActiveSourceBuffersChanged(m_player);
}

MediaPlayer::MovieLoadType MediaPlayerPrivateMediaSourceAVFObjC::movieLoadType() const
{
    return MediaPlayer::StoredStream;
}
660
void MediaPlayerPrivateMediaSourceAVFObjC::prepareForRendering()
{
    // No-op.
}

String MediaPlayerPrivateMediaSourceAVFObjC::engineDescription() const
{
    // Human-readable engine name used for diagnostics.
    static NeverDestroyed<String> description(MAKE_STATIC_STRING_IMPL("AVFoundation MediaSource Engine"));
    return description;
}

String MediaPlayerPrivateMediaSourceAVFObjC::languageOfPrimaryAudioTrack() const
{
    // FIXME(125158): implement languageOfPrimaryAudioTrack()
    return emptyString();
}

size_t MediaPlayerPrivateMediaSourceAVFObjC::extraMemoryCost() const
{
    // Memory accounting is not implemented for this engine.
    return 0;
}
682
std::optional<PlatformVideoPlaybackQualityMetrics> MediaPlayerPrivateMediaSourceAVFObjC::videoPlaybackQualityMetrics()
{
    // Prefer the decompression session's own bookkeeping when it is rendering.
    if (m_decompressionSession) {
        return PlatformVideoPlaybackQualityMetrics(
            m_decompressionSession->totalVideoFrames(),
            m_decompressionSession->droppedVideoFrames(),
            m_decompressionSession->corruptedVideoFrames(),
            m_decompressionSession->totalFrameDelay().toDouble()
        );
    }

    // Otherwise query the display layer's private metrics object (may be nil;
    // messaging a nil layer also yields nil here).
    auto metrics = [m_sampleBufferDisplayLayer videoPerformanceMetrics];
    if (!metrics)
        return std::nullopt;

    return PlatformVideoPlaybackQualityMetrics(
        [metrics totalNumberOfVideoFrames],
        [metrics numberOfDroppedVideoFrames],
        [metrics numberOfCorruptedVideoFrames],
        [metrics totalFrameDelay]
    );
}
705
706 #pragma mark -
707 #pragma mark Utility Methods
708
void MediaPlayerPrivateMediaSourceAVFObjC::ensureLayer()
{
    if (m_sampleBufferDisplayLayer)
        return;

    m_sampleBufferDisplayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
#ifndef NDEBUG
    [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaSource AVSampleBufferDisplayLayer"];
#endif

    // Attach the layer to the synchronizer so it renders in lockstep, and tell
    // the media source to route video samples to it.
    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setVideoLayer(m_sampleBufferDisplayLayer.get());
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // NOTE(review): this guard includes PLATFORM(IOS) while the manager's
    // creation visible in this file's constructor is MAC-only — confirm the
    // header declares m_videoFullscreenLayerManager for iOS as well.
    m_videoFullscreenLayerManager->setVideoLayer(m_sampleBufferDisplayLayer.get(), snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size());
#endif
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
727
void MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer()
{
    if (!m_sampleBufferDisplayLayer)
        return;

    // Detach from the synchronizer at the current time so removal is seamless.
    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    // Stop routing video samples to the layer before releasing it.
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setVideoLayer(nullptr);
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif
    m_sampleBufferDisplayLayer = nullptr;
    setHasAvailableVideoFrame(false);
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
747
void MediaPlayerPrivateMediaSourceAVFObjC::ensureDecompressionSession()
{
    if (m_decompressionSession)
        return;

    // Drive decode off the synchronizer's timebase so frames stay in sync
    // with audio and with the playback rate.
    m_decompressionSession = WebCoreDecompressionSession::createOpenGL();
    m_decompressionSession->setTimebase([m_synchronizer timebase]);

    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setDecompressionSession(m_decompressionSession.get());

    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}

void MediaPlayerPrivateMediaSourceAVFObjC::destroyDecompressionSession()
{
    if (!m_decompressionSession)
        return;

    // Detach from the media source before invalidating to avoid dangling use.
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setDecompressionSession(nullptr);

    m_decompressionSession->invalidate();
    m_decompressionSession = nullptr;
    setHasAvailableVideoFrame(false);
}
774
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldBePlaying() const
{
    // Playback requires an outstanding play() request, no in-flight seek,
    // every renderer having samples queued, and at least HaveFutureData.
    return m_playing && !seeking() && allRenderersHaveAvailableSamples() && m_readyState >= MediaPlayer::HaveFutureData;
}

void MediaPlayerPrivateMediaSourceAVFObjC::setHasAvailableVideoFrame(bool flag)
{
    if (m_hasAvailableVideoFrame == flag)
        return;
    m_hasAvailableVideoFrame = flag;
    updateAllRenderersHaveAvailableSamples();

    // The notifications below only apply when a frame became available.
    if (!m_hasAvailableVideoFrame)
        return;

    m_player->firstVideoFrameAvailable();
    // Complete a seek that was parked waiting for this frame.
    if (m_seekCompleted == WaitingForAvailableFame)
        seekCompleted();

    // A ready-state transition may have been deferred until a frame existed.
    if (m_readyStateIsWaitingForAvailableFrame) {
        m_readyStateIsWaitingForAvailableFrame = false;
        m_player->readyStateChanged();
    }
}
799
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunknown-pragmas"
#pragma clang diagnostic ignored "-Wunguarded-availability-new"
// Records whether a given audio renderer has an audible sample queued. The
// availability pragmas silence warnings about the soft-linked renderer class.
void MediaPlayerPrivateMediaSourceAVFObjC::setHasAvailableAudioSample(AVSampleBufferAudioRenderer* renderer, bool flag)
#pragma clang diagnostic pop
{
    // Ignore renderers we no longer (or never did) track.
    auto iter = m_sampleBufferAudioRendererMap.find(renderer);
    if (iter == m_sampleBufferAudioRendererMap.end())
        return;

    auto& properties = iter->value;
    if (properties.hasAudibleSample == flag)
        return;
    properties.hasAudibleSample = flag;
    updateAllRenderersHaveAvailableSamples();
}
816
void MediaPlayerPrivateMediaSourceAVFObjC::updateAllRenderersHaveAvailableSamples()
{
    // The renderer set is "ready" when the video path (if any) has a frame and
    // every audio renderer has an audible sample queued. Flattened from the
    // original do { ... } while (0) / break construction.
    bool allRenderersHaveAvailableSamples = !hasVideo() || m_hasAvailableVideoFrame;

    if (allRenderersHaveAvailableSamples) {
        for (auto& properties : m_sampleBufferAudioRendererMap.values()) {
            if (!properties.hasAudibleSample) {
                allRenderersHaveAvailableSamples = false;
                break;
            }
        }
    }

    if (m_allRenderersHaveAvailableSamples == allRenderersHaveAvailableSamples)
        return;

    m_allRenderersHaveAvailableSamples = allRenderersHaveAvailableSamples;

    // Start or stop the synchronizer to match the new readiness state.
    if (shouldBePlaying() && [m_synchronizer rate] != m_rate)
        [m_synchronizer setRate:m_rate];
    else if (!shouldBePlaying() && [m_synchronizer rate])
        [m_synchronizer setRate:0];
}
845
void MediaPlayerPrivateMediaSourceAVFObjC::durationChanged()
{
    m_player->durationChanged();

    // Replace any boundary observer scheduled for the previous duration.
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];

    if (!m_mediaSourcePrivate)
        return;

    MediaTime duration = m_mediaSourcePrivate->duration();
    auto weakThis = createWeakPtr();
    NSArray* times = @[[NSValue valueWithCMTime:PAL::toCMTime(duration)]];

    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::durationChanged(%p) - duration = %s", this, toString(duration).utf8().data());

    // Pause automatically when playback reaches the (new) duration boundary.
    m_durationObserver = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[weakThis, duration] {
        if (!weakThis)
            return;

        MediaTime now = weakThis->currentMediaTime();
        LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::durationChanged(%p) - boundary time observer called, now = %s", weakThis.get(), toString(now).utf8().data());

        weakThis->pauseInternal();
        if (now < duration) {
            // The observer fired early; snap the synchronizer to the duration.
            // (Fixed: the format string was missing the %p specifier for the
            // trailing weakThis.get() argument.)
            LOG(MediaSource, "   ERROR(%p): boundary time observer called before duration!", weakThis.get());
            [weakThis->m_synchronizer setRate:0 time:PAL::toCMTime(duration)];
        }
        weakThis->m_player->timeChanged();

    }];

    // If the duration shrank to (or below) the current time, stop playback now.
    if (m_playing && duration <= currentMediaTime())
        pauseInternal();
}
881
// Forwards a playback-rate-change notification to the MediaPlayer client.
void MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged()
{
    m_player->rateChanged();
}
886
// Schedules a natural-size change to take effect when playback reaches
// `time`: a boundary time observer is registered with the synchronizer and
// queued in m_sizeChangeObservers. When it fires, the oldest pending
// observer is removed (observers fire in the order they were appended) and
// the new size is applied. If playback has already reached `time`, the size
// is also applied immediately.
void MediaPlayerPrivateMediaSourceAVFObjC::sizeWillChangeAtTime(const MediaTime& time, const FloatSize& size)
{
    // Weak pointer comes from a dedicated factory so that
    // flushPendingSizeChanges() can revoke all pending callbacks at once.
    auto weakThis = m_sizeChangeObserverWeakPtrFactory.createWeakPtr(*this);
    NSArray* times = @[[NSValue valueWithCMTime:PAL::toCMTime(time)]];
    RetainPtr<id> observer = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[this, weakThis, size] {
        // `this` is only dereferenced after the weak pointer check below.
        if (!weakThis)
            return;

        ASSERT(!m_sizeChangeObservers.isEmpty());
        if (!m_sizeChangeObservers.isEmpty()) {
            // Remove the observer that just fired (the head of the queue).
            RetainPtr<id> observer = m_sizeChangeObservers.takeFirst();
            [m_synchronizer removeTimeObserver:observer.get()];
        }
        setNaturalSize(size);
    }];
    m_sizeChangeObservers.append(WTFMove(observer));

    if (currentMediaTime() >= time)
        setNaturalSize(size);
}
907
void MediaPlayerPrivateMediaSourceAVFObjC::setNaturalSize(const FloatSize& size)
{
    // Notify the client only when the presentation size actually changes.
    if (m_naturalSize == size)
        return;

    m_naturalSize = size;
    m_player->sizeChanged();
}
916
void MediaPlayerPrivateMediaSourceAVFObjC::flushPendingSizeChanges()
{
    // Discard every queued size-change boundary observer without firing it,
    // and invalidate any in-flight observer blocks via the weak-ptr factory.
    while (!m_sizeChangeObservers.isEmpty())
        [m_synchronizer removeTimeObserver:m_sizeChangeObservers.takeFirst().get()];
    m_sizeChangeObserverWeakPtrFactory.revokeAll();
}
925
926 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
AVStreamSession* MediaPlayerPrivateMediaSourceAVFObjC::streamSession()
{
    // AVStreamSession is soft-linked; bail out when the class (or the
    // storage-directory initializer we require) is unavailable.
    if (!getAVStreamSessionClass() || ![getAVStreamSessionClass() instancesRespondToSelector:@selector(initWithStorageDirectoryAtURL:)])
        return nil;

    if (m_streamSession)
        return m_streamSession.get();

    // Lazily create the session, backed by the media-keys storage directory.
    String directory = m_player->mediaKeysStorageDirectory();
    if (directory.isEmpty())
        return nil;

    if (!fileExists(directory) && !makeAllDirectories(directory))
        return nil;

    String plistPath = pathByAppendingComponent(directory, "SecureStop.plist");
    m_streamSession = adoptNS([allocAVStreamSessionInstance() initWithStorageDirectoryAtURL:[NSURL fileURLWithPath:plistPath]]);
    return m_streamSession.get();
}
947
// Associates a CDM session with this player and propagates it to every
// source buffer so encrypted samples can be decrypted.
void MediaPlayerPrivateMediaSourceAVFObjC::setCDMSession(CDMSession* session)
{
    if (session == m_session)
        return;

    m_session = toCDMSessionMediaSourceAVFObjC(session);

    if (CDMSessionAVStreamSession* cdmStreamSession = toCDMSessionAVStreamSession(m_session))
        cdmStreamSession->setStreamSession(streamSession());

    // Guard against a null media source, consistent with durationChanged():
    // this can be reached before a media source has been attached.
    if (!m_mediaSourcePrivate)
        return;

    for (auto& sourceBuffer : m_mediaSourcePrivate->sourceBuffers())
        sourceBuffer->setCDMSession(m_session);
}
960
// Forwards an encrypted-media "key needed" event, along with its
// initialization data, to the MediaPlayer client.
void MediaPlayerPrivateMediaSourceAVFObjC::keyNeeded(Uint8Array* initData)
{
    m_player->keyNeeded(initData);
}
965 #endif
966
// Returns the client-supplied list of content types that must be decoded in
// hardware; forwarded from the MediaPlayer.
const Vector<ContentType>& MediaPlayerPrivateMediaSourceAVFObjC::mediaContentTypesRequiringHardwareSupport() const
{
    return m_player->mediaContentTypesRequiringHardwareSupport();
}
971
// Whether codec hardware-support checks should be performed; forwarded from
// the MediaPlayer client.
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldCheckHardwareSupport() const
{
    return m_player->shouldCheckHardwareSupport();
}
976
void MediaPlayerPrivateMediaSourceAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (readyState == m_readyState)
        return;

    m_readyState = readyState;

    // Align the synchronizer's rate with the new playability state.
    [m_synchronizer setRate:shouldBePlaying() ? m_rate : 0];

    // When video is present but no frame is displayable yet, defer the
    // client notification; it is delivered once a frame becomes available.
    bool waitingForVideoFrame = m_readyState >= MediaPlayerEnums::HaveCurrentData && hasVideo() && !m_hasAvailableVideoFrame;
    if (waitingForVideoFrame) {
        m_readyStateIsWaitingForAvailableFrame = true;
        return;
    }

    m_player->readyStateChanged();
}
996
void MediaPlayerPrivateMediaSourceAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    // Notify the client only on an actual state transition.
    if (networkState == m_networkState)
        return;

    m_networkState = networkState;
    m_player->networkStateChanged();
}
1005
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunknown-pragmas"
#pragma clang diagnostic ignored "-Wunguarded-availability-new"
void MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
#pragma clang diagnostic pop
{
    // HashMap::add() is a no-op for an existing key; bail out when the
    // renderer is already tracked.
    if (!m_sampleBufferAudioRendererMap.add(audioRenderer, AudioRendererProperties()).isNewEntry)
        return;

    // Propagate the player's current audio settings to the new renderer.
    [audioRenderer setMuted:m_player->muted()];
    [audioRenderer setVolume:m_player->volume()];
    [audioRenderer setAudioTimePitchAlgorithm:(m_player->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    [m_synchronizer addRenderer:audioRenderer];
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
1024
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunknown-pragmas"
#pragma clang diagnostic ignored "-Wunguarded-availability-new"
void MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
#pragma clang diagnostic pop
{
    auto it = m_sampleBufferAudioRendererMap.find(audioRenderer);
    if (it == m_sampleBufferAudioRendererMap.end())
        return;

    // Detach the renderer from the synchronizer at the current timebase time.
    CMTime now = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:audioRenderer atTime:now withCompletionHandler:^(BOOL) {
        // No-op.
    }];

    m_sampleBufferAudioRendererMap.remove(it);
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
1043
void MediaPlayerPrivateMediaSourceAVFObjC::characteristicsChanged()
{
    // Track characteristics may have changed (updateAllRenderersHaveAvailableSamples()
    // consults hasVideo()), so recompute availability before notifying the client.
    updateAllRenderersHaveAvailableSamples();
    m_player->characteristicChanged();
}
1049
1050 #if PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE)
// Installs (or removes, when nil) the platform layer that hosts video while
// in fullscreen. `completionHandler` is invoked immediately when nothing
// changes, otherwise handed off to the layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::setVideoFullscreenLayer(PlatformLayer *videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer) {
        completionHandler();
        return;
    }

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler));

    // Re-parent the text track (captions) layer into the new fullscreen layer.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }
}
1065
// Updates the fullscreen video layer's frame and keeps the text track
// (captions) layer sized to match.
void MediaPlayerPrivateMediaSourceAVFObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
    syncTextTrackBounds();
}
1071 #endif
1072     
bool MediaPlayerPrivateMediaSourceAVFObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // A platform text track representation is needed only while a fullscreen
    // video layer is installed.
    return m_videoFullscreenLayerManager->videoFullscreenLayer() != nullptr;
#else
    return false;
#endif
}
1081     
// Resizes the text track (captions) layer to match the video: the display
// layer's bounds when one exists, otherwise a rect covering the fullscreen frame.
void MediaPlayerPrivateMediaSourceAVFObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (!m_videoFullscreenLayerManager->videoFullscreenLayer() || !m_textTrackRepresentationLayer)
        return;

    auto videoFullscreenFrame = m_videoFullscreenLayerManager->videoFullscreenFrame();
    // Messaging a nil m_sampleBufferDisplayLayer yields a zero rect, which is
    // why the ternary below explicitly falls back to the fullscreen frame.
    auto videoRect = [m_sampleBufferDisplayLayer bounds];
    auto textFrame = m_sampleBufferDisplayLayer ? videoRect : CGRectMake(0, 0, videoFullscreenFrame.width(), videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
1094     
// Swaps in the platform layer backing the text track (captions)
// representation, re-parenting it under the fullscreen layer when one is
// installed. Passing null (or a representation with no platform layer)
// removes the current layer.
void MediaPlayerPrivateMediaSourceAVFObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    if (representationLayer == m_textTrackRepresentationLayer) {
        // Same layer: just make sure its frame is still in sync.
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Attach the new layer (sized first) beneath the fullscreen layer.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
1118     
1119
1120 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Stores the wireless playback target chosen by the client; consulted by
// isCurrentPlaybackTargetWireless().
void MediaPlayerPrivateMediaSourceAVFObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);
}
1125
void MediaPlayerPrivateMediaSourceAVFObjC::setShouldPlayToPlaybackTarget(bool shouldPlayToTarget)
{
    if (m_shouldPlayToTarget == shouldPlayToTarget)
        return;

    m_shouldPlayToTarget = shouldPlayToTarget;

    // The wireless-playback state derives from this flag; let the client
    // know it may have changed.
    if (m_player)
        m_player->currentPlaybackTargetIsWirelessChanged();
}
1136
bool MediaPlayerPrivateMediaSourceAVFObjC::isCurrentPlaybackTargetWireless() const
{
    // Wireless only when a target exists, routing to it is enabled, and the
    // target reports an active route.
    return m_playbackTarget && m_shouldPlayToTarget && m_playbackTarget->hasActiveRoute();
}
1144 #endif
1145
1146 }
1147
1148 #endif