Add support for reporting "display composited video frames" through the VideoPlayback...
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateMediaSourceAVFObjC.mm
1 /*
2  * Copyright (C) 2013-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateMediaSourceAVFObjC.h"
28
29 #if ENABLE(MEDIA_SOURCE) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "CDMInstance.h"
34 #import "CDMSessionAVStreamSession.h"
35 #import "CDMSessionMediaSourceAVFObjC.h"
36 #import "FileSystem.h"
37 #import "GraphicsContextCG.h"
38 #import "Logging.h"
39 #import "MediaSourcePrivateAVFObjC.h"
40 #import "MediaSourcePrivateClient.h"
41 #import "PixelBufferConformerCV.h"
42 #import "TextTrackRepresentation.h"
43 #import "TextureCacheCV.h"
44 #import "VideoFullscreenLayerManagerObjC.h"
45 #import "VideoTextureCopierCV.h"
46 #import "WebCoreDecompressionSession.h"
#import <AVFoundation/AVAsset.h>
#import <AVFoundation/AVTime.h>
#import <QuartzCore/CALayer.h>
#import <objc/runtime.h>
#import <pal/avfoundation/MediaTimeAVFoundation.h>
#import <pal/spi/mac/AVFoundationSPI.h>
53 #import <wtf/Algorithms.h>
54 #import <wtf/Deque.h>
55 #import <wtf/MainThread.h>
56 #import <wtf/NeverDestroyed.h>
57
58 #pragma mark - Soft Linking
59
60 #import <pal/cf/CoreMediaSoftLink.h>
61
62 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
63
64 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVAsset)
65 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVURLAsset)
66 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
67 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferAudioRenderer)
68 ALLOW_NEW_API_WITHOUT_GUARDS_END
69 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferDisplayLayer)
70 ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
71 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVSampleBufferRenderSynchronizer)
72 ALLOW_NEW_API_WITHOUT_GUARDS_END
73 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamDataParser)
74 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVStreamSession);
75 SOFT_LINK_CLASS_OPTIONAL(AVFoundation, AVVideoPerformanceMetrics)
76
77 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
78 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
79
80 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
81 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
82
83 #pragma mark -
84 #pragma mark AVStreamSession
85
86 @interface AVStreamSession : NSObject
87 - (instancetype)initWithStorageDirectoryAtURL:(NSURL *)storageDirectory;
88 @end
89
90 namespace WebCore {
91 using namespace PAL;
92
93 #pragma mark -
94 #pragma mark MediaPlayerPrivateMediaSourceAVFObjC
95
// CoreMedia notification callback fired when the synchronizer timebase's
// effective rate changes. May arrive on an arbitrary thread, so hop to the
// main thread (guarded by a WeakPtr) before touching the player.
static void CMTimebaseEffectiveRateChangedCallback(CMNotificationCenterRef, const void *listener, CFStringRef, const void *, CFTypeRef)
{
    auto* player = static_cast<MediaPlayerPrivateMediaSourceAVFObjC*>(const_cast<void*>(listener));
    callOnMainThread([weakPlayer = player->createWeakPtr()] {
        if (weakPlayer)
            weakPlayer->effectiveRateChanged();
    });
}
105
MediaPlayerPrivateMediaSourceAVFObjC::MediaPlayerPrivateMediaSourceAVFObjC(MediaPlayer* player)
    : m_player(player)
    , m_synchronizer(adoptNS([allocAVSampleBufferRenderSynchronizerInstance() init]))
    , m_seekTimer(*this, &MediaPlayerPrivateMediaSourceAVFObjC::seekInternal)
    , m_session(nullptr)
    , m_networkState(MediaPlayer::Empty)
    , m_readyState(MediaPlayer::HaveNothing)
    , m_rate(1)
    , m_playing(0)
    , m_seeking(false)
    , m_loadingProgressed(false)
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
{
    // Listen for effective-rate changes on the synchronizer's timebase so rate
    // changes made by the system (not just by us) are reported to the client.
    // Balanced by CMNotificationCenterRemoveListener in the destructor.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterAddListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase, 0);

    // addPeriodicTimeObserverForInterval: throws an exception if you pass a non-numeric CMTime, so just use
    // an arbitrarily large time value of once an hour:
    // NOTE: the block also captures `this` implicitly through the member
    // accesses below; the weakThis check guards against use-after-free.
    __block auto weakThis = createWeakPtr();
    m_timeJumpedObserver = [m_synchronizer addPeriodicTimeObserverForInterval:PAL::toCMTime(MediaTime::createWithDouble(3600)) queue:dispatch_get_main_queue() usingBlock:^(CMTime time) {
#if LOG_DISABLED
        UNUSED_PARAM(time);
#endif
        // FIXME: Remove the below once <rdar://problem/15798050> is fixed.
        if (!weakThis)
            return;

        // A "time jumped" notification arriving while a seek is in flight means
        // the synchronizer has landed on the seek target: finish the seek.
        if (m_seeking && !m_pendingSeek) {
            LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::m_timeJumpedObserver(%p) - time(%s)", weakThis.get(), toString(PAL::toMediaTime(time)).utf8().data());
            m_seeking = false;

            if (shouldBePlaying())
                [m_synchronizer setRate:m_rate];
            if (!seeking() && m_seekCompleted == SeekCompleted)
                m_player->timeChanged();
        }

        // A newer seek was requested while the previous one completed; service it now.
        if (m_pendingSeek)
            seekInternal();
    }];
}
148
MediaPlayerPrivateMediaSourceAVFObjC::~MediaPlayerPrivateMediaSourceAVFObjC()
{
    // Unregister the timebase rate-change listener added in the constructor.
    CMTimebaseRef timebase = [m_synchronizer timebase];
    CMNotificationCenterRef nc = CMNotificationCenterGetDefaultLocalCenter();
    CMNotificationCenterRemoveListener(nc, this, CMTimebaseEffectiveRateChangedCallback, kCMTimebaseNotification_EffectiveRateChanged, timebase);

    // Remove any outstanding synchronizer time observers (periodic and
    // boundary) before the synchronizer is released.
    if (m_timeJumpedObserver)
        [m_synchronizer removeTimeObserver:m_timeJumpedObserver.get()];
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];
    flushPendingSizeChanges();

    destroyLayer();
    destroyDecompressionSession();

    m_seekTimer.stop();
}
166
167 #pragma mark -
168 #pragma mark MediaPlayer Factory Methods
169
// Registers this engine with the MediaPlayer engine registry. No-op when the
// required AVFoundation/CoreMedia classes are unavailable at runtime.
void MediaPlayerPrivateMediaSourceAVFObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateMediaSourceAVFObjC>(player); },
        getSupportedTypes, supportsType, 0, 0, 0, 0);
    ASSERT(AVFoundationMIMETypeCache::singleton().isAvailable());
}
179
// Returns whether the soft-linked frameworks and classes this engine depends
// on are present, including an AVSampleBufferAudioRenderer new enough to
// respond to -setMuted:.
bool MediaPlayerPrivateMediaSourceAVFObjC::isAvailable()
{
    if (!AVFoundationLibrary() || !isCoreMediaFrameworkAvailable())
        return false;
    if (!getAVStreamDataParserClass() || !getAVSampleBufferAudioRendererClass() || !getAVSampleBufferRenderSynchronizerClass())
        return false;
    return class_getInstanceMethod(getAVSampleBufferAudioRendererClass(), @selector(setMuted:)) != nullptr;
}
189
// Reports the MIME container types AVFoundation can demux, as cached by
// AVFoundationMIMETypeCache.
void MediaPlayerPrivateMediaSourceAVFObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& types)
{
    types = AVFoundationMIMETypeCache::singleton().types();
}
194
// Answers whether this engine can play the given content type. Only
// MediaSource loads are handled; codec strings are translated through
// AVStreamDataParser (when available) before asking AVURLAsset.
MediaPlayer::SupportsType MediaPlayerPrivateMediaSourceAVFObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    // This engine does not support non-media-source sources.
    if (!parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    if (parameters.type.isEmpty() || !AVFoundationMIMETypeCache::singleton().canDecodeType(parameters.type.containerType()))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    auto codecs = parameters.type.parameter(ContentType::codecsParameter());
    if (codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // The parser may rewrite codec parameters into the form AVFoundation will
    // actually output; use the rewritten string for the playability query.
    NSString *outputCodecs = codecs;
    if ([getAVStreamDataParserClass() respondsToSelector:@selector(outputMIMECodecParameterForInputMIMECodecParameter:)])
        outputCodecs = [getAVStreamDataParserClass() outputMIMECodecParameterForInputMIMECodecParameter:outputCodecs];

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type.containerType(), (NSString *)outputCodecs];
    return [getAVURLAssetClass() isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
224
225 #pragma mark -
226 #pragma mark MediaPlayerPrivateInterface Overrides
227
// Plain-URL load: unsupported by design; immediately report a format error so
// the MediaPlayer falls back to another engine.
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String&)
{
    // This media engine only supports MediaSource URLs.
    m_networkState = MediaPlayer::FormatError;
    m_player->networkStateChanged();
}
234
// MediaSource load: creates the private media source and hands it whichever
// rendering path (display layer or decompression session) currently exists.
void MediaPlayerPrivateMediaSourceAVFObjC::load(const String& url, MediaSourcePrivateClient* client)
{
    UNUSED_PARAM(url);

    m_mediaSourcePrivate = MediaSourcePrivateAVFObjC::create(this, client);
    m_mediaSourcePrivate->setVideoLayer(m_sampleBufferDisplayLayer.get());
    m_mediaSourcePrivate->setDecompressionSession(m_decompressionSession.get());

    // Ensure the correct rendering path exists for the current GL/layer state.
    acceleratedRenderingStateChanged();
}
245
#if ENABLE(MEDIA_STREAM)
// MediaStream load: unsupported by this engine; report a format error.
void MediaPlayerPrivateMediaSourceAVFObjC::load(MediaStreamPrivate&)
{
    setNetworkState(MediaPlayer::FormatError);
}
#endif
252
// Nothing to cancel: loading is driven by the MediaSource, not by this engine.
void MediaPlayerPrivateMediaSourceAVFObjC::cancelLoad()
{
}

// No preparation needed; renderers are created lazily.
void MediaPlayerPrivateMediaSourceAVFObjC::prepareToPlay()
{
}
260
// Returns the inline video layer managed by the fullscreen layer manager
// (null when rendering through a decompression session instead of a layer).
PlatformLayer* MediaPlayerPrivateMediaSourceAVFObjC::platformLayer() const
{
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
265
// Starts playback asynchronously on the main thread; the synchronizer and
// renderers are main-thread objects.
void MediaPlayerPrivateMediaSourceAVFObjC::play()
{
    callOnMainThread([weakThis = createWeakPtr()] {
        if (weakThis)
            weakThis->playInternal();
    });
}
274
// Main-thread half of play(): marks the player as playing and starts the
// synchronizer if every renderer is ready.
void MediaPlayerPrivateMediaSourceAVFObjC::playInternal()
{
    // play() can arrive before load(url, client) has created the media source
    // (or after teardown); without it there is no duration and nothing to play.
    if (!m_mediaSourcePrivate)
        return;

    // Don't restart playback when already at (or past) end of stream.
    if (currentMediaTime() >= m_mediaSourcePrivate->duration())
        return;

    m_playing = true;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}
284
// Pauses playback asynchronously on the main thread, mirroring play().
void MediaPlayerPrivateMediaSourceAVFObjC::pause()
{
    callOnMainThread([weakThis = createWeakPtr()] {
        if (weakThis)
            weakThis->pauseInternal();
    });
}
293
// Main-thread half of pause(): clears the playing flag and stops the
// synchronizer immediately.
void MediaPlayerPrivateMediaSourceAVFObjC::pauseInternal()
{
    m_playing = false;
    [m_synchronizer setRate:0];
}
299
// "Paused" is defined by the synchronizer actually being stopped, not by the
// m_playing intent flag (the synchronizer may be stalled waiting for samples).
bool MediaPlayerPrivateMediaSourceAVFObjC::paused() const
{
    return [m_synchronizer rate] == 0;
}
304
// Propagates the volume to every attached audio renderer.
void MediaPlayerPrivateMediaSourceAVFObjC::setVolume(float volume)
{
    for (const auto& rendererKey : m_sampleBufferAudioRendererMap.keys())
        [(__bridge AVSampleBufferAudioRenderer *)rendererKey.get() setVolume:volume];
}
310
// Fast-forward/rewind scanning is supported by this engine.
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsScanning() const
{
    return true;
}
315
// Propagates the muted state to every attached audio renderer. isAvailable()
// guarantees the renderer class responds to -setMuted:.
void MediaPlayerPrivateMediaSourceAVFObjC::setMuted(bool muted)
{
    for (const auto& rendererKey : m_sampleBufferAudioRendererMap.keys())
        [(__bridge AVSampleBufferAudioRenderer *)rendererKey.get() setMuted:muted];
}
321
// Returns the last size reported via setNaturalSize() (see sizeWillChangeAtTime).
FloatSize MediaPlayerPrivateMediaSourceAVFObjC::naturalSize() const
{
    return m_naturalSize;
}
326
// True when the media source exists and reports an active video track.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasVideo() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->hasVideo();
}
334
// True when the media source exists and reports an active audio track.
bool MediaPlayerPrivateMediaSourceAVFObjC::hasAudio() const
{
    return m_mediaSourcePrivate && m_mediaSourcePrivate->hasAudio();
}
342
// Visibility changes may flip the rendering path; re-evaluate layer vs.
// decompression-session rendering.
void MediaPlayerPrivateMediaSourceAVFObjC::setVisible(bool)
{
    acceleratedRenderingStateChanged();
}
347
// Duration comes from the media source; zero before load() creates one.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::durationMediaTime() const
{
    return m_mediaSourcePrivate ? m_mediaSourcePrivate->duration() : MediaTime::zeroTime();
}
352
// Current time is the synchronizer timebase's time, clamped to never be
// negative and to never lag behind the most recent seek target (the timebase
// may not have jumped to the seek time yet).
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::currentMediaTime() const
{
    MediaTime synchronizerTime = PAL::toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
    if (synchronizerTime < MediaTime::zeroTime())
        return MediaTime::zeroTime();
    if (synchronizerTime < m_lastSeekTime)
        return m_lastSeekTime;
    return synchronizerTime;
}
362
// MediaSource presentations always start at time zero.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::startTime() const
{
    return MediaTime::zeroTime();
}

// Initial playback position is likewise always zero.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::initialTime() const
{
    return MediaTime::zeroTime();
}
372
// Records a pending seek and schedules it on a zero-delay timer. Restarting
// the timer coalesces rapid seek requests so only the most recent pending
// seek is serviced by seekInternal().
void MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(const MediaTime& time, const MediaTime& negativeThreshold, const MediaTime& positiveThreshold)
{
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekWithTolerance(%p) - time(%s), negativeThreshold(%s), positiveThreshold(%s)", this, toString(time).utf8().data(), toString(negativeThreshold).utf8().data(), toString(positiveThreshold).utf8().data());
    m_seeking = true;
    m_pendingSeek = std::make_unique<PendingSeek>(time, negativeThreshold, positiveThreshold);

    if (m_seekTimer.isActive())
        m_seekTimer.stop();
    m_seekTimer.startOneShot(0_s);
}
384
// Services the pending seek recorded by seekWithTolerance(): resolves the
// actual seek time (fast-seek within tolerances), rebases it on the
// synchronizer's timebase, and notifies the media source.
void MediaPlayerPrivateMediaSourceAVFObjC::seekInternal()
{
    // Take ownership of the pending seek; a new one may be queued while this
    // one is serviced.
    std::unique_ptr<PendingSeek> pendingSeek;
    pendingSeek.swap(m_pendingSeek);

    if (!pendingSeek)
        return;

    if (!m_mediaSourcePrivate)
        return;

    // With zero tolerance seek exactly; otherwise let the source pick the
    // nearest sync sample inside [target - negative, target + positive].
    if (!pendingSeek->negativeThreshold && !pendingSeek->positiveThreshold)
        m_lastSeekTime = pendingSeek->targetTime;
    else
        m_lastSeekTime = m_mediaSourcePrivate->fastSeekTimeForMediaTime(pendingSeek->targetTime, pendingSeek->positiveThreshold, pendingSeek->negativeThreshold);

    // Normalize double-backed times to a rational timescale for CMTime interop.
    if (m_lastSeekTime.hasDoubleValue())
        m_lastSeekTime = MediaTime::createWithDouble(m_lastSeekTime.toDouble(), MediaTime::DefaultTimeScale);

    MediaTime synchronizerTime = PAL::toMediaTime(CMTimebaseGetTime([m_synchronizer timebase]));
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekInternal(%p) - seekTime(%s), synchronizerTime(%s)", this, toString(m_lastSeekTime).utf8().data(), toString(synchronizerTime).utf8().data());

    bool doesNotRequireSeek = synchronizerTime == m_lastSeekTime;

    m_mediaSourcePrivate->willSeek();
    // Stop the synchronizer at the seek target; playback resumes below or when
    // the time-jumped observer fires.
    [m_synchronizer setRate:0 time:PAL::toCMTime(m_lastSeekTime)];
    m_mediaSourcePrivate->seekToTime(m_lastSeekTime);

    // In cases where the destination seek time precisely matches the synchronizer's existing time
    // no time jumped notification will be issued. In this case, just notify the MediaPlayer that
    // the seek completed successfully.
    if (doesNotRequireSeek) {
        m_seeking = false;

        if (shouldBePlaying())
            [m_synchronizer setRate:m_rate];
        if (!seeking() && m_seekCompleted)
            m_player->timeChanged();
    }
}
425
// Called when the source buffers need more data before the seek can finish;
// moves the completion state machine back to Seeking until seekCompleted().
void MediaPlayerPrivateMediaSourceAVFObjC::waitForSeekCompleted()
{
    if (!m_seeking)
        return;
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::waitForSeekCompleted(%p)", this);
    m_seekCompleted = Seeking;
}
433
// Finishes a seek once the source buffers have the data. If video is expected
// but no frame is displayable yet, defer completion until
// setHasAvailableVideoFrame(true) re-invokes this method.
void MediaPlayerPrivateMediaSourceAVFObjC::seekCompleted()
{
    if (m_seekCompleted == SeekCompleted)
        return;
    if (hasVideo() && !m_hasAvailableVideoFrame) {
        // NOTE: "WaitingForAvailableFame" (sic) is the enumerator's declared
        // spelling in the header; cannot be corrected here.
        m_seekCompleted = WaitingForAvailableFame;
        return;
    }
    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::seekCompleted(%p)", this);
    m_seekCompleted = SeekCompleted;
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
    if (!m_seeking)
        m_player->timeChanged();
}
449
// A seek is in flight until both the pending-seek flag clears and the
// completion state machine reaches SeekCompleted.
bool MediaPlayerPrivateMediaSourceAVFObjC::seeking() const
{
    if (m_seeking)
        return true;
    return m_seekCompleted != SeekCompleted;
}
454
// Stores the requested rate (clamped to non-negative) and applies it to the
// synchronizer when playback should be running.
void MediaPlayerPrivateMediaSourceAVFObjC::setRateDouble(double rate)
{
    // AVSampleBufferRenderSynchronizer does not support negative rate yet.
    m_rate = std::max<double>(rate, 0);
    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
}
462
// Selects the audio time-pitch algorithm for all audio renderers: Spectral
// preserves pitch across rate changes, Varispeed does not.
void MediaPlayerPrivateMediaSourceAVFObjC::setPreservesPitch(bool preservesPitch)
{
    NSString *pitchAlgorithm;
    if (preservesPitch)
        pitchAlgorithm = AVAudioTimePitchAlgorithmSpectral;
    else
        pitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed;

    for (const auto& rendererKey : m_sampleBufferAudioRendererMap.keys())
        [(__bridge AVSampleBufferAudioRenderer *)rendererKey.get() setAudioTimePitchAlgorithm:pitchAlgorithm];
}
469
// Accessor for the cached network state (updated by load()/setNetworkState()).
MediaPlayer::NetworkState MediaPlayerPrivateMediaSourceAVFObjC::networkState() const
{
    return m_networkState;
}

// Accessor for the cached ready state.
MediaPlayer::ReadyState MediaPlayerPrivateMediaSourceAVFObjC::readyState() const
{
    return m_readyState;
}
479
// The seekable range is a single contiguous range [start, duration].
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::seekable() const
{
    return std::make_unique<PlatformTimeRanges>(minMediaTimeSeekable(), maxMediaTimeSeekable());
}
484
// The latest seekable time is the presentation's duration.
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::maxMediaTimeSeekable() const
{
    return durationMediaTime();
}

// The earliest seekable time is the presentation's start (always zero here).
MediaTime MediaPlayerPrivateMediaSourceAVFObjC::minMediaTimeSeekable() const
{
    return startTime();
}
494
// Buffered ranges come from the media source's source buffers; empty before load().
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateMediaSourceAVFObjC::buffered() const
{
    return m_mediaSourcePrivate ? m_mediaSourcePrivate->buffered() : std::make_unique<PlatformTimeRanges>();
}
499
// Return-and-reset: each call reports whether loading progressed since the
// previous call (m_loadingProgressed is mutable; this method is const).
bool MediaPlayerPrivateMediaSourceAVFObjC::didLoadingProgress() const
{
    bool progressed = m_loadingProgressed;
    m_loadingProgressed = false;
    return progressed;
}
506
// Element size is irrelevant to sample-buffer rendering; intentionally ignored.
void MediaPlayerPrivateMediaSourceAVFObjC::setSize(const IntSize&)
{
    // No-op.
}
511
// Returns a CGImage for the current frame, refreshing the cached image from
// the decompression session first; null when no frame is available.
NativeImagePtr MediaPlayerPrivateMediaSourceAVFObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
517
// Pulls the pixel buffer for the current time from the decompression session
// into m_lastPixelBuffer. Returns true only when a new buffer was obtained.
// Only applicable on the decompression-session path (no display layer).
bool MediaPlayerPrivateMediaSourceAVFObjC::updateLastPixelBuffer()
{
    if (m_sampleBufferDisplayLayer || !m_decompressionSession)
        return false;

    // With no prior frame, accept a frame slightly in the future (AllowLater)
    // rather than returning nothing; otherwise require an exact-time match.
    auto flags = !m_lastPixelBuffer ? WebCoreDecompressionSession::AllowLater : WebCoreDecompressionSession::ExactTime;
    auto newPixelBuffer = m_decompressionSession->imageForTime(currentMediaTime(), flags);
    if (!newPixelBuffer)
        return false;

    m_lastPixelBuffer = newPixelBuffer;
    return true;
}
531
// Converts the most recent pixel buffer into a CGImage (m_lastImage).
// Returns true only when a new frame was fetched and converted.
bool MediaPlayerPrivateMediaSourceAVFObjC::updateLastImage()
{
    if (!updateLastPixelBuffer())
        return false;

    ASSERT(m_lastPixelBuffer);

    // Lazily create a conformer that rewrites frames into 32BGRA for CG drawing.
    if (!m_rgbConformer) {
        auto attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
        m_rgbConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
    }

    m_lastImage = m_rgbConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());
    return true;
}
547
// MediaPlayerPrivateInterface::paint — delegates to the current-frame variant.
void MediaPlayerPrivateMediaSourceAVFObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    paintCurrentFrameInContext(context, rect);
}
552
// Software-paints the current video frame into the graphics context, scaling
// the whole frame into outputRect. Silently does nothing when painting is
// disabled or no frame is available.
void MediaPlayerPrivateMediaSourceAVFObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& outputRect)
{
    if (context.paintingDisabled())
        return;

    auto image = nativeImageForCurrentTime();
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(image.get()), CGImageGetHeight(image.get()));
    context.drawNativeImage(image, imageRect.size(), outputRect, imageRect);
}
566
// Copies the current video frame into a caller-provided GL texture for WebGL.
// The first call switches the engine onto the decompression-session rendering
// path (the native layer can no longer be displayed in-page).
bool MediaPlayerPrivateMediaSourceAVFObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    // We have been asked to paint into a WebGL canvas, so take that as a signal to create
    // a decompression session, even if that means the native video can't also be displayed
    // in page.
    if (!m_hasBeenAskedToPaintGL) {
        m_hasBeenAskedToPaintGL = true;
        acceleratedRenderingStateChanged();
    }

    ASSERT(context);

    // If no new frame was decoded for the current time, fall back to the most
    // recently decoded frame; with neither, there is nothing to copy. (The
    // original code only checked m_lastPixelBuffer after a *successful*
    // update — where it cannot be null — and fell through with a null buffer
    // on failure.)
    if (!updateLastPixelBuffer() && !m_lastPixelBuffer)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t height = CVPixelBufferGetHeight(m_lastPixelBuffer.get());

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
592
// Whether at least one video frame is ready to display (see setHasAvailableVideoFrame).
bool MediaPlayerPrivateMediaSourceAVFObjC::hasAvailableVideoFrame() const
{
    return m_hasAvailableVideoFrame;
}

// This engine always renders through an accelerated path (layer or GL).
bool MediaPlayerPrivateMediaSourceAVFObjC::supportsAcceleratedRendering() const
{
    return true;
}
602
// Chooses between the two mutually exclusive rendering paths: once asked to
// paint into WebGL, decode through a decompression session; otherwise display
// through an AVSampleBufferDisplayLayer.
void MediaPlayerPrivateMediaSourceAVFObjC::acceleratedRenderingStateChanged()
{
    if (m_hasBeenAskedToPaintGL) {
        destroyLayer();
        ensureDecompressionSession();
    } else {
        destroyDecompressionSession();
        ensureLayer();
    }
}
613
// Forwards source-buffer activation changes to the MediaPlayer client.
void MediaPlayerPrivateMediaSourceAVFObjC::notifyActiveSourceBuffersChanged()
{
    m_player->client().mediaPlayerActiveSourceBuffersChanged(m_player);
}

// MediaSource playback behaves like a fully stored (seekable) stream.
MediaPlayer::MovieLoadType MediaPlayerPrivateMediaSourceAVFObjC::movieLoadType() const
{
    return MediaPlayer::StoredStream;
}

// Rendering resources are created lazily elsewhere; nothing to prepare.
void MediaPlayerPrivateMediaSourceAVFObjC::prepareForRendering()
{
    // No-op.
}
628
// Human-readable engine name used for diagnostics.
String MediaPlayerPrivateMediaSourceAVFObjC::engineDescription() const
{
    static NeverDestroyed<String> description(MAKE_STATIC_STRING_IMPL("AVFoundation MediaSource Engine"));
    return description;
}

// Not implemented yet; returns the empty string.
String MediaPlayerPrivateMediaSourceAVFObjC::languageOfPrimaryAudioTrack() const
{
    // FIXME(125158): implement languageOfPrimaryAudioTrack()
    return emptyString();
}

// Memory attributable to this player beyond its own allocation; none reported.
size_t MediaPlayerPrivateMediaSourceAVFObjC::extraMemoryCost() const
{
    return 0;
}
645
// Reports frame-level playback quality. On the decompression-session path the
// session keeps its own counters (it never composites, so that field is 0);
// on the layer path the numbers come from AVVideoPerformanceMetrics.
std::optional<VideoPlaybackQualityMetrics> MediaPlayerPrivateMediaSourceAVFObjC::videoPlaybackQualityMetrics()
{
    if (m_decompressionSession) {
        return VideoPlaybackQualityMetrics {
            m_decompressionSession->totalVideoFrames(),
            m_decompressionSession->droppedVideoFrames(),
            m_decompressionSession->corruptedVideoFrames(),
            m_decompressionSession->totalFrameDelay().toDouble(),
            0,
        };
    }

    auto metrics = [m_sampleBufferDisplayLayer videoPerformanceMetrics];
    if (!metrics)
        return std::nullopt;

    // -numberOfDisplayCompositedVideoFrames is newer SPI; probe at runtime and
    // silence the availability warnings for the guarded call.
    uint32_t displayCompositedFrames = 0;
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunknown-pragmas"
#pragma clang diagnostic ignored "-Wunguarded-availability-new"
    if ([metrics respondsToSelector:@selector(numberOfDisplayCompositedVideoFrames)])
        displayCompositedFrames = [metrics numberOfDisplayCompositedVideoFrames];
#pragma clang diagnostic pop

    return VideoPlaybackQualityMetrics {
        static_cast<uint32_t>([metrics totalNumberOfVideoFrames]),
        static_cast<uint32_t>([metrics numberOfDroppedVideoFrames]),
        static_cast<uint32_t>([metrics numberOfCorruptedVideoFrames]),
        [metrics totalFrameDelay],
        displayCompositedFrames,
    };
}
678
679 #pragma mark -
680 #pragma mark Utility Methods
681
// Lazily creates the AVSampleBufferDisplayLayer, attaches it to the
// synchronizer, and hands it to the media source and fullscreen manager.
void MediaPlayerPrivateMediaSourceAVFObjC::ensureLayer()
{
    if (m_sampleBufferDisplayLayer)
        return;

    m_sampleBufferDisplayLayer = adoptNS([allocAVSampleBufferDisplayLayerInstance() init]);
#ifndef NDEBUG
    [m_sampleBufferDisplayLayer setName:@"MediaPlayerPrivateMediaSource AVSampleBufferDisplayLayer"];
#endif

    [m_synchronizer addRenderer:m_sampleBufferDisplayLayer.get()];
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setVideoLayer(m_sampleBufferDisplayLayer.get());
    m_videoFullscreenLayerManager->setVideoLayer(m_sampleBufferDisplayLayer.get(), snappedIntRect(m_player->client().mediaPlayerContentBoxRect()).size());
    // The compositor must re-query platformLayer() now that a layer exists.
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
698
// Tears down the display layer: detaches it from the synchronizer at the
// current time, clears it from the media source and fullscreen manager, and
// notifies the compositor that the rendering mode changed.
void MediaPlayerPrivateMediaSourceAVFObjC::destroyLayer()
{
    if (!m_sampleBufferDisplayLayer)
        return;

    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:m_sampleBufferDisplayLayer.get() atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setVideoLayer(nullptr);
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
    m_sampleBufferDisplayLayer = nullptr;
    // With no layer there is no displayable frame anymore.
    setHasAvailableVideoFrame(false);
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
716
// Lazily creates the OpenGL decompression session (used for WebGL painting),
// slaves it to the synchronizer's timebase, and hands it to the media source.
void MediaPlayerPrivateMediaSourceAVFObjC::ensureDecompressionSession()
{
    if (m_decompressionSession)
        return;

    m_decompressionSession = WebCoreDecompressionSession::createOpenGL();
    m_decompressionSession->setTimebase([m_synchronizer timebase]);

    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setDecompressionSession(m_decompressionSession.get());

    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
730
// Tears down the decompression session and clears the available-frame state.
void MediaPlayerPrivateMediaSourceAVFObjC::destroyDecompressionSession()
{
    if (!m_decompressionSession)
        return;

    // Detach from the media source before invalidating so no further samples
    // are enqueued into a dying session.
    if (m_mediaSourcePrivate)
        m_mediaSourcePrivate->setDecompressionSession(nullptr);

    m_decompressionSession->invalidate();
    m_decompressionSession = nullptr;
    setHasAvailableVideoFrame(false);
}
743
// Gate for starting the synchronizer: the client asked to play, no seek is in
// flight, every renderer has samples, and enough data is buffered.
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldBePlaying() const
{
    return m_playing && !seeking() && allRenderersHaveAvailableSamples() && m_readyState >= MediaPlayer::HaveFutureData;
}
748
// Records whether a displayable video frame exists. Becoming true unblocks
// three things: the first-frame client notification, a seek parked in
// WaitingForAvailableFame, and a ready-state change held back for a frame.
void MediaPlayerPrivateMediaSourceAVFObjC::setHasAvailableVideoFrame(bool flag)
{
    if (m_hasAvailableVideoFrame == flag)
        return;
    m_hasAvailableVideoFrame = flag;
    updateAllRenderersHaveAvailableSamples();

    if (!m_hasAvailableVideoFrame)
        return;

    m_player->firstVideoFrameAvailable();
    if (m_seekCompleted == WaitingForAvailableFame)
        seekCompleted();

    if (m_readyStateIsWaitingForAvailableFrame) {
        m_readyStateIsWaitingForAvailableFrame = false;
        m_player->readyStateChanged();
    }
}
768
// Records whether the given audio renderer has an audible sample queued, and
// re-evaluates whether the synchronizer may run. Unknown renderers are ignored.
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::setHasAvailableAudioSample(AVSampleBufferAudioRenderer* renderer, bool flag)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    auto iter = m_sampleBufferAudioRendererMap.find((__bridge CFTypeRef)renderer);
    if (iter == m_sampleBufferAudioRendererMap.end())
        return;

    auto& properties = iter->value;
    if (properties.hasAudibleSample == flag)
        return;
    properties.hasAudibleSample = flag;
    updateAllRenderersHaveAvailableSamples();
}
783
// Recomputes whether every renderer is ready — video (if present) has a
// displayable frame and each audio renderer has an audible sample — and
// starts or stops the synchronizer when that readiness changes.
void MediaPlayerPrivateMediaSourceAVFObjC::updateAllRenderersHaveAvailableSamples()
{
    bool renderersReady = !(hasVideo() && !m_hasAvailableVideoFrame);
    if (renderersReady) {
        for (auto& properties : m_sampleBufferAudioRendererMap.values()) {
            if (!properties.hasAudibleSample) {
                renderersReady = false;
                break;
            }
        }
    }

    if (m_allRenderersHaveAvailableSamples == renderersReady)
        return;

    m_allRenderersHaveAvailableSamples = renderersReady;

    // Bring the synchronizer's rate in line with the new readiness state.
    if (shouldBePlaying() && [m_synchronizer rate] != m_rate)
        [m_synchronizer setRate:m_rate];
    else if (!shouldBePlaying() && [m_synchronizer rate])
        [m_synchronizer setRate:0];
}
812
// Reacts to a duration change: tells the client, then (re)installs a boundary
// time observer on the synchronizer at the new duration so playback pauses
// exactly at end of stream.
void MediaPlayerPrivateMediaSourceAVFObjC::durationChanged()
{
    m_player->durationChanged();

    // Replace any observer installed for the previous duration.
    if (m_durationObserver)
        [m_synchronizer removeTimeObserver:m_durationObserver.get()];

    if (!m_mediaSourcePrivate)
        return;

    MediaTime duration = m_mediaSourcePrivate->duration();
    auto weakThis = createWeakPtr();
    NSArray* times = @[[NSValue valueWithCMTime:PAL::toCMTime(duration)]];

    LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::durationChanged(%p) - duration = %s", this, toString(duration).utf8().data());

    m_durationObserver = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[weakThis, duration] {
        if (!weakThis)
            return;

        MediaTime now = weakThis->currentMediaTime();
        LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::durationChanged(%p) - boundary time observer called, now = %s", weakThis.get(), toString(now).utf8().data());

        weakThis->pauseInternal();
        if (now < duration) {
            // The observer fired early; snap the synchronizer to the duration.
            // (Fixed: the format string previously lacked a %p for weakThis.get().)
            LOG(MediaSource, "MediaPlayerPrivateMediaSourceAVFObjC::durationChanged(%p) - ERROR: boundary time observer called before duration!", weakThis.get());
            [weakThis->m_synchronizer setRate:0 time:PAL::toCMTime(duration)];
        }
        weakThis->m_player->timeChanged();

    }];

    // If the new duration is already behind the playhead, stop immediately.
    if (m_playing && duration <= currentMediaTime())
        pauseInternal();
}
848
// Invoked when the synchronizer's effective playback rate changes; forwards the
// notification to the MediaPlayer client.
void MediaPlayerPrivateMediaSourceAVFObjC::effectiveRateChanged()
{
    m_player->rateChanged();
}
853
// Schedules a natural-size change to take effect when playback reaches `time`.
// Pending changes are queued FIFO: each fired boundary observer removes the
// oldest entry of m_sizeChangeObservers, which assumes callbacks fire in the
// order they were registered.
void MediaPlayerPrivateMediaSourceAVFObjC::sizeWillChangeAtTime(const MediaTime& time, const FloatSize& size)
{
    // Weak pointer from a dedicated factory so flushPendingSizeChanges() can
    // cancel every outstanding callback at once via revokeAll().
    auto weakThis = m_sizeChangeObserverWeakPtrFactory.createWeakPtr(*this);
    NSArray* times = @[[NSValue valueWithCMTime:PAL::toCMTime(time)]];
    RetainPtr<id> observer = [m_synchronizer addBoundaryTimeObserverForTimes:times queue:dispatch_get_main_queue() usingBlock:[this, weakThis, size] {
        if (!weakThis)
            return;

        ASSERT(!m_sizeChangeObservers.isEmpty());
        if (!m_sizeChangeObservers.isEmpty()) {
            RetainPtr<id> observer = m_sizeChangeObservers.takeFirst();
            [m_synchronizer removeTimeObserver:observer.get()];
        }
        setNaturalSize(size);
    }];
    m_sizeChangeObservers.append(WTFMove(observer));

    // If the playhead is already at or past the target time, apply immediately;
    // the queued observer will still fire and dequeue itself later.
    if (currentMediaTime() >= time)
        setNaturalSize(size);
}
874
// Updates the cached natural size and notifies the client, skipping no-op changes.
void MediaPlayerPrivateMediaSourceAVFObjC::setNaturalSize(const FloatSize& size)
{
    if (size != m_naturalSize) {
        m_naturalSize = size;
        m_player->sizeChanged();
    }
}
883
// Cancels every scheduled natural-size change: unregisters all queued boundary
// time observers and revokes the weak pointers handed to their pending blocks.
void MediaPlayerPrivateMediaSourceAVFObjC::flushPendingSizeChanges()
{
    while (!m_sizeChangeObservers.isEmpty())
        [m_synchronizer removeTimeObserver:m_sizeChangeObservers.takeFirst().get()];
    m_sizeChangeObserverWeakPtrFactory.revokeAll();
}
892
893 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Lazily creates the AVStreamSession used for legacy EME secure key storage.
// Returns nil when the soft-linked class (or the required initializer) is
// unavailable, when no media-keys storage directory is configured, or when the
// storage directory cannot be created.
AVStreamSession* MediaPlayerPrivateMediaSourceAVFObjC::streamSession()
{
    if (!getAVStreamSessionClass() || ![getAVStreamSessionClass() instancesRespondToSelector:@selector(initWithStorageDirectoryAtURL:)])
        return nil;

    if (m_streamSession)
        return m_streamSession.get();

    String storageDirectory = m_player->mediaKeysStorageDirectory();
    if (storageDirectory.isEmpty())
        return nil;

    if (!FileSystem::fileExists(storageDirectory) && !FileSystem::makeAllDirectories(storageDirectory))
        return nil;

    String storagePath = FileSystem::pathByAppendingComponent(storageDirectory, "SecureStop.plist");
    m_streamSession = adoptNS([allocAVStreamSessionInstance() initWithStorageDirectoryAtURL:[NSURL fileURLWithPath:storagePath]]);
    return m_streamSession.get();
}
914
// Attaches a legacy CDM session to this player and propagates it to every
// source buffer so incoming encrypted samples can be decrypted.
void MediaPlayerPrivateMediaSourceAVFObjC::setCDMSession(LegacyCDMSession* session)
{
    if (session == m_session)
        return;

    m_session = toCDMSessionMediaSourceAVFObjC(session);

    if (CDMSessionAVStreamSession* cdmStreamSession = toCDMSessionAVStreamSession(m_session))
        cdmStreamSession->setStreamSession(streamSession());

    // m_mediaSourcePrivate can be null (it is null-checked in durationChanged());
    // guard before iterating its source buffers.
    if (!m_mediaSourcePrivate)
        return;

    for (auto& sourceBuffer : m_mediaSourcePrivate->sourceBuffers())
        sourceBuffer->setCDMSession(m_session);
}
927
// Forwards a legacy-EME "key needed" event, with the associated initialization
// data, to the MediaPlayer client.
void MediaPlayerPrivateMediaSourceAVFObjC::keyNeeded(Uint8Array* initData)
{
    m_player->keyNeeded(initData);
}
932
// Called when external output protection (e.g. HDCP) becomes insufficient or is
// restored; relays the status to the attached CDM instance when modern EME is
// enabled, otherwise the parameter is unused.
void MediaPlayerPrivateMediaSourceAVFObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool obscured)
{
#if ENABLE(ENCRYPTED_MEDIA)
    if (m_cdmInstance)
        m_cdmInstance->setHDCPStatus(obscured ? CDMInstance::HDCPStatus::OutputRestricted : CDMInstance::HDCPStatus::Valid);
#else
    UNUSED_PARAM(obscured);
#endif
}
942 #endif
943
944 #if ENABLE(ENCRYPTED_MEDIA)
// Records the newly attached CDM instance and hands it to every source buffer.
// Only one instance may be attached at a time.
void MediaPlayerPrivateMediaSourceAVFObjC::cdmInstanceAttached(CDMInstance& instance)
{
    ASSERT(!m_cdmInstance);
    m_cdmInstance = &instance;

    // m_mediaSourcePrivate can be null (it is null-checked in durationChanged());
    // guard before iterating its source buffers.
    if (!m_mediaSourcePrivate)
        return;

    for (auto& sourceBuffer : m_mediaSourcePrivate->sourceBuffers())
        sourceBuffer->setCDMInstance(&instance);
}
952
// Detaches the current CDM instance: clears it from every source buffer and
// from this player. `instance` must be the instance previously attached.
void MediaPlayerPrivateMediaSourceAVFObjC::cdmInstanceDetached(CDMInstance& instance)
{
    ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);

    // m_mediaSourcePrivate can be null (it is null-checked in durationChanged());
    // guard before iterating its source buffers, but always clear m_cdmInstance.
    if (m_mediaSourcePrivate) {
        for (auto& sourceBuffer : m_mediaSourcePrivate->sourceBuffers())
            sourceBuffer->setCDMInstance(nullptr);
    }

    m_cdmInstance = nullptr;
}
961
// Intentionally a no-op at the player level. NOTE(review): decryption appears
// to be driven per source buffer once a CDM instance is attached (see
// cdmInstanceAttached()) — confirm no player-wide action is needed here.
void MediaPlayerPrivateMediaSourceAVFObjC::attemptToDecryptWithInstance(CDMInstance&)
{
}
965
// Returns true if any source buffer is currently blocked waiting for a
// decryption key. Returns false when no media source is attached.
bool MediaPlayerPrivateMediaSourceAVFObjC::waitingForKey() const
{
    // m_mediaSourcePrivate can be null (it is null-checked in durationChanged()).
    if (!m_mediaSourcePrivate)
        return false;

    return anyOf(m_mediaSourcePrivate->sourceBuffers(), [] (auto& sourceBuffer) {
        return sourceBuffer->waitingForKey();
    });
}
972
// Notifies the client that the aggregate waitingForKey() state may have changed.
void MediaPlayerPrivateMediaSourceAVFObjC::waitingForKeyChanged()
{
    m_player->waitingForKeyChanged();
}
977
// Relays EME initialization data found in the stream, along with its type
// string, to the MediaPlayer client.
void MediaPlayerPrivateMediaSourceAVFObjC::initializationDataEncountered(const String& initDataType, RefPtr<ArrayBuffer>&& initData)
{
    m_player->initializationDataEncountered(initDataType, WTFMove(initData));
}
982 #endif
983
// Delegates to the MediaPlayer for the list of content types that must be
// decoded in hardware.
const Vector<ContentType>& MediaPlayerPrivateMediaSourceAVFObjC::mediaContentTypesRequiringHardwareSupport() const
{
    return m_player->mediaContentTypesRequiringHardwareSupport();
}
988
// Delegates to the MediaPlayer for whether hardware decode support should be
// verified before accepting media.
bool MediaPlayerPrivateMediaSourceAVFObjC::shouldCheckHardwareSupport() const
{
    return m_player->shouldCheckHardwareSupport();
}
993
// Transitions the ready state and updates the synchronizer's rate to match.
// When video is present but no frame is yet available for compositing, the
// readyStateChanged() notification is deferred (via
// m_readyStateIsWaitingForAvailableFrame) so the client is not told we have
// current data before anything can actually be displayed.
void MediaPlayerPrivateMediaSourceAVFObjC::setReadyState(MediaPlayer::ReadyState readyState)
{
    if (m_readyState == readyState)
        return;

    m_readyState = readyState;

    if (shouldBePlaying())
        [m_synchronizer setRate:m_rate];
    else
        [m_synchronizer setRate:0];

    // Hold the notification until the first displayable video frame arrives
    // (presumably delivered where m_hasAvailableVideoFrame is set — confirm).
    if (m_readyState >= MediaPlayerEnums::HaveCurrentData && hasVideo() && !m_hasAvailableVideoFrame) {
        m_readyStateIsWaitingForAvailableFrame = true;
        return;
    }

    m_player->readyStateChanged();
}
1013
// Updates the cached network state and notifies the client on actual changes.
void MediaPlayerPrivateMediaSourceAVFObjC::setNetworkState(MediaPlayer::NetworkState networkState)
{
    if (networkState != m_networkState) {
        m_networkState = networkState;
        m_player->networkStateChanged();
    }
}
1022
// Registers an audio renderer with the synchronizer, seeding it with the
// player's current mute, volume, and pitch-preservation settings. A renderer
// that is already tracked is ignored.
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::addAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    // The map is keyed by the renderer object itself, bridged to CFTypeRef.
    if (!m_sampleBufferAudioRendererMap.add((__bridge CFTypeRef)audioRenderer, AudioRendererProperties()).isNewEntry)
        return;

    [audioRenderer setMuted:m_player->muted()];
    [audioRenderer setVolume:m_player->volume()];
    // Spectral preserves pitch across rate changes; Varispeed lets it shift.
    [audioRenderer setAudioTimePitchAlgorithm:(m_player->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    [m_synchronizer addRenderer:audioRenderer];
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
1037
// Detaches an audio renderer from the synchronizer at the current timebase
// time and forgets its per-renderer state. Unknown renderers are ignored.
ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN
void MediaPlayerPrivateMediaSourceAVFObjC::removeAudioRenderer(AVSampleBufferAudioRenderer* audioRenderer)
ALLOW_NEW_API_WITHOUT_GUARDS_END
{
    auto iter = m_sampleBufferAudioRendererMap.find((__bridge CFTypeRef)audioRenderer);
    if (iter == m_sampleBufferAudioRendererMap.end())
        return;

    CMTime currentTime = CMTimebaseGetTime([m_synchronizer timebase]);
    [m_synchronizer removeRenderer:audioRenderer atTime:currentTime withCompletionHandler:^(BOOL){
        // No-op.
    }];

    m_sampleBufferAudioRendererMap.remove(iter);
    m_player->client().mediaPlayerRenderingModeChanged(m_player);
}
1054
// A media characteristic changed; recompute renderer sample availability and
// forward the change to the MediaPlayer client.
void MediaPlayerPrivateMediaSourceAVFObjC::characteristicsChanged()
{
    updateAllRenderersHaveAvailableSamples();
    m_player->characteristicChanged();
}
1060
// Moves video into (or out of) the given fullscreen layer. The last decoded
// image is refreshed first and passed to the manager — presumably as interim
// content during the transition; see VideoFullscreenLayerManager.
void MediaPlayerPrivateMediaSourceAVFObjC::setVideoFullscreenLayer(PlatformLayer *videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    updateLastImage();
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
}
1066
// Forwards the fullscreen layer's frame rectangle to the layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1071
// A platform text-track representation is required only while a fullscreen
// layer is active.
bool MediaPlayerPrivateMediaSourceAVFObjC::requiresTextTrackRepresentation() const
{
    return m_videoFullscreenLayerManager->videoFullscreenLayer();
}
1076     
// Forwards text-track bounds synchronization to the fullscreen layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::syncTextTrackBounds()
{
    m_videoFullscreenLayerManager->syncTextTrackBounds();
}
1081     
// Forwards the text-track representation to the fullscreen layer manager.
void MediaPlayerPrivateMediaSourceAVFObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
}
1086
1087 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Stores the wireless playback target. Whether it is actually used is gated by
// setShouldPlayToPlaybackTarget() (see isCurrentPlaybackTargetWireless()).
void MediaPlayerPrivateMediaSourceAVFObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);
}
1092
// Records whether media should be routed to the wireless playback target and,
// on an actual change, tells the client the wireless playback state may differ.
void MediaPlayerPrivateMediaSourceAVFObjC::setShouldPlayToPlaybackTarget(bool shouldPlayToTarget)
{
    if (shouldPlayToTarget != m_shouldPlayToTarget) {
        m_shouldPlayToTarget = shouldPlayToTarget;
        if (m_player)
            m_player->currentPlaybackTargetIsWirelessChanged();
    }
}
1103
// Playback is considered wireless only when a target is set, routing to it is
// enabled, and the target reports an active route.
bool MediaPlayerPrivateMediaSourceAVFObjC::isCurrentPlaybackTargetWireless() const
{
    return m_playbackTarget && m_shouldPlayToTarget && m_playbackTarget->hasActiveRoute();
}
1111 #endif
1112
1113 }
1114
1115 #endif