Define media buffering policy
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetMIMETypeCache.h"
32 #import "AVAssetTrackUtilities.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "DeprecatedGlobalSettings.h"
41 #import "Extensions3D.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "OutOfBandTextTrackPrivateAVF.h"
54 #import "PixelBufferConformerCV.h"
55 #import "PlatformTimeRanges.h"
56 #import "SecurityOrigin.h"
57 #import "SerializedPlatformRepresentationMac.h"
58 #import "SharedBuffer.h"
59 #import "TextEncoding.h"
60 #import "TextTrackRepresentation.h"
61 #import "TextureCacheCV.h"
62 #import "VideoFullscreenLayerManagerObjC.h"
63 #import "VideoTextureCopierCV.h"
64 #import "VideoTrackPrivateAVFObjC.h"
65 #import "WebCoreAVFResourceLoader.h"
66 #import "WebCoreCALayerExtras.h"
67 #import "WebCoreNSURLSession.h"
68 #import <JavaScriptCore/DataView.h>
69 #import <JavaScriptCore/JSCInlines.h>
70 #import <JavaScriptCore/TypedArrayInlines.h>
71 #import <JavaScriptCore/Uint16Array.h>
72 #import <JavaScriptCore/Uint32Array.h>
73 #import <JavaScriptCore/Uint8Array.h>
74 #import <functional>
75 #import <objc/runtime.h>
76 #import <pal/avfoundation/MediaTimeAVFoundation.h>
77 #import <pal/spi/cocoa/QuartzCoreSPI.h>
78 #import <pal/spi/mac/AVFoundationSPI.h>
79 #import <wtf/BlockObjCExceptions.h>
80 #import <wtf/ListHashSet.h>
81 #import <wtf/NeverDestroyed.h>
82 #import <wtf/OSObjectPtr.h>
83 #import <wtf/URL.h>
84 #import <wtf/text/CString.h>
85
86 #if ENABLE(AVF_CAPTIONS)
87 #include "TextTrack.h"
88 #endif
89
90 #import <AVFoundation/AVAssetImageGenerator.h>
91 #import <AVFoundation/AVAssetTrack.h>
92 #import <AVFoundation/AVMediaSelectionGroup.h>
93 #import <AVFoundation/AVMetadataItem.h>
94 #import <AVFoundation/AVPlayer.h>
95 #import <AVFoundation/AVPlayerItem.h>
96 #import <AVFoundation/AVPlayerItemOutput.h>
97 #import <AVFoundation/AVPlayerItemTrack.h>
98 #import <AVFoundation/AVPlayerLayer.h>
99 #import <AVFoundation/AVTime.h>
100
101 #if PLATFORM(IOS_FAMILY)
102 #import "WAKAppKitStubs.h"
103 #import <CoreImage/CoreImage.h>
104 #import <UIKit/UIDevice.h>
105 #import <mach/mach_port.h>
106 #import <pal/ios/UIKitSoftLink.h>
107 #else
108 #import <Foundation/NSGeometry.h>
109 #import <QuartzCore/CoreImage.h>
110 #endif
111
112 #if USE(VIDEOTOOLBOX)
113 #import <CoreVideo/CoreVideo.h>
114 #import <VideoToolbox/VideoToolbox.h>
115 #endif
116
117 #import "CoreVideoSoftLink.h"
118 #import "MediaRemoteSoftLink.h"
119
namespace std {
// Specialize std::iterator_traits for WTF::HashSet's iterator so it can be passed to
// STL algorithms that require a value_type typedef (HashSet iterators don't provide one).
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
125
#if ENABLE(AVF_CAPTIONS)
// Note: This must be defined before our SOFT_LINK macros:
@class AVMediaSelectionOption;
// SPI category: exposes the out-of-band source/identifier strings AVFoundation attaches to a
// media-selection option. synchronizeTextTrackState() matches outOfBandIdentifier against the
// unique IDs WebKit supplies in AVURLAssetOutOfBandAlternateTracksKey.
@interface AVMediaSelectionOption (OutOfBandExtensions)
@property (nonatomic, readonly) NSString* outOfBandSource;
@property (nonatomic, readonly) NSString* outOfBandIdentifier;
@end
#endif
134
// SPI category: the URL AVFoundation actually resolved for the asset.
// NOTE(review): presumably reflects any server-side redirect — confirm against the SPI header.
@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end
138
139 #import <pal/cf/CoreMediaSoftLink.h>
140 #import <pal/cocoa/AVFoundationSoftLink.h>
141
142 SOFT_LINK_FRAMEWORK(MediaToolbox)
143 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
144
145 #if PLATFORM(IOS_FAMILY)
146
147 #if HAVE(CELESTIAL)
148 SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
149 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
150 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
151 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
152 #define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
153 #define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
154 #define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
155 #endif // HAVE(CELESTIAL)
156
157 #endif // PLATFORM(IOS_FAMILY)
158
159 using namespace WebCore;
160
// KVO context values passed to addObserver:forKeyPath:options:context: so that
// -observeValueForKeyPath:... can tell which kind of object a change notification came from.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
167
// Objective-C observer bridging AVFoundation notifications, KVO changes, and legible (caption)
// output callbacks back to the owning MediaPlayerPrivateAVFoundationObjC via a WeakPtr.
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
{
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
    GenericTaskQueue<Timer, std::atomic<unsigned>> m_taskQueue;
    int m_delayCallbacks;
}
-(id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
@end
182
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Resource-loader delegate: forwards AVAssetResourceLoader requests to the player so WebKit can
// service media loads itself (see globalLoaderDelegateQueue() for the dispatch queue used).
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
    GenericTaskQueue<Timer, std::atomic<unsigned>> m_taskQueue;
}
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
#endif
192
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for AVPlayerItemVideoOutput: notifies the player when new video frames are
// available or the output sequence was flushed.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
}
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
202
203 namespace WebCore {
204 using namespace PAL;
205
206 static NSArray *assetMetadataKeyNames();
207 static NSArray *itemKVOProperties();
208 static NSArray *assetTrackMetadataKeyNames();
209 static NSArray *playerKVOProperties();
210 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
211
212 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the process-wide serial queue on which AVAssetResourceLoader delegate
// callbacks are delivered. Created lazily, exactly once.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
222 #endif
223
224 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the process-wide serial queue used for AVPlayerItemVideoOutput pull-delegate
// callbacks. Created lazily, exactly once.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
234 #endif
235
// Registers this engine with the media-engine registry, unless soft-linked
// AVFoundation is unavailable on this system.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    auto createPlayer = [](MediaPlayer* player) {
        return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player);
    };
    registrar(createPlayer, getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    ASSERT(AVAssetMIMETypeCache::singleton().isAvailable());
}
245
// Returns the AVAssetCache rooted at the given directory path. An empty path falls back
// to a "MediaCache" directory inside the temporary directory.
static AVAssetCache *assetCacheForPath(const String& path)
{
    if (!path.isEmpty())
        return [PAL::getAVAssetCacheClass() assetCacheWithURL:[NSURL fileURLWithPath:path isDirectory:YES]];

    NSURL *fallbackURL = [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES];
    return [PAL::getAVAssetCacheClass() assetCacheWithURL:fallbackURL];
}
257
// Collects the security origins of all entries in the media cache at `path`.
// Cache keys are URL strings; keys that do not parse as valid URLs are skipped.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> origins;
    for (NSString *key in [assetCacheForPath(path) allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (!keyAsURL.isValid())
            continue;
        origins.add(SecurityOrigin::create(keyAsURL));
    }
    return origins;
}
268
// Converts an NSDate into a WallTime using its offset (in seconds) from the UNIX epoch.
// `date` must be non-nil; messaging nil would silently yield the epoch.
static WallTime toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    return WallTime::fromRawSeconds([date timeIntervalSince1970]);
}
274
// Removes media-cache entries at `path` that were modified after `modifiedSince`.
// Runs in two phases: first the AVAssetCache's keyed entries, then loose
// "CachedMedia-" files left on disk under the cache directory. A `modifiedSince`
// of the epoch (or earlier) deletes the whole cache directory wholesale.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, WallTime modifiedSince)
{
    AVAssetCache* assetCache = assetCacheForPath(path);

    // Phase 1: drop keyed cache entries newer than the cutoff.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // "Clear everything": remove the entire backing directory and stop.
    if (modifiedSince <= WallTime::fromRawSeconds(0)) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }

    // Phase 2: enumerate the top level of the cache directory (no recursion) and
    // collect regular "CachedMedia-" files newer than the cutoff.
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];

    // Collect first, delete after: avoids mutating the directory while enumerating it.
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];

        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;

        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;

        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;

        [urlsToDelete addObject:fileURL];
    }

    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
316
// Removes every cache entry at `path` whose key parses as a URL belonging
// to one of the given security origins.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    AVAssetCache* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (keyAsURL.isValid() && origins.contains(SecurityOrigin::create(keyAsURL)))
            [assetCache removeEntryForKey:key];
    }
}
328
// Constructs the player back end. The three Objective-C helper objects (movie observer,
// video-output pull delegate, resource-loader delegate) each hold only a WeakPtr back to
// this object, so late callbacks after destruction become no-ops once the factory is revoked.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
358
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
    // Invalidate all weak pointers first so delegate/observer callbacks that race with
    // destruction cannot reach back into this object.
    m_weakPtrFactory.revokeAll();

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the resource-loader delegate before tearing anything else down, then
    // invalidate any in-flight WebCoreAVFResourceLoader instances.
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutput setDelegate:nil queue:0];
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // cancelLoad() removes the remaining KVO observers and releases the AVPlayer/AVPlayerItem.
    cancelLoad();
}
378
// Cancels any in-progress load and releases every AVFoundation object this player owns:
// the asset, the player item (plus its KVO observers and legible output), the player
// (plus its KVO observers and time observer), and all cached item state. The order
// matters: observers must be removed before the observed objects are released.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

    // Detach the legible (caption) output from the item before dropping our reference.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }

    // Unregister every item KVO observation registered in itemKVOProperties().
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        // Remove the periodic time observer before releasing the player.
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        setShouldObserveTimeControlStatus(false);

        [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
#if !PLATFORM(IOS_FAMILY)
        [m_avPlayer setOutputContext:nil];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was given an "enabled" observer when cached; remove them all.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Disconnect the Web Audio source provider from the (now released) item and track.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
447
// True once createVideoLayer() has been asked to create an AVPlayerLayer (even if the
// layer itself is created asynchronously on the main thread).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
452
// True when a non-layer rendering path exists: either an AVPlayerItemVideoOutput
// (when available) or an AVAssetImageGenerator fallback.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
461
// Creates the renderer used for painting into a GraphicsContext: the video output when the
// platform supports AVPlayerItemVideoOutput, otherwise an image-generator fallback.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
470
// Lazily creates the AVAssetImageGenerator used as a fallback frame source.
// Requires the asset to exist; a no-op if the generator was already created.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    using namespace PAL; // needed so soft-linked kCMTimeZero resolves below
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [PAL::getAVAssetImageGeneratorClass() assetImageGeneratorWithAsset:m_avAsset.get()];

    // Crop to the clean aperture, honor the track's preferred transform, and request
    // exact-time frames (zero tolerance) so painted frames match currentTime precisely.
    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];
}
486
// Tears down both possible context renderers (video output and image generator).
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
494
// Releases the image generator, if one was created. No-op otherwise.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Use nil (not 0) for an Objective-C object pointer, matching the rest of this file.
    m_imageGenerator = nil;
}
504
// Requests creation of the AVPlayerLayer. The actual work is deferred to the main thread;
// m_haveBeenAskedToCreateLayer is only set inside the main-thread block, and both the
// synchronous and deferred paths re-check preconditions because state may change in between.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = makeWeakPtr(*this)] {
        // Bail if the player was destroyed before this block ran.
        if (!weakThis)
            return;

        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX) && HAVE(AVFOUNDATION_VIDEO_OUTPUT)
        // Also keep a video output alive so frames can be painted/copied while layered.
        if (!m_videoOutput)
            createVideoOutput();
#endif

        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
529
// Creates the AVPlayerLayer, attaches it to the AVPlayer, registers KVO on
// "readyForDisplay" (so hasAvailableVideoFrame() can be answered from cache), and hands
// the layer to the fullscreen layer manager.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([PAL::allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observed with the AVPlayerLayer-specific KVO context; removed in destroyVideoLayer().
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    ALWAYS_LOG(LOGIDENTIFIER);

    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    // SPI; guarded by respondsToSelector because availability varies across OS builds.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
}
554
// Destroys the AVPlayerLayer. The KVO observer added in createAVPlayerLayer() must be
// removed before the layer is detached and released.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];
    m_videoFullscreenLayerManager->didDestroyVideoLayer();

    m_videoLayer = nil;
}
568
// Returns the stream's start date, in milliseconds since the epoch, as a MediaTime.
// Computed as the item's current date minus the current playback offset.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // Date changes as the track's playback position changes. Must subtract currentTime (offset in seconds) from date offset to get date beginning
    double date = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // No live streams were made during the epoch (1970). AVFoundation returns 0 if the media file doesn't have a start date
    if (!date)
        return MediaTime::invalidTime();

    double currentTime = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Rounding due to second offset error when subtracting.
    return MediaTime::createWithDouble(round(date - currentTime));
}
583
// Answers whether a video frame can currently be displayed, checking the layer's cached
// readyForDisplay state first, then the video output, then whether we ever painted a frame.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    if (currentRenderingMode() == MediaRenderingToLayer)
        return m_cachedIsReadyForDisplay;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Either a buffer we already pulled, or a fresh one available at the current item time.
    if (m_videoOutput && (m_lastPixelBuffer || [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]]))
        return true;
#endif

    return m_videoFrameHasDrawn;
}
596
597 #if ENABLE(AVF_CAPTIONS)
// Maps a WebKit text-track kind to the AVFoundation media-characteristic array used when
// registering out-of-band tracks. Under the 2015 manual-selection behavior all kinds map
// to "auxiliary content".
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    case PlatformTextTrack::Description:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];
    case PlatformTextTrack::Forced:
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];
    default:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    }
}
619     
// Forwards a track-mode change notification to the base class.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
624     
// Pushes the mode (showing/hidden/disabled) of each out-of-band source track onto the
// matching in-band track object. Tracks are matched by comparing the source's unique ID
// against the AVMediaSelectionOption's out-of-band identifier (SPI, see category above).
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are synchronized here.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOption> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();

            if (![[currentOption.get() outOfBandIdentifier] isEqual:(__bridge NSString *)uniqueID.get()])
                continue;

            // Translate the platform track mode to the in-band track's mode enum;
            // unknown modes fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
655 #endif
656
657
// Returns the canonical form of `url`, as produced by NSURLProtocol's request
// canonicalization. Falls back to the original URL when canonicalization is unavailable.
static NSURL *canonicalURL(const URL& url)
{
    NSURL *nsURL = url;
    if (url.isEmpty())
        return nsURL;

    auto request = adoptNS([[NSURLRequest alloc] initWithURL:nsURL]);
    if (!request)
        return nsURL;

    if (NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()])
        return [canonicalRequest URL];

    return nsURL;
}
674
675 #if PLATFORM(IOS_FAMILY)
// Converts a WebCore Cookie into an NSHTTPCookie. `expires` is in milliseconds since the
// epoch; the secure/session flags are only added to the property dictionary when set.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
693 #endif
694
// Builds the AVURLAsset options dictionary (reference restrictions, HTTP headers, custom
// URL loading, out-of-band text tracks, cookies, cache policy) and creates the asset.
// No-op if an asset already exists. Callback delivery is suspended while the asset and
// its resource-loader delegate are being wired up.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
{
    if (m_avAsset)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    // Forbid mixed local/remote reference chains within the asset.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Propagate the page's Referer and User-Agent to AVFoundation's loader (SPI key).
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    // Prefer routing all loads through WebKit's own URL loading when the SDK supports it.
    if (PAL::canLoad_AVFoundation_AVURLAssetUseClientURLLoadingExclusively())
        [options setObject:@YES forKey:AVURLAssetUseClientURLLoadingExclusively];
#if PLATFORM(IOS_FAMILY)
    else if (PAL::canLoad_AVFoundation_AVURLAssetRequiresCustomURLLoadingKey())
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty())
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

    // Tell AVFoundation the declared MIME type (and codecs) when it wasn't just inferred
    // from the file extension, so sniffing can be skipped.
    auto type = player()->contentMIMEType();
    if (PAL::canLoad_AVFoundation_AVURLAssetOutOfBandMIMETypeKey() && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    // Register out-of-band text tracks; each source is identified by its unique ID so
    // synchronizeTextTrackState() can match selection options back to these tracks.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: (__bridge NSString *)label.get(),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: (__bridge NSString *)language.get(),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: (__bridge NSString *)uniqueID.get(),
                AVOutOfBandAlternateTrackSourceKey: (__bridge NSString *)url.get(),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS_FAMILY)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS_FAMILY)
    // Hand the page's cookies to AVFoundation so media loads carry the same session.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(url, cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        if (PAL::canLoad_AVFoundation_AVURLAssetHTTPCookiesKey())
            [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    // Persistent caching is opt-in via the client; otherwise request no persistent cache.
    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];

    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([PAL::allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

    // When the NSURLSession SPI is available, give the resource loader a session backed by
    // WebKit's media resource loader so loads go through WebKit networking.
    if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }

#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
814
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItem *item)
{
    // Nothing to attach the item to yet; createAVPlayer() re-calls us once a player exists.
    if (!m_avPlayer)
        return;

    // AVPlayer item replacement is done on the main thread; if we are elsewhere,
    // bounce over while keeping both objects alive for the async hop.
    if (!pthread_main_np()) {
        RetainPtr<AVPlayer> protectedPlayer = m_avPlayer.get();
        RetainPtr<AVPlayerItem> protectedItem = item;
        dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
            [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
        });
        return;
    }

    [m_avPlayer replaceCurrentItemWithPlayerItem:item];
}
831
// Lazily creates the AVPlayer, installs KVO observers, and re-applies any state
// (playback target, muted, item) that was set before the player existed.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([PAL::allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

    setShouldObserveTimeControlStatus(true);

    // WebCore drives track selection itself; keep AVFoundation's automatic selection off.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS_FAMILY)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS_FAMILY) && !PLATFORM(IOS_FAMILY_SIMULATOR) && !PLATFORM(IOSMAC)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything, then
        // re-apply it so the new AVPlayer picks up the muted state. (Previously this
        // pushed the just-cleared false value to the player, silently unmuting.)
        m_muted = false;
        setMuted(true);
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    // Attach a player item created before the player existed.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
880
// Lazily creates the AVPlayerItem for the current asset and wires up everything
// that observes it: end-of-playback notification, KVO on itemKVOProperties(),
// the legible (caption) output, the Web Audio provider, and the video output.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    // Defer observer callbacks until setup is complete.
    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([PAL::allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Observe with "prior" notifications as well as new values.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    // Spectral preserves pitch across rate changes; Varispeed lets pitch follow rate.
    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    // If the player was created first, attach the new item to it now.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    // Deliver WebVTT cues natively; suppress AVFoundation's own caption rendering
    // since WebCore renders cues itself.
    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([PAL::allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Point the Web Audio source provider at the new item and its first audible track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#endif

    setDelayCallbacks(false);
}
928
// Kicks off (at most once) the asynchronous load of the asset's "playable" and
// "tracks" keys, notifying the main thread when playability is known.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    auto weakThis = makeWeakPtr(*this);

    NSArray *keys = [NSArray arrayWithObjects:@"playable", @"tracks", nil];
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:keys completionHandler:^{
        // The handler may fire on an arbitrary queue; the player may be gone by then.
        callOnMainThread([weakThis] {
            if (!weakThis)
                return;
            weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
945
// Asynchronously loads the asset-level metadata keys, then fans out to load
// per-track metadata, using a dispatch group to signal metadataLoaded only once
// every outstanding load has finished.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    INFO_LOG(LOGIDENTIFIER);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    // Enter for the asset-level load; the matching leave is at the end of the
    // main-thread lambda below, after all track loads have been entered.
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = makeWeakPtr(*this);
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only fan out to tracks if the player is still alive and "tracks" loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    // One enter/leave pair per track load.
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    // Fires once the group drains; re-dispatch to the WebCore main thread.
    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
975
// Maps the cached AVPlayerItem status plus the cached buffering flags onto
// WebCore's ItemStatus. Flag precedence: likely-to-keep-up > buffer-full >
// buffer-empty > plain ready-to-play.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
994
// The layer composited for inline playback; owned by the fullscreen layer manager.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
999
// Refreshes the still image shown inline while the video plays fullscreen.
// No-op when the video-output path is unavailable.
void MediaPlayerPrivateAVFoundationObjC::updateVideoFullscreenInlineImage()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Synchronous update so the snapshot reflects the current frame.
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->updateVideoFullscreenInlineImage(m_lastImage);
#endif
}
1007
// Hands the (possibly null) fullscreen layer to the layer manager, passing the
// latest frame snapshot so the transition doesn't flash, then recomputes
// whether external (AirPlay) playback must be disabled.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, Function<void()>&& completionHandler)
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
#else
    // No video output available: no snapshot to hand over.
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), nil);
#endif
    updateDisableExternalPlayback();
}
1018
// Forwards the fullscreen layer's frame to the layer manager.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1023
// Records the requested gravity and, if a video layer exists, translates it to
// the corresponding AVLayerVideoGravity, skipping the (expensive) layer update
// when the value is unchanged.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    // Caption bounds track the video rect, which gravity affects.
    syncTextTrackBounds();
}
1047
// On iOS-family (except watchOS), toggles picture-in-picture mode on the video
// layer and refreshes the external-playback restriction; elsewhere a no-op.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    // setPIPModeEnabled: is not available on all OS versions; probe first.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1058     
// Fullscreen-standby state feeds into the external-playback decision on iOS-family.
void MediaPlayerPrivateAVFoundationObjC::videoFullscreenStandbyChanged()
{
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    updateDisableExternalPlayback();
#endif
}
1065
1066 #if PLATFORM(IOS_FAMILY)
// Returns the most recently cached timed-metadata payload, or nil if none.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1073
// Returns the player item's access log as a string, or the empty string when
// there is no item or no log yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    // -accessLog returns nil before any network activity has been logged;
    // guard so we don't hand nil data to -initWithData:encoding: (undefined)
    // and so callers get the same empty string as the no-item case.
    AVPlayerItemAccessLog *log = [m_avPlayerItem.get() accessLog];
    if (!log)
        return emptyString();

    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1084
// Returns the player item's error log as a string, or the empty string when
// there is no item or no log yet.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    // -errorLog returns nil when no errors have been logged; guard so we don't
    // hand nil data to -initWithData:encoding: (undefined) and so callers get
    // the same empty string as the no-item case.
    AVPlayerItemErrorLog *log = [m_avPlayerItem.get() errorLog];
    if (!log)
        return emptyString();

    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1095 #endif
1096
// Playback reached the end: clear the requested-playing flag before handing
// off to the shared base-class end-of-media handling.
void MediaPlayerPrivateAVFoundationObjC::didEnd()
{
    m_requestedPlaying = false;
    MediaPlayerPrivateAVFoundation::didEnd();
}
1102
// Shows/hides the video layer inside a transaction with implicit animations
// disabled, so visibility changes take effect immediately.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1111     
// Starts playback by applying the previously requested rate to the AVPlayer.
// Ignored until metadata has loaded.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    if (!metaDataAvailable())
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    m_requestedPlaying = true;
    setPlayerRate(m_requestedRate);
}
1122
// Pauses playback by setting the AVPlayer rate to zero.
// Ignored until metadata has loaded.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    if (!metaDataAvailable())
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    m_requestedPlaying = false;
    setPlayerRate(0);
}
1133
// Paused is defined by the cached AVPlayer time-control status, not by m_requestedPlaying.
bool MediaPlayerPrivateAVFoundationObjC::platformPaused() const
{
    return m_cachedTimeControlStatus == AVPlayerTimeControlStatusPaused;
}
1138
// Returns the media duration: invalid until the asset has loaded, infinite for
// indefinite durations (e.g. live streams), otherwise the numeric duration.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return PAL::toMediaTime(cmDuration);

    // Indefinite duration (e.g. a live stream) maps to positive infinity.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();
}
1163
// Returns the item's current playback position, clamped to be non-negative;
// zero when metadata or the item is unavailable or the time is non-numeric.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerTime))
        return MediaTime::zeroTime();

    // Clamp: AVFoundation can report slightly negative times near the start.
    return std::max(PAL::toMediaTime(playerTime), MediaTime::zeroTime());
}
1175
// Performs an asynchronous seek with the given tolerances, suspending
// time-control-status observation for the duration so the transient rate
// changes AVFoundation makes during a seek don't surface as state changes.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Drop any half-built metadata cues; they belong to the old position.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;
    
    auto weakThis = makeWeakPtr(*this);

    // Re-enabled on the main thread when the completion handler runs.
    setShouldObserveTimeControlStatus(false);
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->setShouldObserveTimeControlStatus(true);
            // finished is NO when this seek was superseded by another.
            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1208
// Applies the volume to the AVPlayer. On iOS-family, volume is system-controlled,
// so this is deliberately a no-op there.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS_FAMILY)
    UNUSED_PARAM(volume);
    return;
#else

    if (!m_avPlayer)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, volume);

    [m_avPlayer.get() setVolume:volume];
#endif
}
1224
// Records the muted state and pushes it to the AVPlayer when one exists.
// Early-out on no change keeps repeated calls cheap; createAVPlayer() relies on
// that by clearing m_muted before re-applying a pending muted state.
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    if (muted == m_muted)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, muted);

    m_muted = muted;

    // The state is cached even without a player; it is re-applied on creation.
    if (m_avPlayer)
        [m_avPlayer.get() setMuted:m_muted];
}
1239
// Caption visibility is handled elsewhere in this backend; this override only
// logs the request (the parameter is otherwise unused).
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    ALWAYS_LOG(LOGIDENTIFIER, closedCaptionsVisible);
}
1249
// Remembers the requested rate; applies it immediately only if playback was
// requested (otherwise platformPlay() applies it later).
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    m_requestedRate = rate;
    if (m_requestedPlaying)
        setPlayerRate(rate);
}
1256
// Sets the AVPlayer rate while suppressing time-control-status KVO, then
// snapshots the resulting status so our cache stays consistent with the player.
void MediaPlayerPrivateAVFoundationObjC::setPlayerRate(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    // Suspend observation so the rate change doesn't echo back as a status event.
    setShouldObserveTimeControlStatus(false);
    [m_avPlayer setRate:rate];
    // Capture the status the rate change produced before re-enabling observation.
    m_cachedTimeControlStatus = [m_avPlayer timeControlStatus];
    setShouldObserveTimeControlStatus(true);
    setDelayCallbacks(false);
}
1267
// Current playback rate from the KVO cache; zero before metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    if (!metaDataAvailable())
        return 0;

    return m_cachedRate;
}
1275
// Timestamp of the last seekable-ranges change; 0 where the AVPlayerItem API
// (macOS 10.13+ / iOS 11+) is unavailable.
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1284
// Live-stream playlist update interval; 0 where the AVPlayerItem API
// (macOS 10.13+ / iOS 11+) is unavailable.
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1293
// Picks the pitch algorithm: Spectral preserves pitch across rate changes,
// Varispeed lets pitch follow the rate. Mirrors the choice in createAVPlayerItem().
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (m_avPlayerItem)
        [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1299
// Converts the cached loadedTimeRanges into PlatformTimeRanges, skipping
// invalid or empty CMTimeRanges. Empty result when there is no player item.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        bufferedRanges->add(PAL::toMediaTime(range.start), PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1314
// Earliest seekable time across the cached seekable ranges; zero when there
// are no ranges or none of them is valid and non-empty.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliest = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliest = std::min(earliest, PAL::toMediaTime(range.start));
    }
    // Without any valid range, report zero rather than +infinity.
    return foundValidRange ? earliest : MediaTime::zeroTime();
}
1334
// Latest seekable time across the seekable ranges, lazily refreshing the cache
// from the player item if needed. Default-constructed (zero) when no valid range.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Populate the cache on demand (cleared elsewhere when ranges change).
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latest = std::max(latest, PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latest;
}
1352
// Latest buffered time across the cached loaded ranges; zero when nothing is cached.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latestLoaded;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestLoaded = std::max(latestLoaded, PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }

    return latestLoaded;
}
1371
// Sums totalSampleDataLength over all cached tracks, memoizing the result in
// the mutable m_cachedTotalBytes. Zero before metadata is available.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1385
// Takes ownership of the (type-erased) AVAsset.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id>&& asset)
{
    m_avAsset = WTFMove(asset);
}
1390
// Aggregates the load status of every metadata key into a single AssetStatus,
// then upgrades Loaded to Playable when the asset reports playable and all
// tracks meet the hardware-decode policy.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];

        if (error)
            ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // Skip the per-track hardware check entirely when policy doesn't require it.
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    // m_tracksArePlayable is an Optional-style cache; compute it once.
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    // Everything loaded, but the asset is not playable (or a track fails policy).
    return MediaPlayerAVAssetStatusLoaded;
}
1431
// Error code reported while loading the asset's "playable" key; 0 when there is
// no asset or no error.
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return error ? [error code] : 0;
}
1441
// Paints the current video frame into the graphics context, preferring the
// video-output path (cheap pixel buffer) and falling back to the image
// generator. Objective-C exceptions from AVFoundation are contained.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Remember that at least one frame reached the page (used elsewhere, e.g.
    // for readyState/snapshot decisions).
    m_videoFrameHasDrawn = true;
}
1462
// Best-effort paint entry point: only draws when not already compositing to a
// layer and a context renderer already exists (it never creates one).
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if we already have an image generator or video output available.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1478
// Fallback painter: snapshots the current time via AVAssetImageGenerator and
// blits it (vertically flipped into CG's coordinate space) into the context.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    INFO_LOG(LOGIDENTIFIER);

    RetainPtr<CGImageRef> frameImage = createImageForTimeInRect(currentTime(), rect);
    if (!frameImage)
        return;

    GraphicsContextStateSaver stateSaver(context);
    // Flip: CGContextDrawImage uses a bottom-left origin.
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), frameImage.get());
}
1493
// Produces a CGImage of the frame at |time|, sized to |rect|, converting to
// sRGB. Creates the image generator lazily on first use.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // 600 is the conventional CMTime timescale for video (divisible by common frame rates).
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    // Normalize to sRGB so painting doesn't depend on the asset's color space.
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "creating image took ", (MonotonicTime::now() - start).seconds());
#endif

    return image;
}
1514
// Reports the MIME types AVFoundation can play, via the shared cache.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    supportedTypes = AVAssetMIMETypeCache::singleton().types();
}
1519
1520 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// The supported key systems: FairPlay Streaming (both identifiers) and Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1527 #endif
1528
// Answers canPlayType()-style queries: rejects MSE/MediaStream (other engines
// handle those), filters by the MIME cache and hardware-decode policy, then
// lets AVURLAsset judge the full type-with-codecs string.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    if (!staticMIMETypeList().contains(containerType) && !AVAssetMIMETypeCache::singleton().canDecodeType(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // With explicit codecs, AVFoundation can give a definitive yes.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [PAL::getAVURLAssetClass() isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1558
// Returns whether the given legacy-EME key system (and optional MIME type) is
// supported. Always false when legacy EME is compiled out or keySystem is empty.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Fixed: this returned MediaPlayer::IsNotSupported — an enum — from a
        // bool function; it only behaved because that enumerator is 0.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        // An empty MIME type means "any"; otherwise it must be decodable.
        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVAssetMIMETypeCache::singleton().canDecodeType(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1584
1585 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1586 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Satisfies an AVAssetResourceLoadingRequest directly from in-memory key data:
// fills in content info, then serves the requested byte range (clamped to the
// key length) and finishes the request.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the available key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // Out-of-range request: fail it rather than serve a bogus slice.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        // ArrayBuffer::slice takes ints; key data is small enough for these to hold.
        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        auto requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1612 #endif
1613
// AVAssetResourceLoader delegate entry point for custom-scheme requests.
// Returns true when WebKit will service the request asynchronously (the request
// is kept alive until fulfilled/cancelled); false lets AVFoundation fail it.
// Handles "skd" (FairPlay) and "clearkey" key requests specially; everything
// else is handed to a WebCoreAVFResourceLoader.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
    if (scheme == "skd") {
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        auto initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        auto initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        // true -> little-endian size prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        // Copy the UTF-16 key URI after the 4-byte size prefix.
        auto keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        auto initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        // If the client declines to handle the key request, let AVFoundation fail it.
        if (!player()->keyNeeded(initData.ptr()))
            return false;
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
        // With a modern CDM instance attached, AVContentKeySession handles key
        // delivery; just mark the request as a content-key request and finish it.
        if (m_cdmInstance) {
            avRequest.contentInformationRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
            [avRequest finishLoading];
            return true;
        }

        RetainPtr<NSData> keyURIData = [keyURI dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
        m_keyID = SharedBuffer::create(keyURIData.get());
        player()->initializationDataEncountered("skd"_s, m_keyID->tryCreateArrayBuffer());
        setWaitingForKey(true);
#endif
        // Park the request until keyAdded()/attemptToDecryptWithInstance() can answer it.
        m_keyURIToRequestMap.set(keyURI, avRequest);

        return true;
    }

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        auto encodedKeyId = UTF8Encoding().encode(keyID, UnencodableHandling::URLEncodedEntities);

        auto initData = Uint8Array::create(encodedKeyId.size());
        initData->setRange(encodedKeyId.data(), encodedKeyId.size(), 0);

        // If the key is already cached, answer the request synchronously.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.ptr()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif
#endif

    // Generic path: stream the resource through WebKit's loader.
    auto resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add((__bridge CFTypeRef)avRequest, resourceLoader.copyRef());
    resourceLoader->startLoading();
    return true;
}
1683
// AVFoundation cancelled an in-flight resource request: stop (but do not yet
// remove) the loader servicing it; removal happens in didStopLoadingRequest().
// FIX: removed an unused local `scheme` that was computed and never read.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get((__bridge CFTypeRef)avRequest))
        resourceLoader->stopLoading();
}
1693
// The loader for this request has fully stopped; drop our reference to it.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove((__bridge CFTypeRef)avRequest);
}
1698 #endif
1699
// This engine is usable only when both AVFoundation and CoreMedia can be soft-linked.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return PAL::isAVFoundationFrameworkAvailable() && isCoreMediaFrameworkAvailable();
}
1704
// Maps a requested time to the nearest time the media can actually seek to.
// Currently an identity mapping in all cases (see FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1713
// Disable caching of currentTime entirely for this engine (0-second cache window).
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
    return 0;
}
1718
// Applies the current aspect-ratio policy to the video layer, inside an
// implicit-animation-free CATransaction so the change is not animated.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1735
// Returns the first track in |tracks| whose isEnabled flag is set, or nil when
// no enabled track exists (including when |tracks| is nil or empty).
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack *candidate in tracks) {
        if ([candidate isEnabled])
            return candidate;
    }
    return nil;
}
1745
// Recomputes cached hasVideo/hasAudio/hasCaptions state whenever the asset's or
// player item's track collection changes, then fires size/characteristic
// notifications. Notifications are batched until the end of the function.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so characteristicsChanged()
    // fires only if it actually changed by the end of this function.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    setDelayCharacteristicsChangedNotification(true);

    // NOTE(review): haveCCTrack is written below but never read in this function —
    // looks vestigial; confirm before removing.
    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Scan the cached player-item tracks by media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

        updateAudioTracks();
        updateVideoTracks();

        // Media-selection groups may expose tracks the player-item track list does not.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

    AVMediaSelectionGroup *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[PAL::getAVMediaSelectionGroupClass() playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }

    setHasClosedCaptions(hasCaptions);

    INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the current audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1836
1837 #if ENABLE(VIDEO_TRACK)
1838
// Diffs the AVPlayerItemTracks of |trackType| in |tracks| against the
// previously-known wrapper items in |oldItems|: creates wrappers for new
// tracks via |itemFactory|, updates |oldItems| in place, and notifies
// |player| of removals and additions (removals first).
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition old items into those whose track disappeared and those that survive.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly-appeared track in a fresh item.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the client only after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
1882
// Overload for media-selection-based tracks: refreshes |group|'s options for
// the given |characteristics|, diffs the resulting AVMediaSelectionOptions
// against the wrapper items in |oldItems|, updates |oldItems| in place, and
// notifies |player| of removals and additions (removals first).
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Collect the group's current options that have a backing AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOption* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Options referenced by the previously-known items.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Items with no backing option, or whose option was removed, are dropped.
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Notify the client only after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
1942
// Reconciles m_audioTracks with the current audio tracks, preferring the
// media-selection-group path when an audible group is available and falling
// back to the raw AVPlayerItemTrack list otherwise.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the audible selection group the first time one is available.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroup *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh cached per-track properties after the diff.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
#endif
}
1967
// Reconciles m_videoTracks with the current video tracks: first against the
// raw AVPlayerItemTrack list, then against the visual media-selection group
// when one is available.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

    // Lazily create the visual selection group the first time one is available.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroup *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

    // Refresh cached per-track properties after the diff.
    // FIX: this previously iterated m_audioTracks (copy/paste from
    // updateAudioTracks()), leaving refreshed video tracks with stale
    // properties and redundantly resetting the audio tracks instead.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
#endif
}
1991
// Delegates to the fullscreen layer manager: a text-track representation is
// needed while video is hosted in a fullscreen layer it manages.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
    return m_videoFullscreenLayerManager->requiresTextTrackRepresentation();
}
1996
// Forwards to the fullscreen layer manager to keep caption bounds in sync
// with the video layer's bounds.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
    m_videoFullscreenLayerManager->syncTextTrackBounds();
}
2001
// Hands the (possibly null) caption-rendering layer to the fullscreen layer manager.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
}
2006
2007 #endif // ENABLE(VIDEO_TRACK)
2008
2009 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2010
// Lazily creates the Web Audio source provider for the current player item,
// initializing it with the first enabled audible asset track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
    return m_provider.get();
}
2019
2020 #endif
2021
// Pushes the cached presentation size through as the natural size, once an
// asset exists to make the value meaningful.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    setNaturalSize(m_cachedPresentationSize);
}
2029
// Mirrors the asset's post-redirect URL into the cross-platform resolved URL
// (empty URL when there is no asset).
void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
{
    setResolvedURL(m_avAsset ? URL([m_avAsset resolvedURL]) : URL());
}
2034
// True only when media loading went through WebKit's WebCoreNSURLSession and
// every response passed its CORS checks; false when the session mechanism is
// unavailable or disabled.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    // URLSession is an SPI property; guard against older AVFoundation.
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session isKindOfClass:[WebCoreNSURLSession class]])
        return session.didPassCORSAccessChecks;

    return false;
}
2048
// Asks the WebCoreNSURLSession whether loads so far would taint |origin|.
// Returns nullopt when the answer cannot be determined (non-WebKit session).
Optional<bool> MediaPlayerPrivateAVFoundationObjC::wouldTaintOrigin(const SecurityOrigin& origin) const
{
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    // Without the SPI session there is nothing cross-origin in flight; report no taint.
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session isKindOfClass:[WebCoreNSURLSession class]])
        return [session wouldTaintOrigin:origin];

    return WTF::nullopt;
}
2062
2063
2064 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2065
// Creates and attaches an AVPlayerItemVideoOutput for frame capture.
// No-op until a player item exists, and idempotent once the output is created.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // VideoToolbox path: let AVFoundation pick the native pixel format.
    NSDictionary* attributes = nil;
#else
    // Software path: request 32BGRA so CoreGraphics can consume the buffers directly.
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
#endif
    m_videoOutput = adoptNS([PAL::allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Delegate callbacks (outputMediaDataWillChange) arrive on the shared pull queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
}
2085
// Detaches the video output from the player item (if still attached) and
// releases it. Safe to call when no output exists.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    INFO_LOG(LOGIDENTIFIER);

    // Use nullptr rather than the integer literal 0 to clear the RetainPtr.
    m_videoOutput = nullptr;
}
2098
// Pulls the pixel buffer for the current item time into m_lastPixelBuffer,
// creating the video output on demand. Returns true only when a new buffer
// was captured; invalidates the derived m_lastImage in that case.
bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
{
    if (!m_avPlayerItem)
        return false;

    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    // copyPixelBufferForItemTime: can return nil for an already-vended time,
    // so check for a new buffer first.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return false;

    m_lastPixelBuffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    // The cached CGImage no longer matches the buffer; regenerate lazily.
    m_lastImage = nullptr;
    return true;
}
2117
// True when a frame is already cached, or the video output (created on demand)
// has a fresh pixel buffer for the current time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2131
// Refreshes m_lastImage from the latest pixel buffer. With UpdateSynchronously,
// blocks (up to the semaphore timeout) waiting for a frame when none is ready.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage(UpdateType type)
{
#if HAVE(CORE_VIDEO)
    if (!m_avPlayerItem)
        return;

    if (type == UpdateType::UpdateSynchronously && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
        return;

    // Lazily create the conformer used to convert pixel buffers to CGImages.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        NSDictionary *attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
    }

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "creating buffer took ", (MonotonicTime::now() - start).seconds());
#endif
#endif // HAVE(CORE_VIDEO)
}
2167
// Paints the current video frame into |context| at |outputRect|, applying the
// track's preferred transform (e.g. rotation) so the image appears upright.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    // Synchronous update: block briefly for a frame if none is available yet.
    updateLastImage(UpdateType::UpdateSynchronously);
    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    INFO_LOG(LOGIDENTIFIER);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect back through the inverse transform so that
    // concatenating videoTransform below lands the image in outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2194
// Copies the latest video pixel buffer into the caller-provided GL texture via
// the (lazily-created) VideoTextureCopierCV. Returns false when no frame exists.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    ASSERT(context);

    updateLastPixelBuffer();
    if (!m_lastPixelBuffer)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t height = CVPixelBufferGetHeight(m_lastPixelBuffer.get());

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2211
// Returns the (possibly null) CGImage for the current time after an
// asynchronous refresh of the cached frame.
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2217
// Blocks the calling thread until the video output signals that new media data
// is available (via outputMediaDataWillChange), giving up after one second.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    bool satisfied = m_videoOutputSemaphore.waitFor(1_s);
    if (!satisfied)
        ERROR_LOG(LOGIDENTIFIER, "timed out");
}
2227
// Video-output delegate callback (pull queue): wake any thread blocked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput *)
{
    m_videoOutputSemaphore.signal();
}
2232
2233 #endif
2234
2235 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2236
// Removes and returns the pending loading request parked for |keyURI|
// (null when none was parked).
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2241
// A decryption key became available: answer every parked loading request whose
// key is now cached. Fulfilled entries are collected first and removed after
// the loop, since the map cannot be mutated while it is being iterated.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> fulfilledKeyIds;

    for (auto& pair : m_keyURIToRequestMap) {
        const String& keyId = pair.key;
        const RetainPtr<AVAssetResourceLoadingRequest>& request = pair.value;

        auto keyData = player()->cachedKeyForKeyId(keyId);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(request.get(), keyData.get());
        fulfilledKeyIds.append(keyId);
    }

    for (auto& keyId : fulfilledKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2261
// Drops our weak reference to the legacy CDM session being torn down.
// The parameter must be the session we currently hold (asserted in debug).
void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2267
// Creates a legacy FairPlay CDM session for |keySystem|, retaining a weak
// reference so this player can route key events to it. Returns null for
// unsupported key systems.
std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = makeWeakPtr(*session);
    return WTFMove(session);
}
2276 #endif
2277
2278 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Propagates HDCP/output-protection changes: the legacy session is told via a
// synthesized 'HDCP' NSError; a modern CDM instance is notified directly.
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_cdmInstance)
        m_cdmInstance->outputObscuredDueToInsufficientExternalProtectionChanged(newValue);
#elif !ENABLE(LEGACY_ENCRYPTED_MEDIA)
    UNUSED_PARAM(newValue);
#endif
}
2293 #endif
2294
2295 #if ENABLE(ENCRYPTED_MEDIA)
// Adopts a FairPlay Streaming CDM instance, detaching any previously attached
// one first. Instances of other CDM types are ignored.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
        return;

    auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    // Re-attaching the same instance is a no-op.
    if (&fpsInstance == m_cdmInstance)
        return;

    if (m_cdmInstance)
        cdmInstanceDetached(*m_cdmInstance);

    m_cdmInstance = &fpsInstance;
#else
    UNUSED_PARAM(instance);
#endif
}
2314
// Clears the attached CDM instance. The parameter must be the currently
// attached instance (asserted in debug).
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceDetached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);
    m_cdmInstance = nullptr;
#else
    UNUSED_PARAM(instance);
#endif
}
2324
// Tries to start decryption with the attached CDM instance: registers the
// asset as a content-key recipient with the matching key session, then
// completes every parked "skd" loading request and clears the waiting flag.
void MediaPlayerPrivateAVFoundationObjC::attemptToDecryptWithInstance(CDMInstance&)
{
#if HAVE(AVCONTENTKEYSESSION)
    if (!m_keyID || !m_cdmInstance)
        return;

    auto instanceSession = m_cdmInstance->sessionForKeyIDs(Vector<Ref<SharedBuffer>>::from(*m_keyID));
    if (!instanceSession)
        return;

    [instanceSession->contentKeySession() addContentKeyRecipient:m_avAsset.get()];

    // Take ownership of the map so the member is emptied before requests resume.
    auto keyURIToRequestMap = WTFMove(m_keyURIToRequestMap);
    for (auto& request : keyURIToRequestMap.values()) {
        if (auto *infoRequest = request.get().contentInformationRequest)
            infoRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
        [request finishLoading];
    }
    setWaitingForKey(false);
#endif
}
2346
// Updates the waiting-for-key flag, notifying the client only on an actual change.
void MediaPlayerPrivateAVFoundationObjC::setWaitingForKey(bool waitingForKey)
{
    if (m_waitingForKey == waitingForKey)
        return;

    m_waitingForKey = waitingForKey;
    player()->waitingForKeyChanged();
}
2355 #endif
2356
// Returns the asset's audible tracks, or nil when there is no asset or the
// asset's "tracks" key has not finished loading (querying earlier can block).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2367
// True when the asset's media-selection metadata has finished loading, i.e. the
// mediaSelectionGroupForMediaCharacteristic: accessors may be queried safely.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    if (!m_avAsset)
        return false;

    return [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2378
// Returns the legible (captions/subtitles) selection group, or nil until the
// selection metadata has loaded.
AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    if (!hasLoadedMediaSelectionGroups())
        return nil;

    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
}
2386
// Returns the audible selection group, or nil until the selection metadata has loaded.
AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    if (!hasLoadedMediaSelectionGroups())
        return nil;

    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
}
2394
// Returns the visual selection group, or nil until the selection metadata has loaded.
AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    if (!hasLoadedMediaSelectionGroups())
        return nil;

    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual];
}
2402
// Reconciles the player's text-track list with the asset's legible media selection
// options: options already represented keep their track, options without a track get
// a new one, and tracks whose option disappeared are reported as removed.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroup *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Start by assuming every existing track was removed; matches found below are
    // deleted from this list, so what remains at the end is truly gone.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [PAL::getAVMediaSelectionGroupClass() playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOption *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove() does not disturb the unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks have no media selection option to compare.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOption> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2458
// Lazily creates the single in-band metadata text track and registers it with the
// player. Subsequent calls are no-ops once the track exists.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    // Dispatch type for HLS timed metadata cues.
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2468
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    // Cues are forwarded only while a text track is selected; otherwise they are dropped.
    if (m_currentTextTrack)
        m_currentTextTrack->processCue((__bridge CFArrayRef)attributedStrings, (__bridge CFArrayRef)nativeSamples, time);
}
2478
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    INFO_LOG(LOGIDENTIFIER);

    // Nothing to reset when no text track is currently selected.
    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2488
// Selects (or deselects, when track is null) the active text track. Legacy
// closed-caption tracks are toggled through the deprecated AVPlayer closed-caption
// API; all other categories go through the legible media selection group.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");

    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
            ALLOW_DEPRECATED_DECLARATIONS_END
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
    } else {
        // Clear both selection mechanisms since we do not know which one was active.
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
        ALLOW_DEPRECATED_DECLARATIONS_BEGIN
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
        ALLOW_DEPRECATED_DECLARATIONS_END
    }

}
2517
// Returns the language of the primary audio track, caching the answer in the
// mutable member m_languageOfPrimaryAudioTrack (hence usable from this const method).
// Falls back from the selected audible option to the single ungrouped audio track.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // A null (not merely empty) cache means the value has not been computed yet.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroup *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    ALLOW_DEPRECATED_DECLARATIONS_BEGIN
    AVMediaSelectionOption *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    ALLOW_DEPRECATED_DECLARATIONS_END
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);

        return m_languageOfPrimaryAudioTrack;
    }

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Cache the empty string so the lookup is not repeated on every call.
        m_languageOfPrimaryAudioTrack = emptyString();
        INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

    INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");

    return m_languageOfPrimaryAudioTrack;
}
2554
2555 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Reports whether playback is currently routed to a wireless (external) target.
// On non-iOS platforms the answer depends on the kind of playback target that was
// set; on iOS-family platforms it comes directly from AVPlayer's external playback state.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS_FAMILY)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            // Mock targets count as wireless only when playback to them was requested
            // and they still have an active route.
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    INFO_LOG(LOGIDENTIFIER, wirelessTarget);

    return wirelessTarget;
}
2575
// Maps AVPlayer's external playback type to the MediaPlayer target-type enum.
// Non-iOS platforms only support AirPlay, so they answer unconditionally.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS_FAMILY)
    if (!PAL::isAVFoundationFrameworkAvailable())
        return MediaPlayer::TargetTypeNone;

    switch ([m_avPlayer externalPlaybackType]) {
    case AVPlayerExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case AVPlayerExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case AVPlayerExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // All enum values are handled above; reaching here means AVFoundation added a new type.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2601     
2602 #if PLATFORM(IOS_FAMILY)
// Returns a human-readable display name for the external device the player is
// routed to, or nil when none applies. Prefers the AVOutputContext API when
// available, otherwise falls back to MediaRemote's pickable-route list.
// FIXME: the function name is misspelled ("exernal" should be "external") — renaming
// requires updating its caller as well.
static NSString *exernalDeviceDisplayNameForPlayer(AVPlayer *player)
{
#if HAVE(CELESTIAL)
    if (!PAL::isAVFoundationFrameworkAvailable())
        return nil;

    if ([PAL::getAVOutputContextClass() respondsToSelector:@selector(sharedAudioPresentationOutputContext)]) {
        AVOutputContext *outputContext = [PAL::getAVOutputContextClass() sharedAudioPresentationOutputContext];

        // Older output contexts expose only a single device name.
        if (![outputContext respondsToSelector:@selector(supportsMultipleOutputDevices)]
            || ![outputContext supportsMultipleOutputDevices]
            || ![outputContext respondsToSelector:@selector(outputDevices)])
            return [outputContext deviceName];

        // Multiple simultaneous output devices: join all of their names.
        auto outputDeviceNames = adoptNS([[NSMutableArray alloc] init]);
        for (AVOutputDevice *outputDevice in [outputContext outputDevices]) {
ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            auto outputDeviceName = adoptNS([[outputDevice name] copy]);
ALLOW_DEPRECATED_DECLARATIONS_END
            [outputDeviceNames addObject:outputDeviceName.get()];
        }

        return [outputDeviceNames componentsJoinedByString:@" + "];
    }

    // MediaRemote fallback is only meaningful for AirPlay routes.
    if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
        return nil;

    NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
    if (!pickableRoutes.count)
        return nil;

    NSString *displayName = nil;
    for (NSDictionary *pickableRoute in pickableRoutes) {
        // Only the currently-picked route contributes a name.
        if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
            continue;

        displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];

        NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
        if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
            break;

        // The route is a speaker or HDMI out, override the name to be the localized device model.
        NSString *localizedDeviceModel = [[PAL::getUIDeviceClass() currentDevice] localizedModel];

        // In cases where a route with that name already exists, prefix the name with the model.
        BOOL includeLocalizedDeviceModelName = NO;
        for (NSDictionary *otherRoute in pickableRoutes) {
            if (otherRoute == pickableRoute)
                continue;

            if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
                includeLocalizedDeviceModelName = YES;
                break;
            }
        }

        if (includeLocalizedDeviceModelName)
            displayName =  [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
        else
            displayName = localizedDeviceModel;

        break;
    }

    return displayName;
#else
    UNUSED_PARAM(player);
    return nil;
#endif
}
2675 #endif
2676
// Returns the display name of the current wireless playback target, or the empty
// string when no player exists. On non-iOS platforms the name comes from the
// explicitly-set playback target; on iOS-family it is derived from the AVPlayer route.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS_FAMILY)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = exernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif

    return wirelessTargetName;
}
2692
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    // When a player exists, refresh the cached value from AVPlayer before answering;
    // without one, fall back to the last cached state.
    if (m_avPlayer) {
        m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
        INFO_LOG(LOGIDENTIFIER, !m_allowsWirelessVideoPlayback);
    }

    return !m_allowsWirelessVideoPlayback;
}
2703
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    INFO_LOG(LOGIDENTIFIER, disabled);

    // Record the desired state even when the AVPlayer does not exist yet.
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // Suppress re-entrant callbacks while pushing the state down to AVFoundation.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
    setDelayCallbacks(false);
}
2715
2716 #if !PLATFORM(IOS_FAMILY)
2717
// Adopts a new wireless playback target. An AVFoundation target supplies an
// AVOutputContext to route through; other target types clear the context.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    INFO_LOG(LOGIDENTIFIER);

    // A target without an active route cannot be played to.
    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2729
// Starts or stops routing playback to the current target. For AVFoundation targets
// this swaps the AVPlayer's outputContext; for mock targets it only schedules the
// wireless-change notification so tests observe the transition.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    INFO_LOG(LOGIDENTIFIER, shouldPlay);

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        if (!m_avPlayer)
            return;

        // Avoid touching the player when the context would not actually change.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    // Mock target: no real routing; just notify asynchronously on the main thread,
    // guarding against this object being destroyed before the notification runs.
    setDelayCallbacks(true);
    auto weakThis = makeWeakPtr(*this);
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2770
2771 #endif // !PLATFORM(IOS_FAMILY)
2772
// iOS-family only: keeps AVPlayer's external-screen playback behavior in sync with
// the element's fullscreen state. A no-op elsewhere or before the player exists.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS_FAMILY)
    if (!m_avPlayer)
        return;

    // The selector is checked because it is not available on all OS versions.
    if ([m_avPlayer respondsToSelector:@selector(setUsesExternalPlaybackWhileExternalScreenIsActive:)])
        [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:(player()->fullscreenMode() == MediaPlayer::VideoFullscreenModeStandard) || player()->isVideoFullscreenStandby()];
#endif
}
2783
2784 #endif
2785
// KVO callback: caches the AVPlayerItem status and re-derives the player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2792
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    // Defer state updates until the matching DidChange arrives.
    ++m_pendingStatusChanges;
}
2797
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    // Cache the new value, then update state only once all pending
    // will/did-change pairs have completed.
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    m_pendingStatusChanges--;
    if (!m_pendingStatusChanges)
        updateStates();
}
2806
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    // Defer state updates until the matching DidChange arrives.
    ++m_pendingStatusChanges;
}
2811
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    // Cache the new value, then update state only once all pending
    // will/did-change pairs have completed.
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    m_pendingStatusChanges--;
    if (!m_pendingStatusChanges)
        updateStates();
}
2820
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    // Defer state updates until the matching DidChange arrives.
    ++m_pendingStatusChanges;
}
2825
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    // Cache the new value, then update state only once all pending
    // will/did-change pairs have completed.
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    m_pendingStatusChanges--;
    if (!m_pendingStatusChanges)
        updateStates();
}
2834
// KVO callback: caches the new seekable ranges before notifying, so observers see
// the up-to-date value.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray>&& seekableRanges)
{
    m_cachedSeekableRanges = WTFMove(seekableRanges);

    seekableTimeRangesChanged();
    updateStates();
}
2842
// KVO callback: caches the new loaded (buffered) ranges before notifying, so
// observers see the up-to-date value.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray>&& loadedRanges)
{
    m_cachedLoadedRanges = WTFMove(loadedRanges);

    loadedTimeRangesChanged();
    updateStates();
}
2850
// KVO callback for readiness-for-display. Becoming ready without any known video
// track implies the track information is stale, so re-process tracks.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2858
// KVO callback for a track's "enabled" property; the new value itself is unused
// because tracksChanged() re-derives everything.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2864
// Applies a buffering policy. When the OS supports AVPlayer's resource conservation
// levels the policy maps directly onto them (the static_asserts pin that mapping);
// otherwise the policy is emulated by detaching and/or reattaching the player item.
void MediaPlayerPrivateAVFoundationObjC::setBufferingPolicy(MediaPlayer::BufferingPolicy policy)
{
    ALWAYS_LOG(LOGIDENTIFIER, policy);

    if (m_bufferingPolicy == policy)
        return;

    // Remember the policy even if the player doesn't exist yet.
    m_bufferingPolicy = policy;
    
    if (!m_avPlayer)
        return;

#if HAVE(AVPLAYER_RESOURCE_CONSERVATION_LEVEL)
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::Default) == AVPlayerResourceConservationLevelNone, "MediaPlayer::BufferingPolicy::Default is not AVPlayerResourceConservationLevelNone as expected");
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::LimitReadAhead) == AVPlayerResourceConservationLevelReduceReadAhead, "MediaPlayer::BufferingPolicy::LimitReadAhead is not AVPlayerResourceConservationLevelReduceReadAhead as expected");
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::MakeResourcesPurgeable) == AVPlayerResourceConservationLevelReuseActivePlayerResources, "MediaPlayer::BufferingPolicy::MakeResourcesPurgeable is not AVPlayerResourceConservationLevelReuseActivePlayerResources as expected");
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::PurgeResources) == AVPlayerResourceConservationLevelRecycleBuffer, "MediaPlayer::BufferingPolicy::PurgeResources is not AVPlayerResourceConservationLevelRecycleBuffer as expected");

    // Preferred path: let AVFoundation manage conservation directly.
    if ([m_avPlayer respondsToSelector:@selector(setResourceConservationLevelWhilePaused:)]) {
        m_avPlayer.get().resourceConservationLevelWhilePaused = static_cast<AVPlayerResourceConservationLevel>(policy);
        updateStates();
        return;
    }
#endif

    // Fallback emulation via attaching/detaching the AVPlayerItem.
    switch (policy) {
    case MediaPlayer::BufferingPolicy::Default:
        setAVPlayerItem(m_avPlayerItem.get());
        break;
    case MediaPlayer::BufferingPolicy::LimitReadAhead:
    case MediaPlayer::BufferingPolicy::MakeResourcesPurgeable:
        setAVPlayerItem(nil);
        break;
    case MediaPlayer::BufferingPolicy::PurgeResources:
        // Detach then immediately reattach to force buffers to be released.
        setAVPlayerItem(nil);
        setAVPlayerItem(m_avPlayerItem.get());
        break;
    }

    updateStates();
}
2906
2907 #if ENABLE(DATACUE_VALUE)
2908
// Maps an AVFoundation metadata key space to the corresponding WebCore metadata
// type string; returns the empty atom for unrecognized key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserDataType("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserDataType("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadataType("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadataType("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3MetadataType("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserDataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserDataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3MetadataType;

    return emptyAtom();
}
2930
2931 #endif
2932
// Handles a batch of timed metadata from AVFoundation: closes out any still-open
// cues on the metadata track, then adds one data cue per AVMetadataItem.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(const RetainPtr<NSArray>& metadata, const MediaTime& mediaTime)
{
    // AVFoundation may deliver NSNull instead of an array; normalize to nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    INFO_LOG(LOGIDENTIFIER, "adding ", m_currentMetaData ? [m_currentMetaData.get() count] : 0, " at time ", mediaTime);

#if ENABLE(DATACUE_VALUE)
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // An empty batch just terminates any cues still pending at the current time.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItem *item in m_currentMetaData.get()) {
        // Clamp negative item times to zero.
        MediaTime start = std::max(PAL::toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItem *item in m_currentMetaData.get()) {
        MediaTime start = std::max(PAL::toMediaTime(item.time), MediaTime::zeroTime());
        // Items without a valid duration produce open-ended cues, closed by a later batch.
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + PAL::toMediaTime(item.duration);

        AtomicString type = nullAtom();
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
2974
// KVO callback for the player item's tracks. Re-registers the "enabled" observer on
// the new track set and filters out streaming tracks that are already represented by
// a media selection group (to avoid exposing them twice).
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(const RetainPtr<NSArray>& tracks)
{
    // Drop observers from the previous track set before replacing it.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        // Tracks backed by the asset itself are always kept.
        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Observe "enabled" on each kept track so trackEnabledDidChange() fires.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The total byte size depends on the track set; invalidate the cached value.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3012
// KVO callback: caches whether any audio track is enabled, then re-derives state.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3020
// KVO callback: caches the new presentation size, then reports the size change.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3028
// KVO callback: caches the new duration and invalidates the derived cached value.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3035
// KVO callback: caches the playback rate, updates state, then notifies listeners.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3043
// KVO callback for AVPlayer.timeControlStatus. Note the cache is intentionally NOT
// updated while observation is disabled (m_shouldObserveTimeControlStatus is false).
// While on a wireless target, a remotely-initiated play/pause is reflected back into
// m_requestedPlaying so the element's playback state stays in sync.
void MediaPlayerPrivateAVFoundationObjC::timeControlStatusDidChange(int timeControlStatus)
{
    if (m_cachedTimeControlStatus == timeControlStatus)
        return;

    if (!m_shouldObserveTimeControlStatus)
        return;

    m_cachedTimeControlStatus = timeControlStatus;

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    if (!isCurrentPlaybackTargetWireless())
        return;

    // Anything other than "paused" (i.e. playing or waiting to play) counts as playing.
    bool playerIsPlaying = m_cachedTimeControlStatus != AVPlayerTimeControlStatusPaused;
    if (playerIsPlaying != m_requestedPlaying) {
        m_requestedPlaying = playerIsPlaying;
        player()->playbackStateChanged();
    }
#endif
}