/*
 * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#import "config.h"
#import "MediaPlayerPrivateAVFoundationObjC.h"

#if ENABLE(VIDEO) && USE(AVFOUNDATION)

#import "AVAssetTrackUtilities.h"
#import "AVFoundationMIMETypeCache.h"
#import "AVTrackPrivateAVFObjCImpl.h"
#import "AudioSourceProviderAVFObjC.h"
#import "AudioTrackPrivateAVFObjC.h"
#import "AuthenticationChallenge.h"
#import "CDMInstanceFairPlayStreamingAVFObjC.h"
#import "CDMSessionAVFoundationObjC.h"
#import "Cookie.h"
#import "DeprecatedGlobalSettings.h"
#import "Extensions3D.h"
#import "FloatConversion.h"
#import "GraphicsContext.h"
#import "GraphicsContext3D.h"
#import "GraphicsContextCG.h"
#import "InbandMetadataTextTrackPrivateAVF.h"
#import "InbandTextTrackPrivateAVFObjC.h"
#import "InbandTextTrackPrivateLegacyAVFObjC.h"
#import "Logging.h"
#import "MediaPlaybackTargetMac.h"
#import "MediaPlaybackTargetMock.h"
#import "MediaSelectionGroupAVFObjC.h"
#import "OutOfBandTextTrackPrivateAVF.h"
#import "PixelBufferConformerCV.h"
#import "PlatformTimeRanges.h"
#import "SecurityOrigin.h"
#import "SerializedPlatformRepresentationMac.h"
#import "SharedBuffer.h"
#import "TextEncoding.h"
#import "TextTrackRepresentation.h"
#import "TextureCacheCV.h"
#import "VideoFullscreenLayerManagerObjC.h"
#import "VideoTextureCopierCV.h"
#import "VideoTrackPrivateAVFObjC.h"
#import "WebCoreAVFResourceLoader.h"
#import "WebCoreCALayerExtras.h"
#import "WebCoreNSURLSession.h"
#import <JavaScriptCore/DataView.h>
#import <JavaScriptCore/JSCInlines.h>
#import <JavaScriptCore/TypedArrayInlines.h>
#import <JavaScriptCore/Uint16Array.h>
#import <JavaScriptCore/Uint32Array.h>
#import <JavaScriptCore/Uint8Array.h>
#import <functional>
#import <objc/runtime.h>
#import <pal/avfoundation/MediaTimeAVFoundation.h>
#import <pal/spi/cocoa/QuartzCoreSPI.h>
#import <pal/spi/mac/AVFoundationSPI.h>
#import <wtf/BlockObjCExceptions.h>
#import <wtf/ListHashSet.h>
#import <wtf/NeverDestroyed.h>
#import <wtf/OSObjectPtr.h>
#import <wtf/URL.h>
#import <wtf/text/CString.h>

#if ENABLE(AVF_CAPTIONS)
#import "TextTrack.h"
#endif

#import <AVFoundation/AVAssetImageGenerator.h>
#import <AVFoundation/AVAssetTrack.h>
#import <AVFoundation/AVMediaSelectionGroup.h>
#import <AVFoundation/AVMetadataItem.h>
#import <AVFoundation/AVPlayer.h>
#import <AVFoundation/AVPlayerItem.h>
#import <AVFoundation/AVPlayerItemOutput.h>
#import <AVFoundation/AVPlayerItemTrack.h>
#import <AVFoundation/AVPlayerLayer.h>
#import <AVFoundation/AVTime.h>

#if PLATFORM(IOS_FAMILY)
#import "WAKAppKitStubs.h"
#import <CoreImage/CoreImage.h>
#import <UIKit/UIDevice.h>
#import <mach/mach_port.h>
#import <pal/ios/UIKitSoftLink.h>
#else
#import <Foundation/NSGeometry.h>
#import <QuartzCore/CoreImage.h>
#endif

#if USE(VIDEOTOOLBOX)
#import <CoreVideo/CoreVideo.h>
#import <VideoToolbox/VideoToolbox.h>
#endif

#import "CoreVideoSoftLink.h"
#import "MediaRemoteSoftLink.h"

namespace std {
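// HashSet's iterator does not expose the typedefs std::iterator_traits expects,
// so provide a minimal specialization for the HashSet iterator this file hands
// to standard algorithms.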
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}

#if ENABLE(AVF_CAPTIONS)
// Note: This must be defined before our SOFT_LINK macros:
@class AVMediaSelectionOption;
@interface AVMediaSelectionOption (OutOfBandExtensions)
@property (nonatomic, readonly) NSString* outOfBandSource;
@property (nonatomic, readonly) NSString* outOfBandIdentifier;
@end
#endif

@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end

#import <pal/cf/CoreMediaSoftLink.h>
#import <pal/cocoa/AVFoundationSoftLink.h>

SOFT_LINK_FRAMEWORK(MediaToolbox)
SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())

#if PLATFORM(IOS_FAMILY)

#if HAVE(CELESTIAL)
SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
#define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
#define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
#define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
#endif // HAVE(CELESTIAL)

#endif // PLATFORM(IOS_FAMILY)

using namespace WebCore;

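// KVO callbacks funnel through a single -observeValueForKeyPath: method; these
// context values let the observer tell which kind of object (player item, item
// track, player, or player layer) a given change notification came from.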
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};

@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
{
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
    GenericTaskQueue<Timer, std::atomic<unsigned>> m_taskQueue;
    int m_delayCallbacks;
}
-(id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
@end

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
    GenericTaskQueue<Timer, std::atomic<unsigned>> m_taskQueue;
}
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
#endif

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
}
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif

namespace WebCore {
using namespace PAL;

static NSArray *assetMetadataKeyNames();
static NSArray *itemKVOProperties();
static NSArray *assetTrackMetadataKeyNames();
static NSArray *playerKVOProperties();
static AVAssetTrack* firstEnabledTrack(NSArray* tracks);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t globalQueue;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        globalQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return globalQueue;
}
#endif

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t globalQueue;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        globalQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return globalQueue;
}
#endif

void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    ASSERT(AVFoundationMIMETypeCache::singleton().isAvailable());
}

static AVAssetCache *assetCacheForPath(const String& path)
{
    NSURL *assetCacheURL;

    if (path.isEmpty())
        assetCacheURL = [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES];
    else
        assetCacheURL = [NSURL fileURLWithPath:path isDirectory:YES];

    return [PAL::getAVAssetCacheClass() assetCacheWithURL:assetCacheURL];
}
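
// Usage sketch (illustrative only, mirroring the call sites below; an empty
// path falls back to a "MediaCache" directory under NSTemporaryDirectory()):
//
//     AVAssetCache *cache = assetCacheForPath(emptyString());
//     for (NSString *key in [cache allKeys])
//         NSLog(@"cached entry key: %@", key);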

HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> origins;
    for (NSString *key in [assetCacheForPath(path) allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (keyAsURL.isValid())
            origins.add(SecurityOrigin::create(keyAsURL));
    }
    return origins;
}

static WallTime toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    return WallTime::fromRawSeconds(date.timeIntervalSince1970);
}

void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, WallTime modifiedSince)
{
    AVAssetCache* assetCache = assetCacheForPath(path);

    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

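    // Also sweep the cache directory itself: delete any regular "CachedMedia-"
    // file modified after the cutoff, or the whole directory when no cutoff
    // was given.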
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    if (modifiedSince <= WallTime::fromRawSeconds(0)) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }

    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];

    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL in enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];

        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;

        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;

        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;

        [urlsToDelete addObject:fileURL];
    }

    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}

void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    AVAssetCache* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (keyAsURL.isValid()) {
            if (origins.contains(SecurityOrigin::create(keyAsURL)))
                [assetCache removeEntryForKey:key];
        }
    }
}

MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithPlayer:m_weakPtrFactory.createWeakPtr(*this)]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}

MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
    m_weakPtrFactory.revokeAll();

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutput setDelegate:nil queue:0];
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}

void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    INFO_LOG(LOGIDENTIFIER);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }

    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        setShouldObserveTimeControlStatus(false);

        [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
#if !PLATFORM(IOS_FAMILY)
        [m_avPlayer setOutputContext:nil];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties.
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}

bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}

bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}

void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}

void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    using namespace PAL;
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [PAL::getAVAssetImageGeneratorClass() assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];
}

void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}

void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    INFO_LOG(LOGIDENTIFIER);

    m_imageGenerator = nil;
}

void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = makeWeakPtr(*this)] {
        if (!weakThis)
            return;

        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX) && HAVE(AVFOUNDATION_VIDEO_OUTPUT)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}

void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([PAL::allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    INFO_LOG(LOGIDENTIFIER);

    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
}

void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];
    m_videoFullscreenLayerManager->didDestroyVideoLayer();

    m_videoLayer = nil;
}

MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // The item's current date advances with the playback position, so subtract the current playback offset to recover the date at which playback began.
    double date = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // No live streams were made during the epoch (1970). AVFoundation returns 0 if the media file doesn't have a start date.
    if (!date)
        return MediaTime::invalidTime();

    double currentTime = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round to absorb the small error introduced by subtracting the two offsets.
    return MediaTime::createWithDouble(round(date - currentTime));
}
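
// Worked example with hypothetical numbers: if the item's currentDate is
// 1,500,000,010 s since the epoch and playback is 10 s in, the result is
// (1500000010 * 1000) - (10 * 1000) = 1,500,000,000,000 ms, the wall-clock
// time at which the stream started.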

bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    if (currentRenderingMode() == MediaRenderingToLayer)
        return m_cachedIsReadyForDisplay;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput && (m_lastPixelBuffer || [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]]))
        return true;
#endif

    return m_videoFrameHasDrawn;
}

#if ENABLE(AVF_CAPTIONS)
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Caption)
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];

    if (kind == PlatformTextTrack::Subtitle)
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];

    if (kind == PlatformTextTrack::Description)
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];

    if (kind == PlatformTextTrack::Forced)
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];

    return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
}

void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}

void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOption> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();

            if (![[currentOption.get() outOfBandIdentifier] isEqual:(__bridge NSString *)uniqueID.get()])
                continue;

            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
#endif


static NSURL *canonicalURL(const URL& url)
{
    NSURL *cocoaURL = url;
    if (url.isEmpty())
        return cocoaURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    if (!request)
        return cocoaURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    if (!canonicalRequest)
        return cocoaURL;

    return [canonicalRequest URL];
}
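
// Note: canonicalization is delegated to the NSURLProtocol registered for the
// URL's scheme; for http(s) this typically normalizes components such as the
// host's letter case (an assumption about CFNetwork behavior, not something
// this file controls). The original URL is returned unchanged on any failure.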

#if PLATFORM(IOS_FAMILY)
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([[NSMutableDictionary alloc] init]);
    [properties setDictionary:@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    }];
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
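
// Conversion sketch (illustrative only; the field values are hypothetical).
// Note that cookie.expires is in milliseconds while NSDate takes seconds,
// hence the division above:
//
//     Cookie cookie;
//     cookie.name = "session";
//     cookie.value = "abc123";
//     cookie.domain = "example.com";
//     cookie.path = "/";
//     cookie.expires = 1500000000000; // ms since the epoch
//     NSHTTPCookie *nsCookie = toNSHTTPCookie(cookie);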
#endif

void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
{
    if (m_avAsset)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey:AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    if (PAL::canLoad_AVFoundation_AVURLAssetUseClientURLLoadingExclusively())
        [options setObject:@YES forKey:AVURLAssetUseClientURLLoadingExclusively];
#if PLATFORM(IOS_FAMILY)
    else if (PAL::canLoad_AVFoundation_AVURLAssetRequiresCustomURLLoadingKey())
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty())
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

    auto type = player()->contentMIMEType();
    if (PAL::canLoad_AVFoundation_AVURLAssetOutOfBandMIMETypeKey() && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: (__bridge NSString *)label.get(),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: (__bridge NSString *)language.get(),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: (__bridge NSString *)uniqueID.get(),
                AVOutOfBandAlternateTrackSourceKey: (__bridge NSString *)url.get(),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS_FAMILY)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS_FAMILY)
    Vector<Cookie> cookies;
    if (player()->getRawCookies(url, cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        if (PAL::canLoad_AVFoundation_AVURLAssetHTTPCookiesKey())
            [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];

    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([PAL::allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

    if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }

#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItem *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

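    // Off the main thread: keep the player and item alive in RetainPtrs until
    // the asynchronous main-queue block has performed the replacement.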
    RetainPtr<AVPlayer> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItem> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}

void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([PAL::allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

    setShouldObserveTimeControlStatus(true);

    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS_FAMILY)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so setShouldPlayToPlaybackTarget(true) doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS_FAMILY) && !PLATFORM(IOS_FAMILY_SIMULATOR) && !PLATFORM(IOSMAC)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything.
        m_muted = false;
        [m_avPlayer.get() setMuted:m_muted];
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    // Create the player item so we can load media data.
    m_avPlayerItem = adoptNS([PAL::allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([PAL::allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#endif

    setDelayCallbacks(false);
}

void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    auto weakThis = makeWeakPtr(*this);

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}

void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    INFO_LOG(LOGIDENTIFIER);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = makeWeakPtr(*this);
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}

MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}

PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    return m_videoFullscreenLayerManager->videoInlineLayer();
}

void MediaPlayerPrivateAVFoundationObjC::updateVideoFullscreenInlineImage()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->updateVideoFullscreenInlineImage(m_lastImage);
#endif
}

void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, Function<void()>&& completionHandler)
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
#else
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), nil);
#endif
    updateDisableExternalPlayback();
}

void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}

void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    NSString *videoGravity = AVLayerVideoGravityResizeAspect;
    if (gravity == MediaPlayer::VideoGravityResize)
        videoGravity = AVLayerVideoGravityResize;
    else if (gravity == MediaPlayer::VideoGravityResizeAspect)
        videoGravity = AVLayerVideoGravityResizeAspect;
    else if (gravity == MediaPlayer::VideoGravityResizeAspectFill)
        videoGravity = AVLayerVideoGravityResizeAspectFill;
    else
        ASSERT_NOT_REACHED();

    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}

void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}

void MediaPlayerPrivateAVFoundationObjC::videoFullscreenStandbyChanged()
{
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    updateDisableExternalPlayback();
#endif
}

#if PLATFORM(IOS_FAMILY)
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    if (m_currentMetaData)
        return m_currentMetaData.get();
    return nil;
}

String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *log = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}

String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *log = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
#endif

void MediaPlayerPrivateAVFoundationObjC::didEnd()
{
    m_requestedPlaying = false;
    MediaPlayerPrivateAVFoundation::didEnd();
}

void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}

void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    m_requestedPlaying = true;
    setPlayerRate(m_requestedRate);
}

void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    m_requestedPlaying = false;
    setPlayerRate(0);
}

bool MediaPlayerPrivateAVFoundationObjC::platformPaused() const
{
    return m_cachedTimeControlStatus == AVPlayerTimeControlStatusPaused;
}

MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    CMTime cmDuration;

    // Check the AVPlayerItem if we have one and its duration has loaded; some assets never report a duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return PAL::toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();
}

MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (CMTIME_IS_NUMERIC(itemTime))
        return std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());

    return MediaTime::zeroTime();
}

void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks; update afterwards.
    setDelayCallbacks(true);

    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;

    auto weakThis = makeWeakPtr(*this);

    setShouldObserveTimeControlStatus(false);
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->setShouldObserveTimeControlStatus(true);
            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
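
// Seek sketch (illustrative; the call site and values are hypothetical): a
// frame-accurate seek to 5 s passes zero tolerances, which the clamp above
// leaves untouched since kCMTimeZero is not negative.
//
//     player->seekToTime(MediaTime(5, 1), MediaTime::zeroTime(), MediaTime::zeroTime());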

void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS_FAMILY)
    UNUSED_PARAM(volume);
    return;
#else

    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}

void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    if (m_muted == muted)
        return;

    INFO_LOG(LOGIDENTIFIER, muted);

    m_muted = muted;

    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setMuted:m_muted];
}

void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    INFO_LOG(LOGIDENTIFIER, closedCaptionsVisible);
}

void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    m_requestedRate = rate;
    if (m_requestedPlaying)
        setPlayerRate(rate);
}

void MediaPlayerPrivateAVFoundationObjC::setPlayerRate(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
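    // Suppress timeControlStatus KVO around our own rate change so the
    // observer doesn't mistake it for an externally initiated pause or play
    // (a reading of the surrounding code, not documented AVFoundation behavior).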
    setShouldObserveTimeControlStatus(false);
    [m_avPlayer setRate:rate];
    m_cachedTimeControlStatus = [m_avPlayer timeControlStatus];
    setShouldObserveTimeControlStatus(true);
    setDelayCallbacks(false);
}

double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    if (!metaDataAvailable())
        return 0;

    return m_cachedRate;
}

double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}

double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}

void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (m_avPlayerItem)
        [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}

std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto timeRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return timeRanges;

    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange))
            timeRanges->add(PAL::toMediaTime(timeRange.start), PAL::toMediaTime(CMTimeRangeGetEnd(timeRange)));
    }
    return timeRanges;
}

MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime minTimeSeekable = MediaTime::positiveInfiniteTime();
    bool hasValidRange = false;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;

        hasValidRange = true;
        MediaTime startOfRange = PAL::toMediaTime(timeRange.start);
        if (minTimeSeekable > startOfRange)
            minTimeSeekable = startOfRange;
    }
    return hasValidRange ? minTimeSeekable : MediaTime::zeroTime();
}

MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime maxTimeSeekable;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;

        MediaTime endOfRange = PAL::toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeSeekable < endOfRange)
            maxTimeSeekable = endOfRange;
    }
    return maxTimeSeekable;
}

MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime maxTimeLoaded;
    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;

        MediaTime endOfRange = PAL::toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeLoaded < endOfRange)
            maxTimeLoaded = endOfRange;
    }

    return maxTimeLoaded;
}

unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    for (AVPlayerItemTrack *thisTrack in m_cachedTracks.get())
        m_cachedTotalBytes += [[thisTrack assetTrack] totalSampleDataLength];

    return m_cachedTotalBytes;
}

void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id>&& asset)
{
    m_avAsset = WTFMove(asset);
}

MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];

        if (error)
            ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.

        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}

long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return [error code];
}

void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}

void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if we already have an image generator or video output available.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}

void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    INFO_LOG(LOGIDENTIFIER);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (image) {
        GraphicsContextStateSaver stateSaver(context);
        context.translate(rect.x(), rect.y() + rect.height());
        context.scale(FloatSize(1.0f, -1.0f));
        context.setImageInterpolationQuality(InterpolationLow);
        IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
        CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    }
}

RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "creating image took ", (MonotonicTime::now() - start).seconds());
#endif

    return image;
}

void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
1514     supportedTypes = AVFoundationMIMETypeCache::singleton().types();
1515 }
1516
1517 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
1518 static bool keySystemIsSupported(const String& keySystem)
1519 {
1520     if (equalIgnoringASCIICase(keySystem, "com.apple.fps") || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0") || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey"))
1521         return true;
1522     return false;
1523 }
1524 #endif
1525
1526 MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
1527 {
1528 #if ENABLE(MEDIA_SOURCE)
1529     if (parameters.isMediaSource)
1530         return MediaPlayer::IsNotSupported;
1531 #endif
1532 #if ENABLE(MEDIA_STREAM)
1533     if (parameters.isMediaStream)
1534         return MediaPlayer::IsNotSupported;
1535 #endif
1536
1537     auto containerType = parameters.type.containerType();
1538     if (isUnsupportedMIMEType(containerType))
1539         return MediaPlayer::IsNotSupported;
1540
1541     if (!staticMIMETypeList().contains(containerType) && !AVFoundationMIMETypeCache::singleton().canDecodeType(containerType))
1542         return MediaPlayer::IsNotSupported;
1543
1544     // The spec says:
1545     // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
1546     if (parameters.type.codecs().isEmpty())
1547         return MediaPlayer::MayBeSupported;
1548
1549     if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
1550         return MediaPlayer::IsNotSupported;
1551
1552     NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
1553     return [PAL::getAVURLAssetClass() isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
1554 }
1555
1556 bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
1557 {
1558 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
1559     if (!keySystem.isEmpty()) {
1560         // "Clear Key" is only supported with HLS:
1561         if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
1562             return false;
1563
1564         if (!keySystemIsSupported(keySystem))
1565             return false;
1566
1567         if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
1568             return false;
1569
1570         if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().canDecodeType(mimeType))
1571             return false;
1572
1573         return true;
1574     }
1575 #else
1576     UNUSED_PARAM(keySystem);
1577     UNUSED_PARAM(mimeType);
1578 #endif
1579     return false;
1580 }
1581
1582 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1583 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
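// Satisfy a pending AVAssetResourceLoadingRequest directly from in-memory key data,
// honoring the byte range the data request asked for, then finish the load.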
1584 static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
1585 {
1586     if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
1587         [infoRequest setContentLength:keyData->byteLength()];
1588         [infoRequest setByteRangeAccessSupported:YES];
1589     }
1590
1591     if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
1592         long long start = [dataRequest currentOffset];
1593         long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);
1594
1595         if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
1596             [request finishLoadingWithError:nil];
1597             return;
1598         }
1599
1600         ASSERT(start <= std::numeric_limits<int>::max());
1601         ASSERT(end <= std::numeric_limits<int>::max());
1602         auto requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
1603         RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
1604         [dataRequest respondWithData:nsData.get()];
1605     }
1606
1607     [request finishLoading];
1608 }
1609 #endif
1610
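// Returning true tells the loader delegate that WebKit will service this request
// asynchronously (key requests via m_keyURIToRequestMap, everything else via a
// WebCoreAVFResourceLoader); returning false declines the request.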
1611 bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
1612 {
1613     String scheme = [[[avRequest request] URL] scheme];
1614     String keyURI = [[[avRequest request] URL] absoluteString];
1615
1616 #if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
1617     if (scheme == "skd") {
1618 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
1619         // Create an initData with the following layout:
1620         // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
1621         unsigned keyURISize = keyURI.length() * sizeof(UChar);
1622         auto initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
1623         unsigned byteLength = initDataBuffer->byteLength();
1624         auto initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
1625         initDataView->set<uint32_t>(0, keyURISize, true);
1626
1627         auto keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
1628         keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);
1629
1630         auto initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
1631         if (!player()->keyNeeded(initData.ptr()))
1632             return false;
1633 #endif
1634
1635 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
1636         if (m_cdmInstance) {
1637             avRequest.contentInformationRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
1638             [avRequest finishLoading];
1639             return true;
1640         }
1641
1642         RetainPtr<NSData> keyURIData = [keyURI dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
1643         m_keyID = SharedBuffer::create(keyURIData.get());
1644         player()->initializationDataEncountered("skd"_s, m_keyID->tryCreateArrayBuffer());
1645         setWaitingForKey(true);
1646 #endif
1647         m_keyURIToRequestMap.set(keyURI, avRequest);
1648
1649         return true;
1650     }
1651
1652 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
1653     if (scheme == "clearkey") {
1654         String keyID = [[[avRequest request] URL] resourceSpecifier];
1655         auto encodedKeyId = UTF8Encoding().encode(keyID, UnencodableHandling::URLEncodedEntities);
1656
1657         auto initData = Uint8Array::create(encodedKeyId.size());
1658         initData->setRange(encodedKeyId.data(), encodedKeyId.size(), 0);
1659
1660         auto keyData = player()->cachedKeyForKeyId(keyID);
1661         if (keyData) {
1662             fulfillRequestWithKeyData(avRequest, keyData.get());
1663             return false;
1664         }
1665
1666         if (!player()->keyNeeded(initData.ptr()))
1667             return false;
1668
1669         m_keyURIToRequestMap.set(keyID, avRequest);
1670         return true;
1671     }
1672 #endif
1673 #endif
1674
1675     auto resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
1676     m_resourceLoaderMap.add((__bridge CFTypeRef)avRequest, resourceLoader.copyRef());
1677     resourceLoader->startLoading();
1678     return true;
1679 }
1680
1681 void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
1682 {
1683     String scheme = [[[avRequest request] URL] scheme];
1684
1685     WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get((__bridge CFTypeRef)avRequest);
1686
1687     if (resourceLoader)
1688         resourceLoader->stopLoading();
1689 }
1690
1691 void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
1692 {
1693     m_resourceLoaderMap.remove((__bridge CFTypeRef)avRequest);
1694 }
1695 #endif
1696
1697 bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
1698 {
1699     return PAL::AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
1700 }
1701
1702 MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
1703 {
1704     if (!metaDataAvailable())
1705         return timeValue;
1706
1707     // FIXME - impossible to implement until rdar://8721510 is fixed.
1708     return timeValue;
1709 }
1710
1711 double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
1712 {
1713     return 0;
1714 }
1715
1716 void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
1717 {
1718     if (!m_videoLayer)
1719         return;
1720
1721     // Do not attempt to change the video gravity while in full screen mode.
1722     // See setVideoFullscreenGravity().
1723     if (m_videoFullscreenLayerManager->videoFullscreenLayer())
1724         return;
1725
1726     [CATransaction begin];
1727     [CATransaction setDisableActions:YES];    
1728     NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
1729     [m_videoLayer.get() setVideoGravity:gravity];
1730     [CATransaction commit];
1731 }
1732
1733 static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
1734 {
1735     NSUInteger index = [tracks indexOfObjectPassingTest:^(id obj, NSUInteger, BOOL *) {
1736         return [static_cast<AVAssetTrack*>(obj) isEnabled];
1737     }];
1738     if (index == NSNotFound)
1739         return nil;
1740     return [tracks objectAtIndex:index];
1741 }
1742
1743 void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
1744 {
1745     String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
1746     m_languageOfPrimaryAudioTrack = String();
1747
1748     if (!m_avAsset)
1749         return;
1750
1751     setDelayCharacteristicsChangedNotification(true);
1752
1753     bool haveCCTrack = false;
1754     bool hasCaptions = false;
1755
1756     // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
1757     // asked about those fairly frequently.
1758     if (!m_avPlayerItem) {
1759         // We don't have a player item yet, so check with the asset because some assets support inspection
1760         // prior to becoming ready to play.
1761         AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
1762         setHasVideo(firstEnabledVideoTrack);
1763         setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
1764         auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
1765         // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
1766         // See https://bugs.webkit.org/show_bug.cgi?id=172648.
1767         if (size.width() < 0)
1768             size.setWidth(-size.width());
1769         if (size.height() < 0)
1770             size.setHeight(-size.height());
1771         presentationSizeDidChange(size);
1772     } else {
1773         bool hasVideo = false;
1774         bool hasAudio = false;
1775         bool hasMetaData = false;
1776         for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
1777             if ([track isEnabled]) {
1778                 AVAssetTrack *assetTrack = [track assetTrack];
1779                 NSString *mediaType = [assetTrack mediaType];
1780                 if ([mediaType isEqualToString:AVMediaTypeVideo])
1781                     hasVideo = true;
1782                 else if ([mediaType isEqualToString:AVMediaTypeAudio])
1783                     hasAudio = true;
1784                 else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
1785                     haveCCTrack = true;
1786                 } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
1787                     hasMetaData = true;
1788                 }
1789             }
1790         }
1791
1792         updateAudioTracks();
1793         updateVideoTracks();
1794
1795         hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
1796         hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
1797
1798         // Always say we have video if the AVPlayerLayer is ready for display to work around
1799         // an AVFoundation bug which causes it to sometimes claim a track is disabled even
1800         // when it is not.
1801         setHasVideo(hasVideo || m_cachedIsReadyForDisplay);
1802
1803         setHasAudio(hasAudio);
1804 #if ENABLE(DATACUE_VALUE)
1805         if (hasMetaData)
1806             processMetadataTrack();
1807 #endif
1808     }
1809
1810     AVMediaSelectionGroup *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
1811     if (legibleGroup && m_cachedTracks) {
1812         hasCaptions = [[PAL::getAVMediaSelectionGroupClass() playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
1813         if (hasCaptions)
1814             processMediaSelectionOptions();
1815     }
1816
1817     setHasClosedCaptions(hasCaptions);
1818
1819     INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());
1820
1821     sizeChanged();
1822
1823     if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
1824         characteristicsChanged();
1825
1826 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
1827     if (m_provider)
1828         m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
1829 #endif
1830
1831     setDelayCharacteristicsChangedNotification(false);
1832 }
1833
1834 #if ENABLE(VIDEO_TRACK)
1835
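// Diff the AVPlayerItemTracks of the given media type against the existing track-private
// objects: tracks present only in the old set are reported to the MediaPlayer as removed,
// tracks present only in the new set are wrapped via itemFactory and reported as added,
// and oldItems is rewritten in place to hold the surviving plus newly created items.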
1836 template <typename RefT, typename PassRefT>
1837 void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
1838 {
1839     RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
1840         return [[[track assetTrack] mediaType] isEqualToString:trackType];
1841     }]]]);
1842     RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);
1843
1844     for (auto& oldItem : oldItems) {
1845         if (oldItem->playerItemTrack())
1846             [oldTracks addObject:oldItem->playerItemTrack()];
1847     }
1848
1849     // Find the added & removed AVPlayerItemTracks:
1850     RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
1851     [removedTracks minusSet:newTracks.get()];
1852
1853     RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
1854     [addedTracks minusSet:oldTracks.get()];
1855
1856     typedef Vector<RefT> ItemVector;
1857     ItemVector replacementItems;
1858     ItemVector addedItems;
1859     ItemVector removedItems;
1860     for (auto& oldItem : oldItems) {
1861         if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
1862             removedItems.append(oldItem);
1863         else
1864             replacementItems.append(oldItem);
1865     }
1866
1867     for (AVPlayerItemTrack* track in addedTracks.get())
1868         addedItems.append(itemFactory(track));
1869
1870     replacementItems.appendVector(addedItems);
1871     oldItems.swap(replacementItems);
1872
1873     for (auto& removedItem : removedItems)
1874         (player->*removedFunction)(*removedItem);
1875
1876     for (auto& addedItem : addedItems)
1877         (player->*addedFunction)(*addedItem);
1878 }
1879
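// Same diffing strategy as the AVPlayerItemTrack overload above, but driven by the
// group's AVMediaSelectionOptions after refreshing them with updateOptions().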
1880 template <typename RefT, typename PassRefT>
1881 void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
1882 {
1883     group->updateOptions(characteristics);
1884
1885     ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
1886     for (auto& option : group->options()) {
1887         if (!option)
1888             continue;
1889         AVMediaSelectionOption* avOption = option->avMediaSelectionOption();
1890         if (!avOption)
1891             continue;
1892         newSelectionOptions.add(option);
1893     }
1894
1895     ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
1896     for (auto& oldItem : oldItems) {
1897         if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
1898             oldSelectionOptions.add(option);
1899     }
1900
1901     // Find the added & removed AVMediaSelectionOptions:
1902     ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
1903     for (auto& oldOption : oldSelectionOptions) {
1904         if (!newSelectionOptions.contains(oldOption))
1905             removedSelectionOptions.add(oldOption);
1906     }
1907
1908     ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
1909     for (auto& newOption : newSelectionOptions) {
1910         if (!oldSelectionOptions.contains(newOption))
1911             addedSelectionOptions.add(newOption);
1912     }
1913
1914     typedef Vector<RefT> ItemVector;
1915     ItemVector replacementItems;
1916     ItemVector addedItems;
1917     ItemVector removedItems;
1918     for (auto& oldItem : oldItems) {
1919         if (!oldItem->mediaSelectionOption())
1920             removedItems.append(oldItem);
1921         else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
1922             removedItems.append(oldItem);
1923         else
1924             replacementItems.append(oldItem);
1925     }
1926
1927     for (auto& option : addedSelectionOptions)
1928         addedItems.append(itemFactory(*option.get()));
1929
1930     replacementItems.appendVector(addedItems);
1931     oldItems.swap(replacementItems);
1932     
1933     for (auto& removedItem : removedItems)
1934         (player->*removedFunction)(*removedItem);
1935
1936     for (auto& addedItem : addedItems)
1937         (player->*addedFunction)(*addedItem);
1938 }
1939
1940 void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
1941 {
1942 #if !RELEASE_LOG_DISABLED
1943     size_t count = m_audioTracks.size();
1944 #endif
1945
1946     Vector<String> characteristics = player()->preferredAudioCharacteristics();
1947     if (!m_audibleGroup) {
1948         if (AVMediaSelectionGroup *group = safeMediaSelectionGroupForAudibleMedia())
1949             m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
1950     }
1951
1952     if (m_audibleGroup)
1953         determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
1954     else
1955         determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
1956
1957     for (auto& track : m_audioTracks)
1958         track->resetPropertiesFromTrack();
1959
1960 #if !RELEASE_LOG_DISABLED
1961     INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
1962 #endif
1963 }
1964
1965 void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
1966 {
1967 #if !RELEASE_LOG_DISABLED
1968     size_t count = m_videoTracks.size();
1969 #endif
1970
1971     determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
1972
1973     if (!m_visualGroup) {
1974         if (AVMediaSelectionGroup *group = safeMediaSelectionGroupForVisualMedia())
1975             m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
1976     }
1977
1978     if (m_visualGroup)
1979         determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
1980
1981     for (auto& track : m_videoTracks)
1982         track->resetPropertiesFromTrack();
1983
1984 #if !RELEASE_LOG_DISABLED
1985     INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
1986 #endif
1987 }
1988
1989 bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
1990 {
1991     return m_videoFullscreenLayerManager->requiresTextTrackRepresentation();
1992 }
1993
1994 void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
1995 {
1996     m_videoFullscreenLayerManager->syncTextTrackBounds();
1997 }
1998
1999 void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
2000 {
2001     m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
2002 }
2003
2004 #endif // ENABLE(VIDEO_TRACK)
2005
2006 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2007
2008 AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
2009 {
2010     if (!m_provider) {
2011         m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
2012         m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
2013     }
2014     return m_provider.get();
2015 }
2016
2017 #endif
2018
2019 void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
2020 {
2021     if (!m_avAsset)
2022         return;
2023
2024     setNaturalSize(m_cachedPresentationSize);
2025 }
2026
2027 void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
2028 {
2029     setResolvedURL(m_avAsset ? URL([m_avAsset resolvedURL]) : URL());
2030 }
2031
2032 bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
2033 {
2034     AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
2035     if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
2036         || ![resourceLoader respondsToSelector:@selector(URLSession)])
2037         return false;
2038
2039     WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
2040     if ([session isKindOfClass:[WebCoreNSURLSession class]])
2041         return session.didPassCORSAccessChecks;
2042
2043     return false;
2044 }
2045
2046 Optional<bool> MediaPlayerPrivateAVFoundationObjC::wouldTaintOrigin(const SecurityOrigin& origin) const
2047 {
2048     AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
2049     if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
2050         || ![resourceLoader respondsToSelector:@selector(URLSession)])
2051         return false;
2052
2053     WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
2054     if ([session isKindOfClass:[WebCoreNSURLSession class]])
2055         return [session wouldTaintOrigin:origin];
2056
2057     return WTF::nullopt;
2058 }
2059
2060
2061 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2062
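// With VideoToolbox available we pass nil pixel buffer attributes so the output vends
// buffers in their native format and PixelBufferConformerCV converts them later (see
// updateLastImage()); otherwise we ask the output for 32BGRA buffers directly.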
2063 void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
2064 {
2065     INFO_LOG(LOGIDENTIFIER);
2066
2067     if (!m_avPlayerItem || m_videoOutput)
2068         return;
2069
2070 #if USE(VIDEOTOOLBOX)
2071     NSDictionary* attributes = nil;
2072 #else
2073     NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
2074 #endif
2075     m_videoOutput = adoptNS([PAL::allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
2076     ASSERT(m_videoOutput);
2077
2078     [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];
2079
2080     [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
2081 }
2082
2083 void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
2084 {
2085     if (!m_videoOutput)
2086         return;
2087
2088     if (m_avPlayerItem)
2089         [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
2090
2091     INFO_LOG(LOGIDENTIFIER);
2092
2093     m_videoOutput = nil;
2094 }
2095
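// Copy the pixel buffer for the item's current time into m_lastPixelBuffer, creating the
// video output on demand. Returns true only when a new buffer was actually copied, in
// which case the cached CGImage is invalidated and rebuilt lazily.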
2096 bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
2097 {
2098     if (!m_avPlayerItem)
2099         return false;
2100
2101     if (!m_videoOutput)
2102         createVideoOutput();
2103     ASSERT(m_videoOutput);
2104
2105     CMTime currentTime = [m_avPlayerItem.get() currentTime];
2106
2107     if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
2108         return false;
2109
2110     m_lastPixelBuffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
2111     m_lastImage = nullptr;
2112     return true;
2113 }
2114
2115 bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
2116 {
2117     if (!m_avPlayerItem)
2118         return false;
2119
2120     if (m_lastImage)
2121         return true;
2122
2123     if (!m_videoOutput)
2124         createVideoOutput();
2125
2126     return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
2127 }
2128
2129 void MediaPlayerPrivateAVFoundationObjC::updateLastImage(UpdateType type)
2130 {
2131 #if HAVE(CORE_VIDEO)
2132     if (!m_avPlayerItem)
2133         return;
2134
2135     if (type == UpdateType::UpdateSynchronously && !m_lastImage && !videoOutputHasAvailableFrame())
2136         waitForVideoOutputMediaDataWillChange();
2137
2138     // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
2139     // for the requested time has already been retrieved. In this case, the last valid image (if any)
2140     // should be displayed.
2141     if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
2142         return;
2143
2144     if (!m_pixelBufferConformer) {
2145 #if USE(VIDEOTOOLBOX)
2146         NSDictionary *attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
2147 #else
2148         NSDictionary *attributes = nil;
2149 #endif
2150         m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
2151     }
2152
2153 #if !RELEASE_LOG_DISABLED
2154     MonotonicTime start = MonotonicTime::now();
2155 #endif
2156
2157     m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());
2158
2159 #if !RELEASE_LOG_DISABLED
2160     INFO_LOG(LOGIDENTIFIER, "creating buffer took ", (MonotonicTime::now() - start).seconds());
2161 #endif
2162 #endif // HAVE(CORE_VIDEO)
2163 }
2164
2165 void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
2166 {
2167     updateLastImage(UpdateType::UpdateSynchronously);
2168     if (!m_lastImage)
2169         return;
2170
2171     AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
2172     if (!firstEnabledVideoTrack)
2173         return;
2174
2175     INFO_LOG(LOGIDENTIFIER);
2176
2177     GraphicsContextStateSaver stateSaver(context);
2178     FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
2179     AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
2180     FloatRect transformedOutputRect = videoTransform.inverse().valueOr(AffineTransform()).mapRect(outputRect);
2181
2182     context.concatCTM(videoTransform);
2183     context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);
2184
2185     // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
2186     // video frame, destroy it now that it is no longer needed.
2187     if (m_imageGenerator)
2188         destroyImageGenerator();
2189
2190 }
2191
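// Refresh m_lastPixelBuffer and hand it to a lazily created VideoTextureCopierCV, which
// copies the frame into the caller-supplied GL texture with the requested format,
// orientation (flipY) and alpha treatment.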
2192 bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
2193 {
2194     ASSERT(context);
2195
2196     updateLastPixelBuffer();
2197     if (!m_lastPixelBuffer)
2198         return false;
2199
2200     size_t width = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
2201     size_t height = CVPixelBufferGetHeight(m_lastPixelBuffer.get());
2202
2203     if (!m_videoTextureCopier)
2204         m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);
2205
2206     return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
2207 }
2208
2209 NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
2210 {
2211     updateLastImage();
2212     return m_lastImage;
2213 }
2214
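// Block the calling thread, for at most one second, until outputMediaDataWillChange()
// signals m_videoOutputSemaphore from the pull delegate queue. Used by the synchronous
// paint path when no frame is available yet.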
2215 void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
2216 {
2217     [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];
2218
2219     // Wait for 1 second.
2220     bool satisfied = m_videoOutputSemaphore.waitFor(1_s);
2221     if (!satisfied)
2222         ERROR_LOG(LOGIDENTIFIER, "timed out");
2223 }
2224
2225 void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput *)
2226 {
2227     m_videoOutputSemaphore.signal();
2228 }
2229
2230 #endif
2231
2232 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2233
2234 RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
2235 {
2236     return m_keyURIToRequestMap.take(keyURI);
2237 }
2238
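// A key has been added to the player's key cache: fulfill every pending loading request
// whose key data is now available, then drop those entries from m_keyURIToRequestMap.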
2239 void MediaPlayerPrivateAVFoundationObjC::keyAdded()
2240 {
2241     Vector<String> fulfilledKeyIds;
2242
2243     for (auto& pair : m_keyURIToRequestMap) {
2244         const String& keyId = pair.key;
2245         const RetainPtr<AVAssetResourceLoadingRequest>& request = pair.value;
2246
2247         auto keyData = player()->cachedKeyForKeyId(keyId);
2248         if (!keyData)
2249             continue;
2250
2251         fulfillRequestWithKeyData(request.get(), keyData.get());
2252         fulfilledKeyIds.append(keyId);
2253     }
2254
2255     for (auto& keyId : fulfilledKeyIds)
2256         m_keyURIToRequestMap.remove(keyId);
2257 }
2258
2259 void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
2260 {
2261     ASSERT_UNUSED(session, &session == m_session);
2262     m_session = nullptr;
2263 }
2264
2265 std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
2266 {
2267     if (!keySystemIsSupported(keySystem))
2268         return nullptr;
2269     auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
2270     m_session = makeWeakPtr(*session);
2271     return WTFMove(session);
2272 }
2273 #endif
2274
2275 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(LEGACY_ENCRYPTED_MEDIA)
2276 void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
2277 {
2278 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2279     if (m_session && newValue)
2280         m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
2281 #endif
2282
2283 #if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
2284     if (m_cdmInstance)
2285         m_cdmInstance->outputObscuredDueToInsufficientExternalProtectionChanged(newValue);
2286 #elif !ENABLE(LEGACY_ENCRYPTED_MEDIA)
2287     UNUSED_PARAM(newValue);
2288 #endif
2289 }
2290 #endif
2291
2292 #if ENABLE(ENCRYPTED_MEDIA)
2293 void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
2294 {
2295 #if HAVE(AVCONTENTKEYSESSION)
2296     if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
2297         return;
2298
2299     auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
2300     if (&fpsInstance == m_cdmInstance)
2301         return;
2302
2303     if (m_cdmInstance)
2304         cdmInstanceDetached(*m_cdmInstance);
2305
2306     m_cdmInstance = &fpsInstance;
2307 #else
2308     UNUSED_PARAM(instance);
2309 #endif
2310 }
2311
2312 void MediaPlayerPrivateAVFoundationObjC::cdmInstanceDetached(CDMInstance& instance)
2313 {
2314 #if HAVE(AVCONTENTKEYSESSION)
2315     ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);
2316     m_cdmInstance = nullptr;
2317 #else
2318     UNUSED_PARAM(instance);
2319 #endif
2320 }
2321
2322 void MediaPlayerPrivateAVFoundationObjC::attemptToDecryptWithInstance(CDMInstance&)
2323 {
2324 #if HAVE(AVCONTENTKEYSESSION)
2325     if (!m_keyID || !m_cdmInstance)
2326         return;
2327
2328     auto instanceSession = m_cdmInstance->sessionForKeyIDs(Vector<Ref<SharedBuffer>>::from(*m_keyID));
2329     if (!instanceSession)
2330         return;
2331
2332     [instanceSession->contentKeySession() addContentKeyRecipient:m_avAsset.get()];
2333
2334     auto keyURIToRequestMap = WTFMove(m_keyURIToRequestMap);
2335     for (auto& request : keyURIToRequestMap.values()) {
2336         if (auto *infoRequest = request.get().contentInformationRequest)
2337             infoRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
2338         [request finishLoading];
2339     }
2340     setWaitingForKey(false);
2341 #endif
2342 }
2343
2344 void MediaPlayerPrivateAVFoundationObjC::setWaitingForKey(bool waitingForKey)
2345 {
2346     if (m_waitingForKey == waitingForKey)
2347         return;
2348
2349     m_waitingForKey = waitingForKey;
2350     player()->waitingForKeyChanged();
2351 }
2352 #endif
2353
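// The "safe" accessors below return nil (or false) until the relevant asset keys have
// finished loading, so callers never trigger synchronous, blocking key loading on the asset.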
2354 NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
2355 {
2356     if (!m_avAsset)
2357         return nil;
2358
2359     if ([m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
2360         return nil;
2361
2362     return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
2363 }
2364
2365 bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
2366 {
2367     if (!m_avAsset)
2368         return false;
2369
2370     if ([m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] != AVKeyValueStatusLoaded)
2371         return false;
2372
2373     return true;
2374 }
2375
2376 AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
2377 {
2378     if (!hasLoadedMediaSelectionGroups())
2379         return nil;
2380
2381     return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
2382 }
2383
2384 AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
2385 {
2386     if (!hasLoadedMediaSelectionGroups())
2387         return nil;
2388
2389     return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
2390 }
2391
2392 AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
2393 {
2394     if (!hasLoadedMediaSelectionGroups())
2395         return nil;
2396
2397     return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual];
2398 }
2399
2400 void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
2401 {
2402     AVMediaSelectionGroup *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
2403     if (!legibleGroup) {
2404         INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
2405         return;
2406     }
2407
2408     // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
2409     // but set the selected legible track to nil so text tracks will not be automatically configured.
2410     if (!m_textTracks.size())
2411         [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
2412
2413     Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
2414     NSArray *legibleOptions = [PAL::getAVMediaSelectionGroupClass() playableMediaSelectionOptionsFromArray:[legibleGroup options]];
2415     for (AVMediaSelectionOption *option in legibleOptions) {
2416         bool newTrack = true;
2417         for (unsigned i = removedTextTracks.size(); i > 0; --i) {
2418             if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
2419                 continue;
2420             
2421             RetainPtr<AVMediaSelectionOption> currentOption;
2422 #if ENABLE(AVF_CAPTIONS)
2423             if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
2424                 RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
2425                 currentOption = track->mediaSelectionOption();
2426             } else
2427 #endif
2428             {
2429                 RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
2430                 currentOption = track->mediaSelectionOption();
2431             }
2432             
2433             if ([currentOption.get() isEqual:option]) {
2434                 removedTextTracks.remove(i - 1);
2435                 newTrack = false;
2436                 break;
2437             }
2438         }
2439         if (!newTrack)
2440             continue;
2441
2442 #if ENABLE(AVF_CAPTIONS)
2443         if ([option outOfBandSource]) {
2444             m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
2445             m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
2446             continue;
2447         }
2448 #endif
2449
2450         m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
2451     }
2452
2453     processNewAndRemovedTextTracks(removedTextTracks);
2454 }
2455
2456 void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
2457 {
2458     if (m_metadataTrack)
2459         return;
2460
2461     m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
2462     m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
2463     player()->addTextTrack(*m_metadataTrack);
2464 }
2465
2466 void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
2467 {
2468     ASSERT(time >= MediaTime::zeroTime());
2469
2470     if (!m_currentTextTrack)
2471         return;
2472
2473     m_currentTextTrack->processCue((__bridge CFArrayRef)attributedStrings, (__bridge CFArrayRef)nativeSamples, time);
2474 }
2475
2476 void MediaPlayerPrivateAVFoundationObjC::flushCues()
2477 {
2478     INFO_LOG(LOGIDENTIFIER);
2479
2480     if (!m_currentTextTrack)
2481         return;
2482     
2483     m_currentTextTrack->resetCueValues();
2484 }
2485
2486 void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
2487 {
2488     if (m_currentTextTrack == track)
2489         return;
2490
2491     INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");
2492
2493     m_currentTextTrack = track;
2494
2495     if (track) {
2496         if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
2497             ALLOW_DEPRECATED_DECLARATIONS_BEGIN
2498             [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
2499             ALLOW_DEPRECATED_DECLARATIONS_END
2500 #if ENABLE(AVF_CAPTIONS)
2501         else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
2502             [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
2503 #endif
2504         else
2505             [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
2506     } else {
2507         [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
2508         ALLOW_DEPRECATED_DECLARATIONS_BEGIN
2509         [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
2510         ALLOW_DEPRECATED_DECLARATIONS_END
2511     }
2512
2513 }
2514
2515 String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
2516 {
2517     if (!m_languageOfPrimaryAudioTrack.isNull())
2518         return m_languageOfPrimaryAudioTrack;
2519
2520     if (!m_avPlayerItem.get())
2521         return emptyString();
2522
2523     // If AVFoundation has an audible group, return the language of the currently selected audible option.
2524     AVMediaSelectionGroup *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
2525     ALLOW_DEPRECATED_DECLARATIONS_BEGIN
2526     AVMediaSelectionOption *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
2527     ALLOW_DEPRECATED_DECLARATIONS_END
2528     if (currentlySelectedAudibleOption) {
2529         m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
2530         INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);
2531
2532         return m_languageOfPrimaryAudioTrack;
2533     }
2534
2535     // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
2536     // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
2537     NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
2538     if (!tracks || [tracks count] != 1) {
2539         m_languageOfPrimaryAudioTrack = emptyString();
2540         INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
2541         return m_languageOfPrimaryAudioTrack;
2542     }
2543
2544     AVAssetTrack *track = [tracks objectAtIndex:0];
2545     m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);
2546
2547     INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");
2548
2549     return m_languageOfPrimaryAudioTrack;
2550 }
2551
2552 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
2553 bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
2554 {
2555     bool wirelessTarget = false;
2556
2557 #if !PLATFORM(IOS_FAMILY)
2558     if (m_playbackTarget) {
2559         if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
2560             wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
2561         else
2562             wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
2563     }
2564 #else
2565     wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
2566 #endif
2567
2568     INFO_LOG(LOGIDENTIFIER, wirelessTarget);
2569
2570     return wirelessTarget;
2571 }
2572
2573 MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
2574 {
2575     if (!m_avPlayer)
2576         return MediaPlayer::TargetTypeNone;
2577
2578 #if PLATFORM(IOS_FAMILY)
2579     if (!PAL::AVFoundationLibrary())
2580         return MediaPlayer::TargetTypeNone;
2581
2582     switch ([m_avPlayer externalPlaybackType]) {
2583     case AVPlayerExternalPlaybackTypeNone:
2584         return MediaPlayer::TargetTypeNone;
2585     case AVPlayerExternalPlaybackTypeAirPlay:
2586         return MediaPlayer::TargetTypeAirPlay;
2587     case AVPlayerExternalPlaybackTypeTVOut:
2588         return MediaPlayer::TargetTypeTVOut;
2589     }
2590
2591     ASSERT_NOT_REACHED();
2592     return MediaPlayer::TargetTypeNone;
2593
2594 #else
2595     return MediaPlayer::TargetTypeAirPlay;
2596 #endif
2597 }
2598     
2599 #if PLATFORM(IOS_FAMILY)
2600 static NSString *externalDeviceDisplayNameForPlayer(AVPlayer *player)
2601 {
2602 #if HAVE(CELESTIAL)
2603     if (!PAL::AVFoundationLibrary())
2604         return nil;
2605
2606     if ([PAL::getAVOutputContextClass() respondsToSelector:@selector(sharedAudioPresentationOutputContext)]) {
2607         AVOutputContext *outputContext = [PAL::getAVOutputContextClass() sharedAudioPresentationOutputContext];
2608
2609         if (![outputContext respondsToSelector:@selector(supportsMultipleOutputDevices)]
2610             || ![outputContext supportsMultipleOutputDevices]
2611             || ![outputContext respondsToSelector:@selector(outputDevices)])
2612             return [outputContext deviceName];
2613
2614         auto outputDeviceNames = adoptNS([[NSMutableArray alloc] init]);
2615         for (AVOutputDevice *outputDevice in [outputContext outputDevices]) {
2616 ALLOW_DEPRECATED_DECLARATIONS_BEGIN
2617             auto outputDeviceName = adoptNS([[outputDevice name] copy]);
2618 ALLOW_DEPRECATED_DECLARATIONS_END
2619             [outputDeviceNames addObject:outputDeviceName.get()];
2620         }
2621
2622         return [outputDeviceNames componentsJoinedByString:@" + "];
2623     }
2624
2625     if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
2626         return nil;
2627
2628     NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
2629     if (!pickableRoutes.count)
2630         return nil;
2631
2632     NSString *displayName = nil;
2633     for (NSDictionary *pickableRoute in pickableRoutes) {
2634         if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
2635             continue;
2636
2637         displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];
2638
2639         NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
2640         if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
2641             break;
2642
2643         // The route is a speaker or HDMI out, override the name to be the localized device model.
2644         NSString *localizedDeviceModel = [[PAL::getUIDeviceClass() currentDevice] localizedModel];
2645
2646         // In cases where a route with that name already exists, prefix the name with the model.
2647         BOOL includeLocalizedDeviceModelName = NO;
2648         for (NSDictionary *otherRoute in pickableRoutes) {
2649             if (otherRoute == pickableRoute)
2650                 continue;
2651
2652             if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
2653                 includeLocalizedDeviceModelName = YES;
2654                 break;
2655             }
2656         }
2657
2658         if (includeLocalizedDeviceModelName)
2659             displayName = [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
2660         else
2661             displayName = localizedDeviceModel;
2662
2663         break;
2664     }
2665
2666     return displayName;
2667 #else
2668     UNUSED_PARAM(player);
2669     return nil;
2670 #endif
2671 }
2672 #endif
2673
2674 String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
2675 {
2676     if (!m_avPlayer)
2677         return emptyString();
2678
2679     String wirelessTargetName;
2680 #if !PLATFORM(IOS_FAMILY)
2681     if (m_playbackTarget)
2682         wirelessTargetName = m_playbackTarget->deviceName();
2683 #else
2684     wirelessTargetName = externalDeviceDisplayNameForPlayer(m_avPlayer.get());
2685 #endif
2686
2687     return wirelessTargetName;
2688 }
2689
2690 bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
2691 {
2692     if (!m_avPlayer)
2693         return !m_allowsWirelessVideoPlayback;
2694
2695     m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
2696     INFO_LOG(LOGIDENTIFIER, !m_allowsWirelessVideoPlayback);
2697
2698     return !m_allowsWirelessVideoPlayback;
2699 }
2700
2701 void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
2702 {
2703     INFO_LOG(LOGIDENTIFIER, disabled);
2704     m_allowsWirelessVideoPlayback = !disabled;
2705     if (!m_avPlayer)
2706         return;
2707
2708     setDelayCallbacks(true);
2709     [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
2710     setDelayCallbacks(false);
2711 }
2712
2713 #if !PLATFORM(IOS_FAMILY)
2714
2715 void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
2716 {
2717     m_playbackTarget = WTFMove(target);
2718
2719     m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;
2720
2721     INFO_LOG(LOGIDENTIFIER);
2722
2723     if (!m_playbackTarget->hasActiveRoute())
2724         setShouldPlayToPlaybackTarget(false);
2725 }
2726
2727 void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
2728 {
2729     if (m_shouldPlayToPlaybackTarget == shouldPlay)
2730         return;
2731
2732     m_shouldPlayToPlaybackTarget = shouldPlay;
2733
2734     if (!m_playbackTarget)
2735         return;
2736
2737     INFO_LOG(LOGIDENTIFIER, shouldPlay);
2738
2739     if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
2740         AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;
2741
2742         if (!m_avPlayer)
2743             return;
2744
2745         RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
2746         if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
2747             return;
2748
2749         setDelayCallbacks(true);
2750         m_avPlayer.get().outputContext = newContext;
2751         setDelayCallbacks(false);
2752
2753         return;
2754     }
2755
2756     ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);
2757
2758     setDelayCallbacks(true);
2759     auto weakThis = makeWeakPtr(*this);
2760     scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
2761         if (!weakThis)
2762             return;
2763         weakThis->playbackTargetIsWirelessDidChange();
2764     }));
2765     setDelayCallbacks(false);
2766 }
2767
2768 #endif // !PLATFORM(IOS_FAMILY)
2769
2770 void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
2771 {
2772 #if PLATFORM(IOS_FAMILY)
2773     if (!m_avPlayer)
2774         return;
2775
2776     if ([m_avPlayer respondsToSelector:@selector(setUsesExternalPlaybackWhileExternalScreenIsActive:)])
2777         [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:(player()->fullscreenMode() == MediaPlayer::VideoFullscreenModeStandard) || player()->isVideoFullscreenStandby()];
2778 #endif
2779 }
2780
2781 #endif
2782
2783 void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
2784 {
2785     m_cachedItemStatus = status;
2786
2787     updateStates();
2788 }
2789
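// The WillChange/DidChange pairs below bracket KVO notifications for the player item's
// buffering state. m_pendingStatusChanges counts in-flight notifications so that
// updateStates() runs once per batch, after the last DidChange has been delivered.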
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray>&& seekableRanges)
{
    m_cachedSeekableRanges = WTFMove(seekableRanges);

    seekableTimeRangesChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray>&& loadedRanges)
{
    m_cachedLoadedRanges = WTFMove(loadedRanges);

    loadedTimeRangesChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}

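// Buffering is paused by detaching the AVPlayerItem from the AVPlayer and resumed by reattaching it,
// since AVFoundation stops loading media data for an item that has no player.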
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    INFO_LOG(LOGIDENTIFIER, shouldBuffer);

    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;

    if (!m_avPlayer)
        return;

    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
    updateStates();
}

#if ENABLE(DATACUE_VALUE)

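// Maps an AVFoundation metadata key space to the type string exposed on the resulting DataCue.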
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom();
}

#endif

void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(const RetainPtr<NSArray>& metadata, const MediaTime& mediaTime)
{
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    INFO_LOG(LOGIDENTIFIER, "adding ", m_currentMetaData ? [m_currentMetaData.get() count] : 0, " at time ", mediaTime);

#if ENABLE(DATACUE_VALUE)
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItem *item in m_currentMetaData.get()) {
        MediaTime start = std::max(PAL::toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

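    // Add a DataCue for each item. A cue without an explicit duration stays open-ended until a later
    // metadata batch supplies its end time via updatePendingCueEndTimes().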
    for (AVMetadataItem *item in m_currentMetaData.get()) {
        MediaTime start = std::max(PAL::toMediaTime(item.time), MediaTime::zeroTime());
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + PAL::toMediaTime(item.duration);

        AtomicString type = nullAtom();
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}

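// Rebuilds the cached track list when the AVPlayerItem's tracks change. Streaming-only tracks that
// belong to a media selection group are filtered out because they are exposed through the selection
// group code path instead, and the per-track 'enabled' observers are re-registered.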
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(const RetainPtr<NSArray>& tracks)
{
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}

void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}

void MediaPlayerPrivateAVFoundationObjC::timeControlStatusDidChange(int timeControlStatus)
{
    if (m_cachedTimeControlStatus == timeControlStatus)
        return;

    if (!m_shouldObserveTimeControlStatus)
        return;

    m_cachedTimeControlStatus = timeControlStatus;

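    // When playing to a wireless target, the remote device can start or pause playback on its own, so
    // fold the AVPlayer's time control status back into the requested playing state.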
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    if (!isCurrentPlaybackTargetWireless())
        return;

    bool playerIsPlaying = m_cachedTimeControlStatus != AVPlayerTimeControlStatusPaused;
    if (playerIsPlaying != m_requestedPlaying) {
        m_requestedPlaying = playerIsPlaying;
        player()->playbackStateChanged();
    }
#endif
}

#if ENABLE(WIRELESS_PLAYBACK_TARGET)

void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}

#endif

void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}

void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}

void MediaPlayerPrivateAVFoundationObjC::setShouldDisableSleep(bool flag)
{
#if PLATFORM(IOS_FAMILY) && !PLATFORM(IOS_FAMILY_SIMULATOR) && !PLATFORM(IOSMAC)
    ALLOW_DEPRECATED_DECLARATIONS_BEGIN
    [m_avPlayer _setPreventsSleepDuringVideoPlayback:flag];
    ALLOW_DEPRECATED_DECLARATIONS_END
#else
    UNUSED_PARAM(flag);
#endif
}

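// Exposes AVPlayerLayer's videoPerformanceMetrics as VideoPlaybackQualityMetrics. The
// respondsToSelector checks guard SPI that may be missing, notably the display-composited frame count.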
Optional<VideoPlaybackQualityMetrics> MediaPlayerPrivateAVFoundationObjC::videoPlaybackQualityMetrics()
{
    if (![m_videoLayer respondsToSelector:@selector(videoPerformanceMetrics)])
        return WTF::nullopt;

#if PLATFORM(WATCHOS)
    return WTF::nullopt;
#else
    ALLOW_NEW_API_WITHOUT_GUARDS_BEGIN

    auto metrics = [m_videoLayer videoPerformanceMetrics];
    if (!metrics)
        return WTF::nullopt;

    uint32_t displayCompositedFrames = 0;
    if ([metrics respondsToSelector:@selector(numberOfDisplayCompositedVideoFrames)])
        displayCompositedFrames = [metrics numberOfDisplayCompositedVideoFrames];

    return VideoPlaybackQualityMetrics {
        static_cast<uint32_t>([metrics totalNumberOfVideoFrames]),
        static_cast<uint32_t>([metrics numberOfDroppedVideoFrames]),
        static_cast<uint32_t>([metrics numberOfCorruptedVideoFrames]),
        [metrics totalFrameDelay],
        displayCompositedFrames,
    };

    ALLOW_NEW_API_WITHOUT_GUARDS_END
#endif
}

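// Runs the given task on the main queue when playback reaches the given time, via a boundary time
// observer. Only one task can be pending at once; scheduling a new one removes any earlier observer.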
bool MediaPlayerPrivateAVFoundationObjC::performTaskAtMediaTime(WTF::Function<void()>&& task, MediaTime time)
{
    if (!m_avPlayer)
        return false;

    __block WTF::Function<void()> taskIn = WTFMove(task);

    if (m_timeObserver)
        [m_avPlayer removeTimeObserver:m_timeObserver.get()];

    m_timeObserver = [m_avPlayer addBoundaryTimeObserverForTimes:@[[NSValue valueWithCMTime:toCMTime(time)]] queue:dispatch_get_main_queue() usingBlock:^{
        taskIn();
    }];
    return true;
}

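// Starts or stops KVO observation of the AVPlayer's timeControlStatus, syncing the cached value
// whenever observation begins.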
void MediaPlayerPrivateAVFoundationObjC::setShouldObserveTimeControlStatus(bool shouldObserve)
{
    if (shouldObserve == m_shouldObserveTimeControlStatus)
        return;

    m_shouldObserveTimeControlStatus = shouldObserve;
    if (shouldObserve) {
        [m_avPlayer addObserver:m_objcObserver.get() forKeyPath:@"timeControlStatus" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
        timeControlStatusDidChange(m_avPlayer.get().timeControlStatus);
    } else {
BEGIN_BLOCK_OBJC_EXCEPTIONS
        [m_avPlayer removeObserver:m_objcObserver.get() forKeyPath:@"timeControlStatus"];
END_BLOCK_OBJC_EXCEPTIONS
    }
}

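// The helpers below enumerate the asset and track properties loaded asynchronously, and the key
// paths observed via KVO on the AVPlayerItem and AVPlayer.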
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"duration",
        @"naturalSize",
        @"preferredTransform",
        @"preferredVolume",
        @"preferredRate",
        @"playable",
        @"resolvedURL",
        @"tracks",
        @"availableMediaCharacteristicsWithMediaSelectionOptions",
    nil];
    return keys;
}

NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        @"timedMetadata",
        @"canPlayFastForward",
        @"canPlayFastReverse",
    nil];
    return keys;
}

NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}

NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        @"externalPlaybackActive",
        @"allowsExternalPlayback",
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
        @"outputObscuredDueToInsufficientExternalProtection",
#endif
    nil];
    return keys;
}
} // namespace WebCore

@implementation WebCoreAVFMovieObserver

- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player
{
    self = [super init];
    if (!self)
        return nil;
    m_player = WTFMove(player);
    return self;
}

- (void)disconnect
{
    m_player = nullptr;
}

- (void)metadataLoaded
{
    m_taskQueue.enqueueTask([player = m_player] {
        if (player)
            player->metadataLoaded();
    });
}

- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    m_taskQueue.enqueueTask([player = m_player] {
        if (player)
            player->didEnd();
    });
}

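// Every KVO notification is bounced to the main thread through m_taskQueue, then dispatched first on
// its observation context (player layer, item track, player item, or player) and then on its key path.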
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    m_taskQueue.enqueueTask([player = m_player, keyPath = retainPtr(keyPath), change = retainPtr(change), object = retainPtr(object), context] {
        if (!player)
            return;
        id newValue = [change valueForKey:NSKeyValueChangeNewKey];
        bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];
        bool shouldLogValue = !willChange;

        if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
            if ([keyPath isEqualToString:@"readyForDisplay"])
                player->firstFrameAvailableDidChange([newValue boolValue]);
        }

        if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
            if ([keyPath isEqualToString:@"enabled"])
                player->trackEnabledDidChange([newValue boolValue]);
        }

        if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
            if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
                player->playbackLikelyToKeepUpWillChange();
            else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
                player->playbackBufferEmptyWillChange();
            else if ([keyPath isEqualToString:@"playbackBufferFull"])
                player->playbackBufferFullWillChange();
        }

        if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
            // A value changed for an AVPlayerItem.
            if ([keyPath isEqualToString:@"status"])
                player->playerItemStatusDidChange([newValue intValue]);
            else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
                player->playbackLikelyToKeepUpDidChange([newValue boolValue]);
            else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
                player->playbackBufferEmptyDidChange([newValue boolValue]);
            else if ([keyPath isEqualToString:@"playbackBufferFull"])
                player->playbackBufferFullDidChange([newValue boolValue]);
            else if ([keyPath isEqualToString:@"asset"]) {
                player->setAsset(RetainPtr<id>(newValue));
                shouldLogValue = false;
            } else if ([keyPath isEqualToString:@"loadedTimeRanges"])
                player->loadedTimeRangesDidChange(RetainPtr<NSArray>(newValue));
            else if ([keyPath isEqualToString:@"seekableTimeRanges"])
                player->seekableTimeRangesDidChange(RetainPtr<NSArray>(newValue));
            else if ([keyPath isEqualToString:@"tracks"]) {
                player->tracksDidChange(RetainPtr<NSArray>(newValue));
                shouldLogValue = false;
            } else if ([keyPath isEqualToString:@"hasEnabledAudio"])
                player->hasEnabledAudioDidChange([newValue boolValue]);
            else if ([keyPath isEqualToString:@"presentationSize"])
                player->presentationSizeDidChange(FloatSize([newValue sizeValue]));
            else if ([keyPath isEqualToString:@"duration"])
                player->durationDidChange(PAL::toMediaTime([newValue CMTimeValue]));
            else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
                MediaTime now;
                CMTime itemTime = [(AVPlayerItem *)object.get() currentTime];
                if (CMTIME_IS_NUMERIC(itemTime))
                    now = std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());
                player->metadataDidArrive(RetainPtr<NSArray>(newValue), now);
                shouldLogValue = false;
            } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
                player->canPlayFastReverseDidChange([newValue boolValue]);
            else if ([keyPath isEqualToString:@"canPlayFastForward"])
                player->canPlayFastForwardDidChange([newValue boolValue]);
        }

        if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
            // A value changed for an AVPlayer.
            if ([keyPath isEqualToString:@"rate"])
                player->rateDidChange([newValue doubleValue]);
            else if ([keyPath isEqualToString:@"timeControlStatus"])
                player->timeControlStatusDidChange([newValue intValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
            else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
                player->playbackTargetIsWirelessDidChange();
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
            else if ([keyPath isEqualToString:@"outputObscuredDueToInsufficientExternalProtection"])
                player->outputObscuredDueToInsufficientExternalProtectionChanged([newValue boolValue]);
#endif
        }

#if !RELEASE_LOG_DISABLED
        if (player->logger().willLog(player->logChannel(), WTFLogLevel::Debug) && !([keyPath isEqualToString:@"loadedTimeRanges"] || [keyPath isEqualToString:@"seekableTimeRanges"])) {
            auto identifier = Logger::LogSiteIdentifier("MediaPlayerPrivateAVFoundation", "observeValueForKeyPath", player->logIdentifier());

            if (shouldLogValue) {
                if ([keyPath isEqualToString:@"duration"])
                    player->logger().debug(player->logChannel(), identifier, "did change '", [keyPath UTF8String], "' to ", PAL::toMediaTime([newValue CMTimeValue]));
                else {
                    RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
                    player->logger().debug(player->logChannel(), identifier, "did change '", [keyPath UTF8String], "' to ", [valueString.get() UTF8String]);
                }
            } else
                player->logger().debug(player->logChannel(), identifier, willChange ? "will" : "did", " change '", [keyPath UTF8String], "'");
        }
#endif
    });
}

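// AVPlayerItemLegibleOutput delegate: forwards in-band text cues to the cross-platform cue
// processing code, clamping the item time to zero.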
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);

    m_taskQueue.enqueueTask([player = m_player, strings = retainPtr(strings), nativeSamples = retainPtr(nativeSamples), itemTime] {
        if (!player)
            return;
        MediaTime time = std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());
        player->processCue(strings.get(), nativeSamples.get(), time);
    });
}

- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    m_taskQueue.enqueueTask([player = m_player] {
        if (player)
            player->flushCues();
    });
}

@end

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)

@implementation WebCoreAVFLoaderDelegate

- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player
{
    self = [super init];
    if (!self)
        return nil;
    m_player = WTFMove(player);
    return self;
}

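// Answering YES tells AVFoundation to keep the request alive while the decision is made
// asynchronously on the main thread; the request is failed there if the player has been destroyed or
// declines to handle the resource.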
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_player)
        return NO;

    m_taskQueue.enqueueTask([player = m_player, loadingRequest = retainPtr(loadingRequest)] {
        if (!player) {
            [loadingRequest finishLoadingWithError:nil];
            return;
        }

        if (!player->shouldWaitForLoadingOfResource(loadingRequest.get()))
            [loadingRequest finishLoadingWithError:nil];
    });

    return YES;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    UNUSED_PARAM(challenge);
    ASSERT_NOT_REACHED();
    return NO;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    m_taskQueue.enqueueTask([player = m_player, loadingRequest = retainPtr(loadingRequest)] {
        if (player)
            player->didCancelLoadingRequest(loadingRequest.get());
    });
}

@end

#endif

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)

@implementation WebCoreAVFPullDelegate

- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player
{
    self = [super init];
    if (self)
        m_player = WTFMove(player);
    return self;
}

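// Called by the AVPlayerItemVideoOutput when new pixel buffers become available to pull.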
- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (m_player)
        m_player->outputMediaDataWillChange(output);
}

- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
    // No-op.
}

@end

#endif

#endif