Unreviewed, rolling out r247292.
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetMIMETypeCache.h"
32 #import "AVAssetTrackUtilities.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "DeprecatedGlobalSettings.h"
41 #import "Extensions3D.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "ImageRotationSessionVT.h"
47 #import "InbandMetadataTextTrackPrivateAVF.h"
48 #import "InbandTextTrackPrivateAVFObjC.h"
49 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
50 #import "Logging.h"
51 #import "MediaPlaybackTargetMac.h"
52 #import "MediaPlaybackTargetMock.h"
53 #import "MediaSelectionGroupAVFObjC.h"
54 #import "OutOfBandTextTrackPrivateAVF.h"
55 #import "PixelBufferConformerCV.h"
56 #import "PlatformTimeRanges.h"
57 #import "SecurityOrigin.h"
58 #import "SerializedPlatformRepresentationMac.h"
59 #import "SharedBuffer.h"
60 #import "TextEncoding.h"
61 #import "TextTrackRepresentation.h"
62 #import "TextureCacheCV.h"
63 #import "VideoFullscreenLayerManagerObjC.h"
64 #import "VideoTextureCopierCV.h"
65 #import "VideoTrackPrivateAVFObjC.h"
66 #import "WebCoreAVFResourceLoader.h"
67 #import "WebCoreCALayerExtras.h"
68 #import "WebCoreNSURLSession.h"
69 #import <JavaScriptCore/DataView.h>
70 #import <JavaScriptCore/JSCInlines.h>
71 #import <JavaScriptCore/TypedArrayInlines.h>
72 #import <JavaScriptCore/Uint16Array.h>
73 #import <JavaScriptCore/Uint32Array.h>
74 #import <JavaScriptCore/Uint8Array.h>
75 #import <functional>
76 #import <objc/runtime.h>
77 #import <pal/avfoundation/MediaTimeAVFoundation.h>
78 #import <pal/spi/cocoa/QuartzCoreSPI.h>
79 #import <pal/spi/mac/AVFoundationSPI.h>
80 #import <wtf/BlockObjCExceptions.h>
81 #import <wtf/ListHashSet.h>
82 #import <wtf/NeverDestroyed.h>
83 #import <wtf/OSObjectPtr.h>
84 #import <wtf/URL.h>
85 #import <wtf/text/CString.h>
86
87 #if ENABLE(AVF_CAPTIONS)
88 #include "TextTrack.h"
89 #endif
90
91 #import <AVFoundation/AVAssetImageGenerator.h>
92 #import <AVFoundation/AVAssetTrack.h>
93 #import <AVFoundation/AVMediaSelectionGroup.h>
94 #import <AVFoundation/AVMetadataItem.h>
95 #import <AVFoundation/AVPlayer.h>
96 #import <AVFoundation/AVPlayerItem.h>
97 #import <AVFoundation/AVPlayerItemOutput.h>
98 #import <AVFoundation/AVPlayerItemTrack.h>
99 #import <AVFoundation/AVPlayerLayer.h>
100 #import <AVFoundation/AVTime.h>
101
102 #if PLATFORM(IOS_FAMILY)
103 #import "WAKAppKitStubs.h"
104 #import <CoreImage/CoreImage.h>
105 #import <UIKit/UIDevice.h>
106 #import <mach/mach_port.h>
107 #import <pal/ios/UIKitSoftLink.h>
108 #else
109 #import <Foundation/NSGeometry.h>
110 #import <QuartzCore/CoreImage.h>
111 #endif
112
113 #if USE(VIDEOTOOLBOX)
114 #import <CoreVideo/CoreVideo.h>
115 #import <VideoToolbox/VideoToolbox.h>
116 #endif
117
118 #import "CoreVideoSoftLink.h"
119 #import "MediaRemoteSoftLink.h"
120
namespace std {
// WTF's HashSet iterator does not supply the typedefs std algorithms expect;
// provide value_type for the media-selection-option set used in this file.
// NOTE(review): presumably needed by a std algorithm applied to this HashSet —
// confirm before removing.
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
126
#if ENABLE(AVF_CAPTIONS)
// Note: This must be defined before our SOFT_LINK macros:
// Exposes the out-of-band identification properties of AVMediaSelectionOption
// that WebKit reads when matching tracks (see synchronizeTextTrackState()).
@class AVMediaSelectionOption;
@interface AVMediaSelectionOption (OutOfBandExtensions)
@property (nonatomic, readonly) NSString* outOfBandSource;
@property (nonatomic, readonly) NSString* outOfBandIdentifier;
@end
#endif
135
// Exposes AVURLAsset's resolvedURL property to this file.
@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end
140 #import <pal/cf/CoreMediaSoftLink.h>
141 #import <pal/cocoa/AVFoundationSoftLink.h>
142
// MTEnableCaption2015Behavior is optional; callers null-check
// MTEnableCaption2015BehaviorPtr() before invoking it (see mediaDescriptionForKind()).
SOFT_LINK_FRAMEWORK(MediaToolbox)
SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
145
146 #if PLATFORM(IOS_FAMILY)
147
#if HAVE(CELESTIAL)
// Soft-link the private Celestial framework for the AVController route
// description keys; the #defines below let the rest of the file refer to the
// soft-linked constants by their original names.
SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
#define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
#define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
#define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
#endif // HAVE(CELESTIAL)
157
158 #endif // PLATFORM(IOS_FAMILY)
159
160 using namespace WebCore;
161
// KVO context values passed when registering observers (see createAVPlayerLayer())
// so -observeValueForKeyPath:... can tell which kind of object changed.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
168
// Helper object that receives KVO callbacks, player-item notifications, and
// legible-output (caption) callbacks on behalf of a
// MediaPlayerPrivateAVFoundationObjC. It holds only a WeakPtr back to the
// player, so callbacks arriving after the player is destroyed are dropped.
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
{
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
    GenericTaskQueue<Timer, std::atomic<unsigned>> m_taskQueue;
    int m_delayCallbacks;
}
-(id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
@end
183
184 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate that forwards resource-loading requests to the
// owning player. Registered on globalLoaderDelegateQueue() (see createAVAssetForURL()).
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
    GenericTaskQueue<Timer, std::atomic<unsigned>> m_taskQueue;
}
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
@end
192 #endif
193
194 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// AVPlayerItemOutput pull delegate that notifies the owning player when new
// video output data becomes available. Callbacks are delivered on
// globalPullDelegateQueue().
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    WeakPtr<MediaPlayerPrivateAVFoundationObjC> m_player;
}
- (id)initWithPlayer:(WeakPtr<MediaPlayerPrivateAVFoundationObjC>&&)player;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
202 #endif
203
204 namespace WebCore {
// Maps an AVPlayerTimeControlStatus value to a human-readable string for logging.
static String convertEnumerationToString(AVPlayerTimeControlStatus enumerationValue)
{
    // Table indexed directly by the enumeration's numeric value.
    static const NeverDestroyed<String> values[] = {
        MAKE_STATIC_STRING_IMPL("AVPlayerTimeControlStatusPaused"),
        MAKE_STATIC_STRING_IMPL("AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate"),
        MAKE_STATIC_STRING_IMPL("AVPlayerTimeControlStatusPlaying"),
    };
    // Keep the table in sync with AVFoundation's enumerator values.
    static_assert(!static_cast<size_t>(AVPlayerTimeControlStatusPaused), "AVPlayerTimeControlStatusPaused is not 0 as expected");
    static_assert(static_cast<size_t>(AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate) == 1, "AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate is not 1 as expected");
    static_assert(static_cast<size_t>(AVPlayerTimeControlStatusPlaying) == 2, "AVPlayerTimeControlStatusPlaying is not 2 as expected");
    ASSERT(static_cast<size_t>(enumerationValue) < WTF_ARRAY_LENGTH(values));
    return values[static_cast<size_t>(enumerationValue)];
}
218 }
219
namespace WTF {
// Forward declaration of WTF's primary LogArgument template.
template<typename Type>
struct LogArgument;

// Lets AVPlayerTimeControlStatus values be passed directly to the WTF logging
// macros; delegates to convertEnumerationToString() above.
template <>
struct LogArgument<AVPlayerTimeControlStatus> {
    static String toString(const AVPlayerTimeControlStatus status)
    {
        return convertEnumerationToString(status);
    }
};
}; // namespace WTF
232
233 namespace WebCore {
234 using namespace PAL;
235
236 static NSArray *assetMetadataKeyNames();
237 static NSArray *itemKVOProperties();
238 static NSArray *assetTrackMetadataKeyNames();
239 static NSArray *playerKVOProperties();
240 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
241
242 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the shared serial queue on which every WebCoreAVFLoaderDelegate
// callback is delivered. Created lazily, exactly once.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t token;
    dispatch_once(&token, ^{
        queue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
252 #endif
253
254 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the shared serial queue on which every WebCoreAVFPullDelegate
// callback is delivered. Created lazily, exactly once.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t token;
    dispatch_once(&token, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
264 #endif
265
// Registers this engine with the MediaPlayer factory, but only when
// AVFoundation is available at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    ASSERT(AVAssetMIMETypeCache::singleton().isAvailable());
}
275
// Returns the AVAssetCache rooted at the given directory path. When no path is
// supplied, a "MediaCache" directory under the temporary directory is used.
static AVAssetCache *assetCacheForPath(const String& path)
{
    NSURL *cacheURL = path.isEmpty()
        ? [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES]
        : [NSURL fileURLWithPath:path isDirectory:YES];

    return [PAL::getAVAssetCacheClass() assetCacheWithURL:cacheURL];
}
287
// Collects the security origins of every entry currently in the media cache at
// the given path. Cache keys that are not valid URLs are skipped.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> origins;
    for (NSString *key in [assetCacheForPath(path) allKeys]) {
        auto keyAsURL = URL(URL(), key);
        if (!keyAsURL.isValid())
            continue;
        origins.add(SecurityOrigin::create(keyAsURL));
    }
    return origins;
}
298
// Converts a (non-null) NSDate into a WallTime.
static WallTime toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    return WallTime::fromRawSeconds([date timeIntervalSince1970]);
}
304
// Removes media-cache entries at |path| that were modified after
// |modifiedSince|. A non-positive |modifiedSince| clears the cache directory
// wholesale. Entries are purged both through the AVAssetCache API and by
// deleting on-disk "CachedMedia-" files directly.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, WallTime modifiedSince)
{
    AVAssetCache* assetCache = assetCacheForPath(path);

    // First pass: remove entries AVAssetCache itself knows about.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // "Clear everything" case: delete the entire cache directory and stop.
    if (modifiedSince <= WallTime::fromRawSeconds(0)) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }

    // Second pass: scan the cache directory for regular "CachedMedia-" files
    // newer than the cutoff. Collect first, then delete, so the directory is
    // not mutated while being enumerated.
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];

    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];

        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;

        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;

        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;

        [urlsToDelete addObject:fileURL];
    }

    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
346
// Removes media-cache entries at |path| whose keys correspond to one of the
// given security origins. Keys that are not valid URLs are left untouched.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    AVAssetCache* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        auto keyAsURL = URL(URL(), key);
        if (!keyAsURL.isValid())
            continue;
        if (origins.contains(SecurityOrigin::create(keyAsURL)))
            [assetCache removeEntryForKey:key];
    }
}
358
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
    // Each helper object gets only a WeakPtr back to this player, so callbacks
    // delivered after destruction are safely dropped.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithPlayer:makeWeakPtr(*this)]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithPlayer:makeWeakPtr(*this)]))
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithPlayer:makeWeakPtr(*this)]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
388
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
    // Invalidate outstanding WeakPtrs first so in-flight delegate/observer
    // callbacks become no-ops during the teardown below.
    weakPtrFactory().revokeAll();

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the resource loader delegate and invalidate any loaders still
    // servicing requests.
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutput setDelegate:nil queue:0];
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // cancelLoad() performs the remaining observer/KVO/item teardown.
    cancelLoad();
}
408
// Cancels any in-progress loading and tears down all AVFoundation objects,
// KVO registrations, and cached state. Safe to call more than once; each
// section is guarded by a null check.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    ALWAYS_LOG(LOGIDENTIFIER);
    tearDownVideoRendering();

    // Stop all notification delivery to, and disconnect, our observer.
    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }

    // Unregister every KVO observation made on the player item, then the
    // player itself, before releasing them.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        setShouldObserveTimeControlStatus(false);

        [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
#if !PLATFORM(IOS_FAMILY)
        [m_avPlayer setOutputContext:nil];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Tracks were observed for "enabled" changes; unregister before dropping.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Detach the audio source provider from the objects being destroyed.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
477
// True once createVideoLayer() has committed to creating the AVPlayerLayer.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
482
// True when a renderer suitable for painting into a GraphicsContext exists:
// either the video output (when available) or the image generator.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}
491
// Creates the renderer used for context painting: the video-output path when
// available, otherwise an image generator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
500
// Lazily creates the AVAssetImageGenerator used as the fallback context
// renderer. Configured for exact-time, clean-aperture, correctly-oriented
// frame extraction.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    using namespace PAL;
    INFO_LOG(LOGIDENTIFIER);

    if (m_imageGenerator || !m_avAsset)
        return;

    m_imageGenerator = [PAL::getAVAssetImageGeneratorClass() assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator setAppliesPreferredTrackTransform:YES];
    // Zero tolerance: request the frame at exactly the asked-for time.
    [m_imageGenerator setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator setRequestedTimeToleranceAfter:kCMTimeZero];
}
516
// Destroys whichever context renderer(s) exist; both calls are no-ops when the
// corresponding renderer was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
524
// Releases the image generator, if one exists.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Clear with nil rather than the literal 0, matching how other RetainPtrs
    // of Objective-C objects are cleared elsewhere in this file (see cancelLoad()).
    m_imageGenerator = nil;
}
534
// Schedules creation of the AVPlayerLayer (and, where supported, the video
// output) on the main thread, then notifies the client that the rendering mode
// changed.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = makeWeakPtr(*this)] {
        if (!weakThis)
            return;

        // Re-check the preconditions: state may have changed while the task
        // was queued.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX) && HAVE(AVFOUNDATION_VIDEO_OUTPUT)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
559
// Creates and configures the AVPlayerLayer, registers for readiness KVO, and
// hands the layer to the fullscreen layer manager.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([PAL::allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readiness so hasAvailableVideoFrame() can report when the layer
    // can display content (updates m_cachedIsReadyForDisplay).
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    ALWAYS_LOG(LOGIDENTIFIER);

    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    // Propagate the current picture-in-picture state to the new layer.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
}
584
// Tears down the AVPlayerLayer created by createAVPlayerLayer(), unregistering
// the "readyForDisplay" observer first so no KVO fires during release.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];
    m_videoFullscreenLayerManager->didDestroyVideoLayer();

    m_videoLayer = nil;
}
598
// Returns the stream's start date in milliseconds since the epoch, or an
// invalid time when the media carries no date metadata.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // The item's current date advances with playback, so subtracting the
    // current playback position recovers the start date.
    double dateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media file has no start date; no live
    // stream began during the 1970 epoch, so 0 unambiguously means "none".
    if (!dateMilliseconds)
        return MediaTime::invalidTime();

    double positionMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round to compensate for the sub-millisecond error the subtraction introduces.
    return MediaTime::createWithDouble(round(dateMilliseconds - positionMilliseconds));
}
613
// Reports whether a video frame is ready to be shown for the current time.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    // When rendering to a layer, readiness is tracked via KVO on the layer's
    // "readyForDisplay" property (see createAVPlayerLayer()).
    if (currentRenderingMode() == MediaRenderingToLayer)
        return m_cachedIsReadyForDisplay;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Otherwise a frame is available if the video output already produced a
    // pixel buffer, or has a new one for the current item time.
    if (m_videoOutput && (m_lastPixelBuffer || [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]]))
        return true;
#endif

    return m_videoFrameHasDrawn;
}
626
627 #if ENABLE(AVF_CAPTIONS)
// Maps an out-of-band text-track kind to the AVMediaCharacteristic array used
// for AVOutOfBandAlternateTrackMediaCharactersticsKey. When the 2015 manual
// caption-selection behavior is enabled, every track is marked auxiliary.
// Uses @[] literals consistently; the original mixed literal and
// arrayWithObjects: styles.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Caption)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Subtitle)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Description)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];

    if (kind == PlatformTextTrack::Forced)
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];

    return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
}
649     
// Forwards an out-of-band track mode change to the base class.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
654     
// Pushes the mode of every out-of-band source from the MediaPlayer down into
// the matching out-of-band text track, pairing them by unique identifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& trackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        auto* outOfBandTrack = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOption> selectionOption = outOfBandTrack->mediaSelectionOption();

        for (auto& source : trackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(source->uniqueId()).createCFString();
            if (![[selectionOption.get() outOfBandIdentifier] isEqual:(__bridge NSString *)uniqueID.get()])
                continue;

            // Unrecognized source modes fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (source->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (source->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
685 #endif
686
687
// Returns the canonical form of |url| as computed by the NSURLProtocol
// machinery, falling back to the original URL whenever canonicalization
// is not possible.
static NSURL *canonicalURL(const URL& url)
{
    NSURL *cocoaURL = url;
    if (url.isEmpty())
        return cocoaURL;

    auto request = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    if (!request)
        return cocoaURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    return canonicalRequest ? [canonicalRequest URL] : cocoaURL;
}
704
705 #if PLATFORM(IOS_FAMILY)
// Converts a WebCore Cookie into an NSHTTPCookie (used for
// AVURLAssetHTTPCookiesKey in createAVAssetForURL()). The expires value is
// divided by 1000, so it is evidently stored in milliseconds.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([[NSMutableDictionary alloc] init]);
    [properties setDictionary:@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    }];
    // The boolean attributes are only added when set.
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
723 #endif
724
725 void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
726 {
727     if (m_avAsset)
728         return;
729
730     ALWAYS_LOG(LOGIDENTIFIER);
731
732     setDelayCallbacks(true);
733
734     RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    
735
736     [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];
737
738     RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);
739
740     String referrer = player()->referrer();
741     if (!referrer.isEmpty())
742         [headerFields.get() setObject:referrer forKey:@"Referer"];
743
744     String userAgent = player()->userAgent();
745     if (!userAgent.isEmpty())
746         [headerFields.get() setObject:userAgent forKey:@"User-Agent"];
747
748     if ([headerFields.get() count])
749         [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];
750
751     if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
752         [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];
753
754     if (PAL::canLoad_AVFoundation_AVURLAssetUseClientURLLoadingExclusively())
755         [options setObject:@YES forKey:AVURLAssetUseClientURLLoadingExclusively];
756 #if PLATFORM(IOS_FAMILY)
757     else if (PAL::canLoad_AVFoundation_AVURLAssetRequiresCustomURLLoadingKey())
758         [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
759     // FIXME: rdar://problem/20354688
760     String identifier = player()->sourceApplicationIdentifier();
761     if (!identifier.isEmpty())
762         [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
763 #endif
764
765     auto type = player()->contentMIMEType();
766     if (PAL::canLoad_AVFoundation_AVURLAssetOutOfBandMIMETypeKey() && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
767         auto codecs = player()->contentTypeCodecs();
768         if (!codecs.isEmpty()) {
769             NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
770             [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
771         } else
772             [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
773     }
774
775 #if ENABLE(AVF_CAPTIONS)
776     const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
777     if (!outOfBandTrackSources.isEmpty()) {
778         RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
779         for (auto& trackSource : outOfBandTrackSources) {
780             RetainPtr<CFStringRef> label = trackSource->label().createCFString();
781             RetainPtr<CFStringRef> language = trackSource->language().createCFString();
782             RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
783             RetainPtr<CFStringRef> url = trackSource->url().createCFString();
784             [outOfBandTracks.get() addObject:@{
785                 AVOutOfBandAlternateTrackDisplayNameKey: (__bridge NSString *)label.get(),
786                 AVOutOfBandAlternateTrackExtendedLanguageTagKey: (__bridge NSString *)language.get(),
787                 AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
788                 AVOutOfBandAlternateTrackIdentifierKey: (__bridge NSString *)uniqueID.get(),
789                 AVOutOfBandAlternateTrackSourceKey: (__bridge NSString *)url.get(),
790                 AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
791             }];
792         }
793
794         [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
795     }
796 #endif
797
798 #if PLATFORM(IOS_FAMILY)
799     String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
800     if (!networkInterfaceName.isEmpty())
801         [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
802 #endif
803
804 #if PLATFORM(IOS_FAMILY)
805     Vector<Cookie> cookies;
806     if (player()->getRawCookies(url, cookies)) {
807         RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
808         for (auto& cookie : cookies)
809             [nsCookies addObject:toNSHTTPCookie(cookie)];
810
811         if (PAL::canLoad_AVFoundation_AVURLAssetHTTPCookiesKey())
812             [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
813     }
814 #endif
815
816     bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
817     [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
818     
819     if (usePersistentCache)
820         [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];
821
822     NSURL *cocoaURL = canonicalURL(url);
823     m_avAsset = adoptNS([PAL::allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);
824
825 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
826     AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
827     [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
828
829     if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
830         && [resourceLoader respondsToSelector:@selector(setURLSession:)]
831         && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
832         && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
833         RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
834         if (mediaResourceLoader)
835             resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
836     }
837
838 #endif
839
840     m_haveCheckedPlayability = false;
841
842     setDelayCallbacks(false);
843 }
844
// Installs |item| as the AVPlayer's current item. The replacement is issued
// directly when already on the main thread; otherwise it is bounced to the
// main queue asynchronously, retaining both player and item until it runs.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItem *item)
{
    // Nothing to do until createAVPlayer() has run.
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Keep the player and item alive across the asynchronous hop to the main queue.
    RetainPtr<AVPlayer> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItem> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
861
// Lazily creates the AVPlayer, registers KVO observers for all player
// properties we track, applies cached state (external playback, sleep,
// muted), and attaches the player item/layer if they already exist.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    // Suppress observer callbacks while the player is being configured.
    setDelayCallbacks(true);

    m_avPlayer = adoptNS([PAL::allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

    setShouldObserveTimeControlStatus(true);

    // WebCore drives media selection itself (see text/audio track handling).
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS_FAMILY)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so setShouldPlayToPlaybackTarget(true)
        // doesn't early-return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS_FAMILY) && !PLATFORM(IOS_FAMILY_SIMULATOR) && !PLATFORM(MACCATALYST)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything.
        m_muted = false;
        [m_avPlayer.get() setMuted:m_muted];
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    // Attach a previously-created item now that the player exists.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
910
// Lazily creates the AVPlayerItem from the current asset, wires up the
// end-of-playback notification, KVO observers, pitch algorithm, legible
// (caption) output, audio provider, and video output.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    // Suppress observer callbacks while the item is being configured.
    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([PAL::allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

    // How far ahead of playback the legible-output delegate is invoked for cues.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([PAL::allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    // WebCore renders captions itself, so suppress AVFoundation's rendering.
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the new item/track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#endif

    setDelayCallbacks(false);
}
958
// Kicks off a one-time asynchronous load of the asset's "playable" and
// "tracks" keys; posts AssetPlayabilityKnown on the main thread when done.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Only ever issue the load once per asset.
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    auto weakThis = makeWeakPtr(*this);

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        // The completion handler may run on an arbitrary queue; the weak
        // pointer guards against |this| being destroyed in the meantime.
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
975
// Asynchronously loads the asset metadata keys, then each track's metadata
// keys, using a dispatch group so metadataLoaded is delivered only after
// every outstanding load has completed.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    INFO_LOG(LOGIDENTIFIER);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    // Balanced by the dispatch_group_leave at the end of the main-thread lambda.
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = makeWeakPtr(*this);
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only fan out to per-track loads once "tracks" itself is loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    // One enter per track, balanced in each track's completion handler.
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    // Fires once all enters above have been balanced.
    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1005
// Maps the cached AVPlayerItem KVO state onto WebCore's item status.
// The order of the checks establishes a priority: terminal states first
// (unknown/failed), then buffering states, falling through to ready-to-play.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1024
// Returns the layer used for inline (non-fullscreen) video rendering.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
1029
// Refreshes the still image shown inline while video plays fullscreen.
// Requires a video output; without one this is a no-op.
void MediaPlayerPrivateAVFoundationObjC::updateVideoFullscreenInlineImage()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Grab the current frame synchronously so the inline image is up to date.
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->updateVideoFullscreenInlineImage(m_lastImage);
#endif
}
1037
// Hands the fullscreen layer to the layer manager, providing the most recent
// frame (when video output is available) so the transition doesn't flash.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, Function<void()>&& completionHandler)
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
#else
    // No video output available: transition without a placeholder image.
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), nil);
#endif
    updateDisableExternalPlayback();
}
1048
// Forwards the fullscreen video frame rectangle to the layer manager.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1053
// Records the requested fullscreen gravity and applies the corresponding
// AVLayerVideoGravity to the video layer, skipping redundant updates.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    // Translate WebCore's gravity enum into AVFoundation's string constant.
    NSString *layerGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        layerGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        layerGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    // Avoid touching the layer (and caption bounds) when nothing changed.
    if ([m_videoLayer videoGravity] == layerGravity)
        return;

    [m_videoLayer setVideoGravity:layerGravity];
    syncTextTrackBounds();
}
1077
// Enables/disables picture-in-picture on the video layer for the requested
// fullscreen mode (iOS-family only, excluding watchOS).
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    // setPIPModeEnabled: is not available on all OS versions, hence the check.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1088     
// Re-evaluates whether external playback should be disabled when the
// fullscreen standby state changes (iOS-family only, excluding watchOS).
void MediaPlayerPrivateAVFoundationObjC::videoFullscreenStandbyChanged()
{
#if PLATFORM(IOS_FAMILY) && !PLATFORM(WATCHOS)
    updateDisableExternalPlayback();
#endif
}
1095
1096 #if PLATFORM(IOS_FAMILY)
// Returns the most recently cached timed metadata, or nil when none has
// been received yet (RetainPtr::get() yields nil for a null pointer).
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData.get();
}
1103
// Returns the player item's access log as a string, or the empty string
// when there is no item. A nil log yields a null String via nil messaging.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();
    
    AVPlayerItemAccessLog *log = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1114
// Returns the player item's error log as a string, or the empty string
// when there is no item. A nil log yields a null String via nil messaging.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *log = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1125 #endif
1126
// Playback reached the end: clear the pending-play flag before letting the
// base class run its end-of-playback handling.
void MediaPlayerPrivateAVFoundationObjC::didEnd()
{
    m_requestedPlaying = false;
    MediaPlayerPrivateAVFoundation::didEnd();
}
1132
// Shows or hides the video layer without implicit CA animations.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Disable implicit animations so visibility changes take effect immediately.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1141     
// Starts playback by applying the previously requested rate to the player.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    if (!metaDataAvailable())
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    m_requestedPlaying = true;
    // A non-zero rate is what actually makes AVPlayer play.
    setPlayerRate(m_requestedRate);
}
1152
// Pauses playback by driving the player rate to zero.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    if (!metaDataAvailable())
        return;

    ALWAYS_LOG(LOGIDENTIFIER);

    m_requestedPlaying = false;
    setPlayerRate(0);
}
1163
// Reports paused state from the cached KVO-observed time control status.
bool MediaPlayerPrivateAVFoundationObjC::platformPaused() const
{
    return m_cachedTimeControlStatus == AVPlayerTimeControlStatusPaused;
}
1168
// Returns the media duration, preferring the player item's value over the
// asset's, and mapping CMTime's indefinite/invalid states onto MediaTime.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return PAL::toMediaTime(cmDuration);

    // Indefinite duration (e.g. live streams) is surfaced as +infinity.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();
}
1193
// Returns the current playhead position, or zero when no item/metadata is
// available or the item reports a non-numeric time. Negative times are
// clamped to zero.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerTime))
        return MediaTime::zeroTime();

    // Clamp any negative value reported by AVFoundation up to zero.
    return std::max(PAL::toMediaTime(playerTime), MediaTime::zeroTime());
}
1205
// Performs an asynchronous seek on the player item with the given tolerances,
// flushing partial metadata cues first and suppressing time-control-status
// observation until the seek's completion handler re-enables it.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;
    
    auto weakThis = makeWeakPtr(*this);

    // The seek churns timeControlStatus; ignore those transient changes.
    setShouldObserveTimeControlStatus(false);
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            // Re-arm observation before reporting completion.
            _this->setShouldObserveTimeControlStatus(true);
            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1238
// Applies the requested volume to the AVPlayer. On the iOS family volume is
// deliberately not set here (this override is a no-op there).
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS_FAMILY)
    UNUSED_PARAM(volume);
    return;
#else

    if (!m_avPlayer)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, volume);

    [m_avPlayer.get() setVolume:volume];
#endif
}
1254
// Records the muted state and pushes it to the AVPlayer when one exists;
// createAVPlayer() applies m_muted later if the player doesn't exist yet.
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    if (muted == m_muted)
        return;

    ALWAYS_LOG(LOGIDENTIFIER, muted);

    m_muted = muted;

    if (m_avPlayer)
        [m_avPlayer.get() setMuted:m_muted];
}
1269
// Intentionally does nothing beyond logging: caption visibility is handled
// elsewhere (WebCore renders captions itself via the legible output).
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    ALWAYS_LOG(LOGIDENTIFIER, closedCaptionsVisible);
}
1279
// Remembers the requested playback rate; only applies it immediately when
// playback has been requested (otherwise platformPlay() applies it later).
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    m_requestedRate = rate;
    if (m_requestedPlaying)
        setPlayerRate(rate);
}
1286
// Sets the AVPlayer's rate while temporarily suppressing time-control-status
// observation, then re-reads the status so the cache stays consistent.
void MediaPlayerPrivateAVFoundationObjC::setPlayerRate(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    // Suppress observation so the rate change doesn't trigger a spurious callback.
    setShouldObserveTimeControlStatus(false);
    [m_avPlayer setRate:rate];
    // Refresh the cached status manually since the observer was disabled.
    m_cachedTimeControlStatus = [m_avPlayer timeControlStatus];
    setShouldObserveTimeControlStatus(true);
    setDelayCallbacks(false);
}
1297
// Returns the cached playback rate, or 0 before metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1305
// Returns the item's seekableTimeRangesLastModifiedTime where the SDK
// provides it (macOS 10.13+/iOS 11+); 0 otherwise.
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    // Messaging a nil m_avPlayerItem returns 0.
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1314
// Returns the item's liveUpdateInterval where the SDK provides it
// (macOS 10.13+/iOS 11+); 0 otherwise.
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS_FAMILY) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    // Messaging a nil m_avPlayerItem returns 0.
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1323
// Selects the audio pitch algorithm on the item: spectral (pitch-preserving)
// when pitch should be preserved, varispeed otherwise. No-op without an item.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1329
// Converts the cached loaded (buffered) CMTimeRanges into PlatformTimeRanges,
// skipping invalid or empty ranges. Returns an empty set without an item.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto timeRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return timeRanges;

    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange))
            timeRanges->add(PAL::toMediaTime(timeRange.start), PAL::toMediaTime(CMTimeRangeGetEnd(timeRange)));
    }
    return timeRanges;
}
1344
// Returns the earliest start among the cached seekable ranges, or zero when
// there are no (valid) ranges.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        MediaTime rangeStart = PAL::toMediaTime(range.start);
        if (rangeStart < earliestStart)
            earliestStart = rangeStart;
    }
    // If every range was invalid or empty, report zero rather than +infinity.
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1364
// Returns the latest end among the seekable ranges, lazily (re)fetching the
// ranges from the item when the cache is empty. Zero when no valid ranges.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // m_cachedSeekableRanges is mutable: populate it on demand from the item.
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime maxTimeSeekable;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        MediaTime endOfRange = PAL::toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeSeekable < endOfRange)
            maxTimeSeekable = endOfRange;
    }
    return maxTimeSeekable;
}
1382
// Returns the latest end among the cached loaded (buffered) ranges, or zero
// when nothing has been buffered yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = PAL::toMediaTime(CMTimeRangeGetEnd(range));
        if (latestEnd < rangeEnd)
            latestEnd = rangeEnd;
    }

    return latestEnd;
}
1401
// Returns the total sample-data size across all tracks, computed lazily and
// cached in the mutable m_cachedTotalBytes. 0 before metadata is available.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    // Serve from the cache once computed.
    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    for (AVPlayerItemTrack *thisTrack in m_cachedTracks.get())
        m_cachedTotalBytes += [[thisTrack assetTrack] totalSampleDataLength];

    return m_cachedTotalBytes;
}
1415
// Takes ownership of the given asset, replacing any existing one.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id>&& asset)
{
    m_avAsset = WTFMove(asset);
}
1420
// Derives the asset's aggregate load status: checks each metadata key's load
// state (any failure/cancellation/in-progress short-circuits), then verifies
// hardware-decode support for the tracks before reporting Playable vs Loaded.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];

        if (error)
            ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // Skip the (potentially expensive) per-track hardware check when allowed.
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    // Playable only when AVFoundation says so AND every track can be decoded.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1461
// Returns the NSError code for the asset's "playable" key load, or 0 when
// there is no asset or no error was reported.
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *loadError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&loadError];
    // Messaging nil returns 0, so a missing error yields code 0.
    return [loadError code];
}
1471
// Paints the current video frame into the graphics context, preferring the
// video output path when a frame is available and falling back to the image
// generator. ObjC exceptions from AVFoundation are contained by the
// BEGIN/END_BLOCK_OBJC_EXCEPTIONS bracketing.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that at least one frame has been drawn (used by callers elsewhere).
    m_videoFrameHasDrawn = true;
}
1492
// Best-effort paint entry point: delegates to paintCurrentFrameInContext
// unless rendering already happens through a layer or no renderer exists.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if we already have an image generator or video output available.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1508
// Paints a snapshot of the current time via AVAssetImageGenerator. The
// context is translated/flipped because CGContextDrawImage draws with a
// bottom-left origin.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    INFO_LOG(LOGIDENTIFIER);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (image) {
        GraphicsContextStateSaver stateSaver(context);
        // Flip vertically so the CG image lands right-side-up in the context.
        context.translate(rect.x(), rect.y() + rect.height());
        context.scale(FloatSize(1.0f, -1.0f));
        context.setImageInterpolationQuality(InterpolationLow);
        IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
        CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    }
}
1523
// Synchronously generates a CGImage for |time| via the (lazily created)
// AVAssetImageGenerator, constrained to |rect|'s size and converted to sRGB.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // Timescale 600 is a conventional multiple of common media frame rates.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "creating image took ", (MonotonicTime::now() - start).seconds());
#endif

    return image;
}
1544
// Fills |supportedTypes| with the MIME types AVFoundation can decode.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    auto& typeCache = AVAssetMIMETypeCache::singleton();
    supportedTypes = typeCache.types();
}
1549
1550 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// FairPlay (both legacy identifiers) and Clear Key are the supported key systems.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1557 #endif
1558
// Answers whether this engine can play content described by |parameters|:
// rejects MSE/MediaStream and unsupported containers, returns MayBeSupported
// when no codecs are given (per spec guidance), and otherwise asks
// AVURLAsset about the full "type; codecs=..." string.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    if (!staticMIMETypeList().contains(containerType) && !AVAssetMIMETypeCache::singleton().canDecodeType(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [PAL::getAVURLAssetClass() isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1588
// Answers whether the given key system (and optional MIME type) is supported
// for legacy encrypted media. Returns false when the feature is disabled,
// the key system is empty/unknown, or the MIME type cannot be decoded.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Previously returned MediaPlayer::IsNotSupported — an enum value —
        // from a bool function; it only behaved correctly because that
        // enumerator converts to false. Return false explicitly.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // The type must be either in the static list or decodable by AVFoundation.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVAssetMIMETypeCache::singleton().canDecodeType(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1614
1615 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1616 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Answers an AVAssetResourceLoadingRequest with the requested byte range of
// `keyData`, then marks the request finished. Invalid ranges finish the
// request with a nil error.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    // Advertise the full key length and byte-range support first.
    if (AVAssetResourceLoadingContentInformationRequest *contentInfo = [request contentInformationRequest]) {
        [contentInfo setContentLength:keyData->byteLength()];
        [contentInfo setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long rangeStart = [dataRequest currentOffset];
        // Clamp the end of the range to the key's length.
        long long rangeEnd = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        if (rangeStart < 0 || rangeEnd < 0 || rangeStart >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        // ArrayBuffer::slice() takes int offsets; the key is expected to be small.
        ASSERT(rangeStart <= std::numeric_limits<int>::max());
        ASSERT(rangeEnd <= std::numeric_limits<int>::max());
        auto slicedKeyData = keyData->slice(static_cast<int>(rangeStart), static_cast<int>(rangeEnd));
        RetainPtr<NSData> responseData = adoptNS([[NSData alloc] initWithBytes:slicedKeyData->data() length:slicedKeyData->byteLength()]);
        [dataRequest respondWithData:responseData.get()];
    }

    [request finishLoading];
}
1642 #endif
1643
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    // Returning true tells AVFoundation we will satisfy (or cancel) this
    // request asynchronously; returning false lets AVFoundation fail it.
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
    // "skd" URLs are FairPlay Streaming key requests.
    if (scheme == "skd") {
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        auto initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        auto initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        // Little-endian 32-bit size prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        auto keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        // NOTE(review): the divisor sizeof(unsigned char) is 1, so the whole
        // UTF-16 keyURI is copied; the division looks vestigial — confirm.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        auto initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        if (!player()->keyNeeded(initData.ptr()))
            return false;
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
        // With a modern CDM instance attached, AVContentKeySession delivers
        // the key; just tag the request as a content-key request and finish it.
        if (m_cdmInstance) {
            avRequest.contentInformationRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
            [avRequest finishLoading];
            return true;
        }

        RetainPtr<NSData> keyURIData = [keyURI dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
        m_keyID = SharedBuffer::create(keyURIData.get());
        player()->initializationDataEncountered("skd"_s, m_keyID->tryCreateArrayBuffer());
        setWaitingForKey(true);
#endif
        // The request is answered later, when a key arrives (see keyAdded()
        // and attemptToDecryptWithInstance()).
        m_keyURIToRequestMap.set(keyURI, avRequest);

        return true;
    }

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        auto encodedKeyId = UTF8Encoding().encode(keyID, UnencodableHandling::URLEncodedEntities);

        auto initData = Uint8Array::create(encodedKeyId.size());
        initData->setRange(encodedKeyId.data(), encodedKeyId.size(), 0);

        // Answer immediately when the key is already cached by the player.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.ptr()))
            return false;

        // Answered later by keyAdded() once the page supplies the key.
        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif
#endif

    // Everything else is loaded through WebKit's own resource loader.
    auto resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add((__bridge CFTypeRef)avRequest, resourceLoader.copyRef());
    resourceLoader->startLoading();
    return true;
}
1713
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // AVFoundation cancelled this resource request: stop the in-flight load
    // if we still have a loader for it. The map entry itself is removed in
    // didStopLoadingRequest(). (Removed an unused local that computed the
    // request URL's scheme and never read it.)
    if (auto* resourceLoader = m_resourceLoaderMap.get((__bridge CFTypeRef)avRequest))
        resourceLoader->stopLoading();
}
1723
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    // Drop our loader for this request; releasing the WebCoreAVFResourceLoader
    // tears down its underlying load.
    m_resourceLoaderMap.remove((__bridge CFTypeRef)avRequest);
}
1728 #endif
1729
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    // This engine can only be registered when both the AVFoundation and
    // CoreMedia frameworks can be soft-linked at runtime.
    return PAL::isAVFoundationFrameworkAvailable() && isCoreMediaFrameworkAvailable();
}
1734
// Maps a requested time to the nearest time the engine can actually seek to.
// Currently the identity mapping; see the FIXME below.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1743
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
    // Never serve a cached currentTime; always query AVFoundation.
    return 0;
}
1748
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;

    NSString *videoGravity = AVLayerVideoGravityResize;
    if (shouldMaintainAspectRatio())
        videoGravity = AVLayerVideoGravityResizeAspect;

    // Apply without implicit Core Animation transitions.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer setVideoGravity:videoGravity];
    [CATransaction commit];
}
1765
// Returns the first track in `tracks` whose isEnabled flag is set, or nil
// when no track is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack *track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1775
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so we can fire
    // characteristicsChanged() below if this update changed it.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch the several characteristic changes made below into one notification.
    setDelayCharacteristicsChangedNotification(true);

    // NOTE(review): haveCCTrack is computed in the loop below but never
    // consulted afterwards; captions are detected via the legible group.
    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify the enabled AVPlayerItemTracks by media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

        updateAudioTracks();
        updateVideoTracks();

        // A selected media-selection option counts even when no corresponding
        // AVPlayerItemTrack is enabled.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

    // Captions come from the legible media-selection group.
    AVMediaSelectionGroup *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[PAL::getAVMediaSelectionGroupClass() playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }

    setHasClosedCaptions(hasCaptions);

    INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio tap pointed at the (possibly new) audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1866
void MediaPlayerPrivateAVFoundationObjC::updateRotationSession()
{
    // Combine the asset-level preferred transform with that of the first
    // enabled video track.
    AffineTransform combinedTransform = m_avAsset.get().preferredTransform;
    FloatSize trackNaturalSize;
    if (auto* videoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual])) {
        trackNaturalSize = FloatSize(videoTrack.naturalSize);
        combinedTransform *= videoTrack.preferredTransform;
    }

    // No rotation needed: drop any existing session.
    if (combinedTransform.isIdentity()) {
        m_imageRotationSession = nullptr;
        return;
    }

    // Keep the current session when it already matches this transform and size.
    bool sessionIsUpToDate = m_imageRotationSession
        && m_imageRotationSession->transform()
        && m_imageRotationSession->transform().value() == combinedTransform
        && m_imageRotationSession->size() == trackNaturalSize;
    if (sessionIsUpToDate)
        return;

    m_imageRotationSession = std::make_unique<ImageRotationSessionVT>(WTFMove(combinedTransform), trackNaturalSize, kCVPixelFormatType_32BGRA, ImageRotationSessionVT::IsCGImageCompatible::Yes);
}
1889
1890 #if ENABLE(VIDEO_TRACK)
1891
// Diffs `oldItems` (track-private wrappers) against the AVPlayerItemTracks of
// media type `trackType` in `tracks`. Updates `oldItems` in place and notifies
// `player` of each removal/addition via the supplied member-function pointers.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Only tracks of the requested media type participate in the diff.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition old items into those whose track disappeared and those kept.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly appearing AVPlayerItemTrack in a track-private object.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify only after `oldItems` is consistent, so re-entrant callers
    // observe the final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
1935
// Variant of the above operating on media-selection options rather than
// AVPlayerItemTracks: refreshes `group`'s options for `characteristics`,
// diffs them against `oldItems`, updates `oldItems` in place, and notifies
// `player` of removals/additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Current options that are still backed by a live AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOption* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Options referenced by the existing track-private objects.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Items whose option disappeared (or was never set) are removed; the
    // rest survive into the replacement list.
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify only after `oldItems` is consistent, so re-entrant callers
    // observe the final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
1995
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    // Prefer media-selection based audio tracks when the item exposes an
    // audible selection group; otherwise fall back to raw AVPlayerItemTracks.
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroup *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh each surviving track's cached properties from its backing track.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
#endif
}
2020
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    // Diff the raw AVPlayerItemTracks first, then (when the item exposes a
    // visual media-selection group) the selection-based tracks as well.
    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

    if (!m_visualGroup) {
        if (AVMediaSelectionGroup *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

    // Refresh each surviving track's cached properties from its backing track.
    // FIX: this loop previously iterated m_audioTracks — a copy/paste slip
    // from updateAudioTracks() — leaving video track properties stale.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
#endif
}
2044
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
    // Fullscreen playback needs captions composited via a representation layer.
    return m_videoFullscreenLayerManager->requiresTextTrackRepresentation();
}
2049
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
    // Keep the caption representation layer aligned with the video layer.
    m_videoFullscreenLayerManager->syncTextTrackBounds();
}
2054
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    // Forwarded to the fullscreen layer manager, which owns the platform layer.
    m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
}
2059
2060 #endif // ENABLE(VIDEO_TRACK)
2061
2062 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2063
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    // Lazily create the Web Audio tap on the current player item, feeding it
    // the first enabled audible track (nil until the asset's tracks load).
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
    return m_provider.get();
}
2072
2073 #endif
2074
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    // The rotation session depends on the (possibly changed) track transform.
    updateRotationSession();
    setNaturalSize(m_cachedPresentationSize);
}
2083
void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
{
    // Mirror AVAsset's resolved (post-redirect) URL; clear it with no asset.
    setResolvedURL(m_avAsset ? URL([m_avAsset resolvedURL]) : URL());
}
2088
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
    // CORS state is only tracked when media loads go through WebKit's
    // NSURLSession-based loader.
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled())
        return false;
    if (![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if (![session isKindOfClass:[WebCoreNSURLSession class]])
        return false;

    return session.didPassCORSAccessChecks;
}
2102
Optional<bool> MediaPlayerPrivateAVFoundationObjC::wouldTaintOrigin(const SecurityOrigin& origin) const
{
    // Without WebKit's NSURLSession-backed loader we conservatively report
    // "does not taint".
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled())
        return false;
    if (![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session isKindOfClass:[WebCoreNSURLSession class]])
        return [session wouldTaintOrigin:origin];

    // Unknown session type: we cannot tell.
    return WTF::nullopt;
}
2116
2117
2118 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2119
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    INFO_LOG(LOGIDENTIFIER);

    // Needs a player item, and is idempotent once an output exists.
    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // VideoToolbox path: let the output pick its native format; the
    // PixelBufferConformer converts later.
    NSDictionary* attributes = nil;
#else
    // Modernized to the dictionary literal, matching updateLastImage().
    NSDictionary* attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([PAL::allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Deliver new-frame notifications on the shared pull-delegate queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
}
2139
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    // Detach the output from the player item before dropping our reference.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    INFO_LOG(LOGIDENTIFIER);

    m_videoOutput = nil; // Was `= 0`; nil is the idiomatic null for an ObjC object pointer.
}
2152
// Pulls the pixel buffer for the item's current time out of the video output,
// rotating it if a rotation session is active. Returns true when a new buffer
// was taken (which also invalidates the cached CGImage).
bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
{
    if (!m_avPlayerItem)
        return false;

    // Lazily create the output the first time a frame is needed.
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return false;

    m_lastPixelBuffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);

    if (m_imageRotationSession)
        m_lastPixelBuffer = m_imageRotationSession->rotate(m_lastPixelBuffer.get());

    // The cached image no longer matches the buffer; recreate it on demand.
    m_lastImage = nullptr;
    return true;
}
2175
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously decoded image counts as an available frame.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2189
// Refreshes m_lastImage from the latest pixel buffer. With
// UpdateType::UpdateSynchronously this may block (up to 1 second) waiting
// for the video output to produce a frame.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage(UpdateType type)
{
#if HAVE(CORE_VIDEO)
    if (!m_avPlayerItem)
        return;

    if (type == UpdateType::UpdateSynchronously && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
        return;

    // Lazily create the conformer that turns CVPixelBuffers into CGImages.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        NSDictionary *attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
    }

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "creating buffer took ", (MonotonicTime::now() - start).seconds());
#endif
#endif // HAVE(CORE_VIDEO)
}
2225
// Paints the current video frame into `context`, scaled to `outputRect`,
// waiting synchronously for a frame if necessary.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    updateLastImage(UpdateType::UpdateSynchronously);
    if (!m_lastImage)
        return;

    // Nothing to paint when the asset has no enabled video track.
    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Draw the whole decoded frame scaled into outputRect.
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), outputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2247
// Copies the current video frame into the caller's GL texture (the WebGL
// video-to-texture upload path). Returns false when no frame is available.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    ASSERT(context);

    updateLastPixelBuffer();
    if (!m_lastPixelBuffer)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t height = CVPixelBufferGetHeight(m_lastPixelBuffer.get());

    // The copier holds the intermediate GL state; create it once per context.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2264
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    // Best-effort: may return the previous frame (or null) when no new pixel
    // buffer is available right now.
    updateLastImage();
    return m_lastImage;
}
2270
// Blocks the calling thread until the video output reports new media data
// (signalled from outputMediaDataWillChange()), or until a 1 second timeout.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Ask for a callback as soon as any new media data becomes available.
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    bool satisfied = m_videoOutputSemaphore.waitFor(1_s);
    if (!satisfied)
        ERROR_LOG(LOGIDENTIFIER, "timed out");
}
2280
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput *)
{
    // Delivered on the pull-delegate queue; releases the synchronous waiter
    // in waitForVideoOutputMediaDataWillChange().
    m_videoOutputSemaphore.signal();
}
2285
2286 #endif
2287
2288 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2289
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    // Removes and returns the pending loading request for keyURI, if any.
    return m_keyURIToRequestMap.take(keyURI);
}
2294
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    // Answer every pending loading request whose key is now in the player's
    // cache. Satisfied keys are collected first because the map must not be
    // mutated while it is being iterated.
    Vector<String> satisfiedKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        satisfiedKeyIds.append(entry.key);
    }

    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2314
void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
{
    // Only the session created by createSession() may be removed.
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2320
// Creates a legacy CDM session for keySystem, or null when unsupported.
// Ownership passes to the caller; we keep only a weak back-reference.
std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = makeWeakPtr(*session);
    return WTFMove(session);
}
2329 #endif
2330
2331 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Propagates an output-protection (e.g. HDCP) state change to whichever
// encrypted-media implementation is active.
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // The 'HDCP' four-char code signals the protection failure to the session.
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_cdmInstance)
        m_cdmInstance->outputObscuredDueToInsufficientExternalProtectionChanged(newValue);
#elif !ENABLE(LEGACY_ENCRYPTED_MEDIA)
    UNUSED_PARAM(newValue);
#endif
}
2346 #endif
2347
2348 #if ENABLE(ENCRYPTED_MEDIA)
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    // Only FairPlay Streaming instances are meaningful for this engine.
    if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
        return;

    auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    if (&fpsInstance == m_cdmInstance)
        return;

    // Replace any previously attached instance.
    if (m_cdmInstance)
        cdmInstanceDetached(*m_cdmInstance);

    m_cdmInstance = &fpsInstance;
#else
    UNUSED_PARAM(instance);
#endif
}
2367
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceDetached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    // Detaching an instance that was never attached is a programming error.
    ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);
    m_cdmInstance = nullptr;
#else
    UNUSED_PARAM(instance);
#endif
}
2377
void MediaPlayerPrivateAVFoundationObjC::attemptToDecryptWithInstance(CDMInstance&)
{
#if HAVE(AVCONTENTKEYSESSION)
    // Requires both a key ID (captured in shouldWaitForLoadingOfResource())
    // and an attached FairPlay CDM instance.
    if (!m_keyID || !m_cdmInstance)
        return;

    auto instanceSession = m_cdmInstance->sessionForKeyIDs(Vector<Ref<SharedBuffer>>::from(*m_keyID));
    if (!instanceSession)
        return;

    // Let the content key session feed keys directly to the asset.
    [instanceSession->contentKeySession() addContentKeyRecipient:m_avAsset.get()];

    // Complete every pending "skd" loading request. The map is moved out
    // first so completions cannot mutate it while we iterate.
    auto keyURIToRequestMap = WTFMove(m_keyURIToRequestMap);
    for (auto& request : keyURIToRequestMap.values()) {
        if (auto *infoRequest = request.get().contentInformationRequest)
            infoRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
        [request finishLoading];
    }
    setWaitingForKey(false);
#endif
}
2399
// Updates the waiting-for-key flag, notifying the client only on an actual
// state transition.
void MediaPlayerPrivateAVFoundationObjC::setWaitingForKey(bool waitingForKey)
{
    if (m_waitingForKey != waitingForKey) {
        m_waitingForKey = waitingForKey;
        player()->waitingForKeyChanged();
    }
}
2408 #endif
2409
// Returns the asset's audible tracks, or nil when the asset is missing or its
// "tracks" key has not finished loading (querying earlier would block).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    bool tracksAreLoaded = m_avAsset && [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] == AVKeyValueStatusLoaded;
    if (!tracksAreLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2420
// True once the asset exists and has finished loading its media selection
// characteristics; media selection groups are only safe to query after that.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2431
// Returns the legible (caption/subtitle) media selection group, or nil until
// the asset's media selection options have loaded.
AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2439
// Returns the audible media selection group, or nil until the asset's media
// selection options have loaded.
AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2447
// Returns the visual media selection group, or nil until the asset's media
// selection options have loaded.
AVMediaSelectionGroup* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2455
// Synchronizes m_textTracks with the playable options of the asset's legible
// media selection group: creates track objects for new options, and reports
// tracks whose options have disappeared as removed.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroup *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Diff the current options against the known tracks: anything still in
    // removedTextTracks after the loop no longer exists in the media.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [PAL::getAVMediaSelectionGroupClass() playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOption *option in legibleOptions) {
        bool newTrack = true;
        // Iterate in reverse so remove() does not shift unvisited entries.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are not backed by a selection option.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOption> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            // Option is still present, so the matching track survives.
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2511
// Lazily creates the single in-band metadata text track used for timed
// metadata cues and registers it with the client.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    auto metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    m_metadataTrack = WTFMove(metadataTrack);
    player()->addTextTrack(*m_metadataTrack);
}
2521
// Forwards a batch of cue payloads (attributed strings and/or native samples)
// arriving at the given media time to the currently selected text track.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    // Cues are dropped unless a text track is currently selected.
    if (auto* currentTrack = m_currentTextTrack)
        currentTrack->processCue((__bridge CFArrayRef)attributedStrings, (__bridge CFArrayRef)nativeSamples, time);
}
2531
// Discards all pending cue state on the currently selected text track.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    INFO_LOG(LOGIDENTIFIER);

    if (auto* currentTrack = m_currentTextTrack)
        currentTrack->resetCueValues();
}
2541
// Selects (or, with nil, deselects) the given text track in AVFoundation.
// Legacy closed-caption tracks are toggled via the deprecated player-level
// switch; all other categories are selected through the legible media
// selection group.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");

    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
            ALLOW_DEPRECATED_DECLARATIONS_END
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
    } else {
        // Deselect: clear the legible selection and disable legacy closed captions.
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
        ALLOW_DEPRECATED_DECLARATIONS_BEGIN
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
        ALLOW_DEPRECATED_DECLARATIONS_END
    }

}
2570
// Returns the BCP-47-style locale identifier of the primary audio track,
// caching the result in m_languageOfPrimaryAudioTrack. Prefers the currently
// selected audible media selection option; falls back to the language of a
// single ungrouped audio track; otherwise returns the empty string.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Return the cached value if one has been computed (isNull distinguishes
    // "not yet computed" from a cached empty result).
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroup *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    ALLOW_DEPRECATED_DECLARATIONS_BEGIN
    AVMediaSelectionOption *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    ALLOW_DEPRECATED_DECLARATIONS_END
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);

        return m_languageOfPrimaryAudioTrack;
    }

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single primary language to report.
        m_languageOfPrimaryAudioTrack = emptyString();
        INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

    INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");

    return m_languageOfPrimaryAudioTrack;
}
2607
2608 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Returns whether playback is currently routed to a wireless target. On iOS-family
// platforms this is simply AVPlayer's external-playback state; elsewhere it depends
// on the kind of playback target that has been set on this player.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS_FAMILY)
    if (m_playbackTarget) {
        // AVFoundation targets report through the player; other target types
        // (e.g. Mock) are considered active only while we opted in and the
        // target still has an active route.
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    INFO_LOG(LOGIDENTIFIER, wirelessTarget);

    return wirelessTarget;
}
2628
// Maps AVPlayer's external playback type to the MediaPlayer target-type enum.
// On non-iOS-family platforms the only wireless target is AirPlay.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS_FAMILY)
    if (!PAL::isAVFoundationFrameworkAvailable())
        return MediaPlayer::TargetTypeNone;

    switch ([m_avPlayer externalPlaybackType]) {
    case AVPlayerExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case AVPlayerExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case AVPlayerExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // All enum values are handled above; an unknown value is a framework bug.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2654     
2655 #if PLATFORM(IOS_FAMILY)
// Returns a human-readable display name for the external playback device the
// given player is routed to, or nil when none applies.
// FIXME: "exernal" in the function name is a typo for "external" (rename would
// need to update the call site in wirelessPlaybackTargetName() as well).
static NSString *exernalDeviceDisplayNameForPlayer(AVPlayer *player)
{
#if HAVE(CELESTIAL)
    if (!PAL::isAVFoundationFrameworkAvailable())
        return nil;

    // Preferred path: use the shared audio-presentation AVOutputContext when available.
    if ([PAL::getAVOutputContextClass() respondsToSelector:@selector(sharedAudioPresentationOutputContext)]) {
        AVOutputContext *outputContext = [PAL::getAVOutputContextClass() sharedAudioPresentationOutputContext];

        // Older contexts without multi-device support expose a single device name.
        if (![outputContext respondsToSelector:@selector(supportsMultipleOutputDevices)]
            || ![outputContext supportsMultipleOutputDevices]
            || ![outputContext respondsToSelector:@selector(outputDevices)])
            return [outputContext deviceName];

        // Multiple simultaneous output devices: join their names, e.g. "Kitchen + Den".
        auto outputDeviceNames = adoptNS([[NSMutableArray alloc] init]);
        for (AVOutputDevice *outputDevice in [outputContext outputDevices]) {
ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            auto outputDeviceName = adoptNS([[outputDevice name] copy]);
ALLOW_DEPRECATED_DECLARATIONS_END
            [outputDeviceNames addObject:outputDeviceName.get()];
        }

        return [outputDeviceNames componentsJoinedByString:@" + "];
    }

    // Fallback path (no shared output context): consult MediaRemote's pickable
    // routes. Only AirPlay routes have a meaningful display name here.
    if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
        return nil;

    NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
    if (!pickableRoutes.count)
        return nil;

    NSString *displayName = nil;
    for (NSDictionary *pickableRoute in pickableRoutes) {
        if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
            continue;

        displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];

        NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
        if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
            break;

        // The route is a speaker or HDMI out, override the name to be the localized device model.
        NSString *localizedDeviceModel = [[PAL::getUIDeviceClass() currentDevice] localizedModel];

        // In cases where a route with that name already exists, prefix the name with the model.
        BOOL includeLocalizedDeviceModelName = NO;
        for (NSDictionary *otherRoute in pickableRoutes) {
            if (otherRoute == pickableRoute)
                continue;

            if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
                includeLocalizedDeviceModelName = YES;
                break;
            }
        }

        if (includeLocalizedDeviceModelName)
            displayName =  [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
        else
            displayName = localizedDeviceModel;

        break;
    }

    return displayName;
#else
    UNUSED_PARAM(player);
    return nil;
#endif
}
2728 #endif
2729
// Returns the display name of the current wireless playback target, or the
// empty string when there is no player or no name can be determined.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS_FAMILY)
    // The target object (set via setWirelessPlaybackTarget) knows its own name.
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = exernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif

    return wirelessTargetName;
}
2745
// Returns whether wireless (external) video playback is disabled. When a
// player exists, refreshes the cached allowsExternalPlayback value from it;
// otherwise answers from the cache.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (m_avPlayer) {
        m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
        INFO_LOG(LOGIDENTIFIER, !m_allowsWirelessVideoPlayback);
    }

    return !m_allowsWirelessVideoPlayback;
}
2756
// Enables or disables wireless (external) video playback. The value is cached
// even when no player exists yet so it can be applied on player creation.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    INFO_LOG(LOGIDENTIFIER, disabled);
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // Suppress re-entrant notifications while poking the AVPlayer.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
    setDelayCallbacks(false);
}
2768
2769 #if !PLATFORM(IOS_FAMILY)
2770
// Adopts a new wireless playback target, capturing its AVOutputContext when it
// is an AVFoundation target (other target kinds carry none).
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
        m_outputContext = toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext();
    else
        m_outputContext = nullptr;

    INFO_LOG(LOGIDENTIFIER);

    // A target without an active route cannot be played to.
    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2782
// Starts or stops routing playback to the current wireless target. For
// AVFoundation targets this sets/clears the AVPlayer's outputContext; for Mock
// targets it only schedules a wireless-state-changed notification.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    // The flag is remembered even without a target; it takes effect once one is set.
    if (!m_playbackTarget)
        return;

    INFO_LOG(LOGIDENTIFIER, shouldPlay);

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        if (!m_avPlayer)
            return;

        // Avoid churn when the player is already routed to the requested context.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    // Mock target: no real routing; just notify asynchronously (guarding against
    // this object being destroyed before the notification fires).
    setDelayCallbacks(true);
    auto weakThis = makeWeakPtr(*this);
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2823
2824 #endif // !PLATFORM(IOS_FAMILY)
2825
// iOS-family only: keeps AVPlayer's "use external playback while an external
// screen is active" behavior in sync with the element's fullscreen state.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS_FAMILY)
    if (!m_avPlayer)
        return;

    // SPI may be unavailable on some OS versions; probe before calling.
    if ([m_avPlayer respondsToSelector:@selector(setUsesExternalPlaybackWhileExternalScreenIsActive:)])
        [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:(player()->fullscreenMode() == MediaPlayer::VideoFullscreenModeStandard) || player()->isVideoFullscreenStandby()];
#endif
}
2836
2837 #endif
2838
// Records the AVPlayerItem status (e.g. ready-to-play / failed) and re-derives
// the player's network/ready states.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2845
// A "likely to keep up" change is in flight; bump the pending counter so
// updateStates() is deferred until the matching ...DidChange arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2850
// Completes a "likely to keep up" change; recomputes state only once all
// in-flight status changes have settled.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    // Every DidChange must be preceded by a matching WillChange.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2859
// A "buffer empty" change is in flight; defer updateStates() until it completes.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
2864
// Completes a "buffer empty" change; recomputes state only once all in-flight
// status changes have settled.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    // Every DidChange must be preceded by a matching WillChange.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2873
// A "buffer full" change is in flight; defer updateStates() until it completes.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
2878
// Completes a "buffer full" change; recomputes state only once all in-flight
// status changes have settled.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    // Every DidChange must be preceded by a matching WillChange.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2887
// Caches the item's new seekable ranges and propagates the change.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray>&& seekableRanges)
{
    m_cachedSeekableRanges = WTFMove(seekableRanges);

    seekableTimeRangesChanged();
    updateStates();
}
2895
// Caches the item's new loaded (buffered) ranges and propagates the change.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray>&& loadedRanges)
{
    m_cachedLoadedRanges = WTFMove(loadedRanges);

    loadedTimeRangesChanged();
    updateStates();
}
2903
// Records whether the first video frame is ready for display. Becoming ready
// before any video track was reported implies the track set is stale, so
// re-scan tracks in that case.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2911
// A player-item track's "enabled" state changed (observed via KVO elsewhere in
// this file); re-scan tracks and re-derive player state.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2917
// Applies a buffering policy to the AVPlayer. Prefers the modern
// resourceConservationLevelWhilePaused API (the policy enum maps 1:1 onto it,
// enforced by the static_asserts below); otherwise emulates the policy by
// detaching and/or re-attaching the AVPlayerItem.
void MediaPlayerPrivateAVFoundationObjC::setBufferingPolicy(MediaPlayer::BufferingPolicy policy)
{
    ALWAYS_LOG(LOGIDENTIFIER, policy);

    if (m_bufferingPolicy == policy)
        return;

    // Remember the policy even without a player so it can apply later.
    m_bufferingPolicy = policy;
    
    if (!m_avPlayer)
        return;

#if HAVE(AVPLAYER_RESOURCE_CONSERVATION_LEVEL)
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::Default) == AVPlayerResourceConservationLevelNone, "MediaPlayer::BufferingPolicy::Default is not AVPlayerResourceConservationLevelNone as expected");
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::LimitReadAhead) == AVPlayerResourceConservationLevelReduceReadAhead, "MediaPlayer::BufferingPolicy::LimitReadAhead is not AVPlayerResourceConservationLevelReduceReadAhead as expected");
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::MakeResourcesPurgeable) == AVPlayerResourceConservationLevelReuseActivePlayerResources, "MediaPlayer::BufferingPolicy::MakeResourcesPurgeable is not AVPlayerResourceConservationLevelReuseActivePlayerResources as expected");
    static_assert(static_cast<size_t>(MediaPlayer::BufferingPolicy::PurgeResources) == AVPlayerResourceConservationLevelRecycleBuffer, "MediaPlayer::BufferingPolicy::PurgeResources is not AVPlayerResourceConservationLevelRecycleBuffer as expected");

    if ([m_avPlayer respondsToSelector:@selector(setResourceConservationLevelWhilePaused:)]) {
        m_avPlayer.get().resourceConservationLevelWhilePaused = static_cast<AVPlayerResourceConservationLevel>(policy);
        updateStates();
        return;
    }
#endif

    // Legacy fallback: emulate conservation levels with item attach/detach.
    switch (policy) {
    case MediaPlayer::BufferingPolicy::Default:
        setAVPlayerItem(m_avPlayerItem.get());
        break;
    case MediaPlayer::BufferingPolicy::LimitReadAhead:
    case MediaPlayer::BufferingPolicy::MakeResourcesPurgeable:
        setAVPlayerItem(nil);
        break;
    case MediaPlayer::BufferingPolicy::PurgeResources:
        // Detach then immediately re-attach to force buffers to be recycled.
        setAVPlayerItem(nil);
        setAVPlayerItem(m_avPlayerItem.get());
        break;
    }

    updateStates();
}
2959
2960 #if ENABLE(DATACUE_VALUE)
2961
// Maps an AVMetadataItem key space to the DataCue type string exposed to the
// platform-independent layer; returns the empty atom for unknown key spaces.
static const AtomString& metadataType(NSString *avMetadataKeySpace)
{
    // Lazily-initialized, process-lifetime constants (hence NeverDestroyed).
    static NeverDestroyed<const AtomString> quickTimeUserData("com.apple.quicktime.udta", AtomString::ConstructFromLiteral);
    static NeverDestroyed<const AtomString> isoUserData("org.mp4ra", AtomString::ConstructFromLiteral);
    static NeverDestroyed<const AtomString> quickTimeMetadata("com.apple.quicktime.mdta", AtomString::ConstructFromLiteral);
    static NeverDestroyed<const AtomString> iTunesMetadata("com.apple.itunes", AtomString::ConstructFromLiteral);
    static NeverDestroyed<const AtomString> id3Metadata("org.id3", AtomString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom();
}
2983
2984 #endif
2985
// Handles a batch of timed metadata from AVFoundation: caches it, then (when
// DATACUE_VALUE is enabled) closes out still-open cues and adds a data cue for
// each new metadata item on the lazily-created metadata track.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(const RetainPtr<NSArray>& metadata, const MediaTime& mediaTime)
{
    // AVFoundation can deliver NSNull in place of an array; normalize to nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    INFO_LOG(LOGIDENTIFIER, "adding ", m_currentMetaData ? [m_currentMetaData.get() count] : 0, " at time ", mediaTime);

#if ENABLE(DATACUE_VALUE)
    // During a seek, incoming metadata is stale; ignore it.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // An empty batch means all open cues end at the current time.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItem *item in m_currentMetaData.get()) {
        // Clamp negative item times to zero.
        MediaTime start = std::max(PAL::toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItem *item in m_currentMetaData.get()) {
        MediaTime start = std::max(PAL::toMediaTime(item.time), MediaTime::zeroTime());
        // Items without a valid duration produce open-ended cues, closed by a
        // later batch via updatePendingCueEndTimes().
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + PAL::toMediaTime(item.duration);

        AtomString type = nullAtom();
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3027
3028 void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(const RetainPtr<NSArray>& tracks)
3029 {
3030     for (AVPlayerItemTrack *track in m_cachedTracks.get())
3031         [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
3032
3033     NSArray *assetTracks = [m_avAsset tracks];
3034
3035     m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
3036         AVAssetTrack* assetTrack = [obj assetTrack];
3037
3038         if ([assetTracks containsObject:assetTrack])
3039             return YES;
3040
3041         // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
3042         if (!hasLoadedMediaSelectionGroups())
3043             return NO;
3044
3045         if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
3046             return NO;
3047
3048         if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
3049             return NO;
3050
3051         if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
3052             return NO;
3053
3054         return YES;
3055     }]];
3056
3057     for (AVPlayerItemTrack *track in m_cachedTracks.get())
3058         [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];
3059