[WTF] Use Semaphore and BinarySemaphore instead of dispatch_semaphore_t
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2018 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "DeprecatedGlobalSettings.h"
41 #import "Extensions3D.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "OutOfBandTextTrackPrivateAVF.h"
54 #import "PixelBufferConformerCV.h"
55 #import "PlatformTimeRanges.h"
56 #import "SecurityOrigin.h"
57 #import "SerializedPlatformRepresentationMac.h"
58 #import "SharedBuffer.h"
59 #import "TextEncoding.h"
60 #import "TextTrackRepresentation.h"
61 #import "TextureCacheCV.h"
62 #import "URL.h"
63 #import "VideoFullscreenLayerManagerObjC.h"
64 #import "VideoTextureCopierCV.h"
65 #import "VideoTrackPrivateAVFObjC.h"
66 #import "WebCoreAVFResourceLoader.h"
67 #import "WebCoreCALayerExtras.h"
68 #import "WebCoreNSURLSession.h"
69 #import <JavaScriptCore/DataView.h>
70 #import <JavaScriptCore/JSCInlines.h>
71 #import <JavaScriptCore/TypedArrayInlines.h>
72 #import <JavaScriptCore/Uint16Array.h>
73 #import <JavaScriptCore/Uint32Array.h>
74 #import <JavaScriptCore/Uint8Array.h>
75 #import <functional>
76 #import <objc/runtime.h>
77 #import <pal/avfoundation/MediaTimeAVFoundation.h>
78 #import <pal/spi/cocoa/QuartzCoreSPI.h>
79 #import <pal/spi/mac/AVFoundationSPI.h>
80 #import <wtf/BlockObjCExceptions.h>
81 #import <wtf/ListHashSet.h>
82 #import <wtf/NeverDestroyed.h>
83 #import <wtf/OSObjectPtr.h>
84 #import <wtf/text/CString.h>
85
86 #if ENABLE(AVF_CAPTIONS)
87 #include "TextTrack.h"
88 #endif
89
90 #import <AVFoundation/AVAssetImageGenerator.h>
91 #import <AVFoundation/AVAssetTrack.h>
92 #import <AVFoundation/AVMediaSelectionGroup.h>
93 #import <AVFoundation/AVMetadataItem.h>
94 #import <AVFoundation/AVPlayer.h>
95 #import <AVFoundation/AVPlayerItem.h>
96 #import <AVFoundation/AVPlayerItemOutput.h>
97 #import <AVFoundation/AVPlayerItemTrack.h>
98 #import <AVFoundation/AVPlayerLayer.h>
99 #import <AVFoundation/AVTime.h>
100
101 #if PLATFORM(IOS)
102 #import "WAKAppKitStubs.h"
103 #import <CoreImage/CoreImage.h>
104 #import <UIKit/UIDevice.h>
105 #import <mach/mach_port.h>
106 #else
107 #import <Foundation/NSGeometry.h>
108 #import <QuartzCore/CoreImage.h>
109 #endif
110
111 #if USE(VIDEOTOOLBOX)
112 #import <CoreVideo/CoreVideo.h>
113 #import <VideoToolbox/VideoToolbox.h>
114 #endif
115
116 #import "CoreVideoSoftLink.h"
117 #import "MediaRemoteSoftLink.h"
118
namespace std {
// Manual iterator_traits specialization: WTF::HashSet's iterator does not expose the
// member typedefs std::iterator_traits normally deduces, so value_type is supplied here.
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    using value_type = RefPtr<WebCore::MediaSelectionOptionAVFObjC>;
};
}
124
125 #if ENABLE(AVF_CAPTIONS)
126 // Note: This must be defined before our SOFT_LINK macros:
127 @class AVMediaSelectionOption;
128 @interface AVMediaSelectionOption (OutOfBandExtensions)
129 @property (nonatomic, readonly) NSString* outOfBandSource;
130 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
131 @end
132 #endif
133
134 @interface AVURLAsset (WebKitExtensions)
135 @property (nonatomic, readonly) NSURL *resolvedURL;
136 @end
137
138 typedef AVPlayer AVPlayerType;
139 typedef AVPlayerItem AVPlayerItemType;
140 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
141 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
142 typedef AVMetadataItem AVMetadataItemType;
143 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
144 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
145 typedef AVAssetCache AVAssetCacheType;
146
147 #pragma mark - Soft Linking
148
149 // Soft-linking headers must be included last since they #define functions, constants, etc.
150 #import <pal/cf/CoreMediaSoftLink.h>
151
152 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
153
154 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
155
156 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
157 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
158 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
159 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
160 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
161 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
162 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
163 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)
164
165 SOFT_LINK_CLASS(CoreImage, CIContext)
166 SOFT_LINK_CLASS(CoreImage, CIImage)
167
168 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString *)
169 SOFT_LINK_CONSTANT(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString *)
170 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicVisual, NSString *)
171 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicAudible, NSString *)
172 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeClosedCaption, NSString *)
173 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeVideo, NSString *)
174 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeAudio, NSString *)
175 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeMetadata, NSString *)
176 SOFT_LINK_CONSTANT(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
177 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
178 SOFT_LINK_CONSTANT(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
179 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
180 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
181 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
182 SOFT_LINK_CONSTANT(AVFoundation, AVLayerVideoGravityResize, NSString *)
183 SOFT_LINK_CONSTANT(AVFoundation, AVStreamingKeyDeliveryContentKeyType, NSString *)
184
185 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetOutOfBandMIMETypeKey, NSString *)
186 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetUseClientURLLoadingExclusively, NSString *)
187
188 #define AVPlayer initAVPlayer()
189 #define AVPlayerItem initAVPlayerItem()
190 #define AVPlayerLayer initAVPlayerLayer()
191 #define AVURLAsset initAVURLAsset()
192 #define AVAssetImageGenerator initAVAssetImageGenerator()
193 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
194 #define AVMetadataItem initAVMetadataItem()
195 #define AVAssetCache initAVAssetCache()
196
197 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
198 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
199 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
200 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
201 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
202 #define AVMediaTypeVideo getAVMediaTypeVideo()
203 #define AVMediaTypeAudio getAVMediaTypeAudio()
204 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
205 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
206 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
207 #define AVURLAssetOutOfBandMIMETypeKey getAVURLAssetOutOfBandMIMETypeKey()
208 #define AVURLAssetUseClientURLLoadingExclusively getAVURLAssetUseClientURLLoadingExclusively()
209 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
210 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
211 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
212 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
213 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
214 #define AVStreamingKeyDeliveryContentKeyType getAVStreamingKeyDeliveryContentKeyType()
215
216 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
217
218 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
219 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
220
221 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
222 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
223 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
224 SOFT_LINK_CLASS(AVFoundation, AVOutputContext)
225
226 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicLegible, NSString *)
227 SOFT_LINK_CONSTANT(AVFoundation, AVMediaTypeSubtitle, NSString *)
228 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
229 SOFT_LINK_CONSTANT(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
230
231 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
232 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
233 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
234 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
235 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
236 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
237 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
238
239 #endif
240
241 #if ENABLE(AVF_CAPTIONS)
242
243 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetCacheKey, NSString *)
244 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString *)
245 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString *)
246 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString *)
247 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString *)
248 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString *)
249 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString *)
250 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString *)
251 SOFT_LINK_CONSTANT(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString *)
252 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString *)
253 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString *)
254 SOFT_LINK_CONSTANT(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString *)
255
256 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
257 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
258 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
259 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
260 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
261 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
262 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
263 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
264 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
265 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
266 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
267 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
268
269 #endif
270
271 #if ENABLE(DATACUE_VALUE)
272
273 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString *)
274 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString *)
275 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString *)
276 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceiTunes, NSString *)
277 SOFT_LINK_CONSTANT(AVFoundation, AVMetadataKeySpaceID3, NSString *)
278
279 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
280 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
281 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
282 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
283 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
284
285 #endif
286
287 #if PLATFORM(IOS)
288
289 SOFT_LINK_CONSTANT(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
290 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
291 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetHTTPCookiesKey, NSString *)
292 SOFT_LINK_CONSTANT_MAY_FAIL(AVFoundation, AVURLAssetRequiresCustomURLLoadingKey, NSString *)
293
294 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
295 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
296 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
297 #define AVURLAssetRequiresCustomURLLoadingKey getAVURLAssetRequiresCustomURLLoadingKey()
298
299 #endif
300
301 SOFT_LINK_FRAMEWORK(MediaToolbox)
302 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
303
304 #if PLATFORM(IOS)
305
306 #if HAVE(CELESTIAL)
307
308 SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
309 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
310 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
311 SOFT_LINK_CONSTANT(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
312 #define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
313 #define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
314 #define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
315
316 #endif // HAVE(CELESTIAL)
317
318 SOFT_LINK_FRAMEWORK(UIKit)
319 SOFT_LINK_CLASS(UIKit, UIDevice)
320 #define UIDevice getUIDeviceClass()
321
322 #endif // PLATFORM(IOS)
323
324 using namespace WebCore;
325
326 enum MediaPlayerAVFoundationObservationContext {
327     MediaPlayerAVFoundationObservationContextPlayerItem,
328     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
329     MediaPlayerAVFoundationObservationContextPlayer,
330     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
331 };
332
333 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
334 @interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
335 #else
336 @interface WebCoreAVFMovieObserver : NSObject
337 #endif
338 {
339     MediaPlayerPrivateAVFoundationObjC* m_callback;
340     int m_delayCallbacks;
341 }
342 -(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
343 -(void)disconnect;
344 -(void)metadataLoaded;
345 -(void)didEnd:(NSNotification *)notification;
346 -(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
347 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
348 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
349 - (void)outputSequenceWasFlushed:(id)output;
350 #endif
351 @end
352
353 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
354 @interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
355     MediaPlayerPrivateAVFoundationObjC* m_callback;
356 }
357 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
358 - (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
359 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
360 @end
361 #endif
362
363 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
364 @interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
365     MediaPlayerPrivateAVFoundationObjC *m_callback;
366 }
367 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
368 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
369 - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
370 - (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
371 @end
372 #endif
373
374 namespace WebCore {
375 using namespace PAL;
376
377 static NSArray *assetMetadataKeyNames();
378 static NSArray *itemKVOProperties();
379 static NSArray *assetTrackMetadataKeyNames();
380 static NSArray *playerKVOProperties();
381 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
382
383 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
static dispatch_queue_t globalLoaderDelegateQueue()
{
    // Process-wide serial queue shared by every WebCoreAVFLoaderDelegate; created exactly
    // once and intentionally never destroyed.
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t createOnce;
    dispatch_once(&createOnce, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
393 #endif
394
395 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
static dispatch_queue_t globalPullDelegateQueue()
{
    // Process-wide serial queue shared by every WebCoreAVFPullDelegate; created exactly
    // once and intentionally never destroyed.
    static dispatch_queue_t pullQueue;
    static dispatch_once_t createOnce;
    dispatch_once(&createOnce, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
405 #endif
406
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    // AVFoundation is soft-linked and may be unavailable; in that case this engine
    // is simply not registered.
    if (!isAvailable())
        return;

    registrar(
        [](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
        getSupportedTypes,
        supportsType,
        originsInMediaCache,
        clearMediaCache,
        clearMediaCacheForOrigins,
        supportsKeySystem);
    ASSERT(AVFoundationMIMETypeCache::singleton().isAvailable());
}
416
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    // An empty path means "use the default location": a MediaCache directory inside
    // the temporary directory.
    NSURL *cacheDirectoryURL = path.isEmpty()
        ? [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES]
        : [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:cacheDirectoryURL];
}
428
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    // Each asset-cache key is an asset URL; collect the security origin of every valid one.
    HashSet<RefPtr<SecurityOrigin>> origins;
    for (NSString *key in [assetCacheForPath(path) allKeys]) {
        URL url = URL(URL(), key);
        if (!url.isValid())
            continue;
        origins.add(SecurityOrigin::create(url));
    }
    return origins;
}
439
static WallTime toSystemClockTime(NSDate *date)
{
    // Convert an NSDate into WTF's WallTime via its seconds-since-Unix-epoch offset.
    ASSERT(date);
    return WallTime::fromRawSeconds([date timeIntervalSince1970]);
}
445
446 void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, WallTime modifiedSince)
447 {
    // Evict everything in the AVAssetCache at `path` modified after `modifiedSince`, then sweep
    // the backing directory for "CachedMedia-" files AVFoundation may have left behind.
448     AVAssetCacheType* assetCache = assetCacheForPath(path);
449     
    // First pass: remove entries through the AVAssetCache API itself.
450     for (NSString *key in [assetCache allKeys]) {
451         if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
452             [assetCache removeEntryForKey:key];
453     }
454
455     NSFileManager *fileManager = [NSFileManager defaultManager];
456     NSURL *baseURL = [assetCache URL];
457
    // A cutoff at or before the epoch means "clear everything": delete the whole cache directory.
458     if (modifiedSince <= WallTime::fromRawSeconds(0)) {
459         [fileManager removeItemAtURL:baseURL error:nil];
460         return;
461     }
462     
463     NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    // Shallow enumeration only; cached media files live directly in the cache directory.
464     NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
465         propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
466         errorHandler:nil];
467     
    // Collect matches first and delete afterwards so the directory is not mutated mid-enumeration.
    // NOTE(review): this is a C++ range-based `for (… : enumerator)` over an NSDirectoryEnumerator
    // rather than Objective-C fast enumeration (`in`) — confirm the toolchain supports this form.
468     RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
469     for (NSURL *fileURL : enumerator) {
470         NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
471     
472         if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
473             continue;
474         
475         if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
476             continue;
477         
        // Keep files at or older than the cutoff; only newer ones are evicted.
478         if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
479             continue;
480         
481         [urlsToDelete addObject:fileURL];
482     }
483     
484     for (NSURL *fileURL in urlsToDelete.get())
485         [fileManager removeItemAtURL:fileURL error:nil];
486 }
487
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    // Cache keys are asset URLs; map each back to its security origin and evict on a match.
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (!keyAsURL.isValid())
            continue;
        if (origins.contains(SecurityOrigin::create(keyAsURL)))
            [assetCache removeEntryForKey:key];
    }
}
499
// Construct the AVFoundation-backed private player. Delegates (movie observer, video-output
// pull delegate, loader delegate) are created eagerly here; the AVPlayer/AVPlayerItem and any
// layers are created lazily later. Cached KVO-mirrored state starts in its "unknown/empty" form.
500 MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
501     : MediaPlayerPrivateAVFoundation(player)
502     , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
503     , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
504     , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
505     , m_videoFrameHasDrawn(false)
506     , m_haveCheckedPlayability(false)
507 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
508     , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
509 #endif
510 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
511     , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
512 #endif
513     , m_currentTextTrack(0)
514     , m_cachedRate(0)
515     , m_cachedTotalBytes(0)
516     , m_pendingStatusChanges(0)
517     , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
518     , m_cachedLikelyToKeepUp(false)
519     , m_cachedBufferEmpty(false)
520     , m_cachedBufferFull(false)
521     , m_cachedHasEnabledAudio(false)
522     , m_shouldBufferData(true)
523     , m_cachedIsReadyForDisplay(false)
524     , m_haveBeenAskedToCreateLayer(false)
525 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
526     , m_allowsWirelessVideoPlayback(true)
527 #endif
528 {
529 }
530
// Teardown order matters: detach every Objective-C delegate from this object first so no
// callback can arrive during destruction, then destroy the layer, then cancel loading.
531 MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
532 {
533 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Null the loader delegate's back-pointer and drop it from the asset's resource loader.
534     [m_loaderDelegate.get() setCallback:0];
535     [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];
536
    // Invalidate all in-flight custom resource loads.
537     for (auto& pair : m_resourceLoaderMap)
538         pair.value->invalidate();
539 #endif
540 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
541     [m_videoOutputDelegate setCallback:0];
542     [m_videoOutput setDelegate:nil queue:0];
543 #endif
544
545     if (m_videoLayer)
546         destroyVideoLayer();
547
    // cancelLoad() removes the remaining KVO observers and releases the player/item.
548     cancelLoad();
549 }
550
// Abort any in-progress load and release every AVFoundation object this player holds.
// The sequence is strictly ordered: rendering is torn down first, observers are detached
// before the objects they observe are released, and cached KVO-mirrored state is reset last.
551 void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
552 {
553     INFO_LOG(LOGIDENTIFIER);
554     tearDownVideoRendering();
555
    // Stop all notification and observer callbacks into this object.
556     [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
557     [m_objcObserver.get() disconnect];
558
559     // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
560     setIgnoreLoadStateChanges(true);
561     if (m_avAsset) {
562         [m_avAsset.get() cancelLoading];
563         m_avAsset = nil;
564     }
565
566     clearTextTracks();
567
568 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before releasing either object.
569     if (m_legibleOutput) {
570         if (m_avPlayerItem)
571             [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
572         m_legibleOutput = nil;
573     }
574 #endif
575
    // Remove every KVO registration made on the item before dropping it.
576     if (m_avPlayerItem) {
577         for (NSString *keyName in itemKVOProperties())
578             [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
579         
580         m_avPlayerItem = nil;
581     }
    // Likewise for the player: periodic time observer first, then KVO, then the item itself.
582     if (m_avPlayer) {
583         if (m_timeObserver)
584             [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
585         m_timeObserver = nil;
586
587         for (NSString *keyName in playerKVOProperties())
588             [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
589
590         [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
591         m_avPlayer = nil;
592     }
593
594     // Reset cached properties
595     m_pendingStatusChanges = 0;
596     m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
597     m_cachedSeekableRanges = nullptr;
598     m_cachedLoadedRanges = nullptr;
599     m_cachedHasEnabledAudio = false;
600     m_cachedPresentationSize = FloatSize();
601     m_cachedDuration = MediaTime::zeroTime();
602
    // Each cached track was observed for "enabled" changes; unregister before releasing.
603     for (AVPlayerItemTrack *track in m_cachedTracks.get())
604         [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
605     m_cachedTracks = nullptr;
606
607 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Disconnect the Web Audio source provider from the now-released item and track.
608     if (m_provider) {
609         m_provider->setPlayerItem(nullptr);
610         m_provider->setAudioTrack(nullptr);
611     }
612 #endif
613
614     setIgnoreLoadStateChanges(false);
615 }
616
// A layer renderer exists once createVideoLayer() has been requested, even before the
// AVPlayerLayer is actually constructed on the main thread.
617 bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
618 {
619     return m_haveBeenAskedToCreateLayer;
620 }
621
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
    // Either an AVPlayerItemVideoOutput (when available) or an AVAssetImageGenerator
    // can paint frames into a graphics context.
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
630
// Create whichever context renderer this build supports: prefer the pixel-buffer video
// output, fall back to the image generator when video output is unavailable.
631 void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
632 {
633 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
634     createVideoOutput();
635 #else
636     createImageGenerator();
637 #endif
638 }
639
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    using namespace PAL;
    INFO_LOG(LOGIDENTIFIER);

    // Requires an asset, and never replaces an existing generator.
    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator setAppliesPreferredTrackTransform:YES];
    // Zero tolerance both ways: snapshots must come from the exact requested time,
    // not a nearby keyframe.
    [m_imageGenerator setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator setRequestedTimeToleranceAfter:kCMTimeZero];
}
655
// Tear down both possible context renderers; each destroy helper is a no-op when its
// renderer was never created.
656 void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
657 {
658 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
659     destroyVideoOutput();
660 #endif
661     destroyImageGenerator();
662 }
663
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    // Release the snapshot generator; it is recreated on demand by createImageGenerator().
    if (!m_imageGenerator)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Use nil rather than 0 for the Objective-C smart pointer, matching the idiom used
    // everywhere else in this file (m_videoLayer = nil, m_avAsset = nil, ...).
    m_imageGenerator = nil;
}
673
// Request creation of the AVPlayerLayer. The actual work is bounced to the main thread
// (layers must be created there); a WeakPtr guards against this object dying in between.
674 void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
675 {
676     if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
677         return;
678
679     callOnMainThread([this, weakThis = makeWeakPtr(*this)] {
680         if (!weakThis)
681             return;
682
        // Re-check on the main thread: state may have changed since the hop was scheduled.
683         if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
684             return;
685         m_haveBeenAskedToCreateLayer = true;
686
687         if (!m_videoLayer)
688             createAVPlayerLayer();
689
690 #if USE(VIDEOTOOLBOX) && HAVE(AVFOUNDATION_VIDEO_OUTPUT)
        // Keep a video output alongside the layer so frames can also be read back.
691         if (!m_videoOutput)
692             createVideoOutput();
693 #endif
694
        // Let the client know rendering switched to layer mode.
695         player()->client().mediaPlayerRenderingModeChanged(player());
696     });
697 }
698
// Create the AVPlayerLayer, attach it to the player, register KVO for readiness, and hand
// it to the fullscreen layer manager. Must only run when an AVPlayer exists.
699 void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
700 {
701     if (!m_avPlayer)
702         return;
703
704     m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
705     [m_videoLayer setPlayer:m_avPlayer.get()];
706
707 #ifndef NDEBUG
708     [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
709 #endif
    // Observe readyForDisplay so m_cachedIsReadyForDisplay can mirror the layer's state;
    // the context value distinguishes layer notifications in observeValueForKeyPath.
710     [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
711     updateVideoLayerGravity();
712     [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
713     IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
714     INFO_LOG(LOGIDENTIFIER);
715
716     m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);
717
718 #if PLATFORM(IOS) && !PLATFORM(WATCHOS)
    // Guarded with respondsToSelector: setPIPModeEnabled: is not present on all OS versions.
719     if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
720         [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
721 #endif
722 }
723
// Release the AVPlayerLayer. KVO must be unregistered before the layer is detached and
// released so no readyForDisplay notification arrives mid-teardown.
724 void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
725 {
726     if (!m_videoLayer)
727         return;
728
729     INFO_LOG(LOGIDENTIFIER);
730
731     [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
732     [m_videoLayer setPlayer:nil];
733     m_videoFullscreenLayerManager->didDestroyVideoLayer();
734
735     m_videoLayer = nil;
736 }
737
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // The item's currentDate advances with playback, so subtracting the playback position
    // recovers the stream's start date. Both quantities are in milliseconds.
    double dateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media has no start date (no live stream began
    // exactly at the 1970 epoch, so 0 is unambiguous).
    if (!dateMilliseconds)
        return MediaTime::invalidTime();

    double positionMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round away the sub-millisecond error the subtraction can introduce.
    return MediaTime::createWithDouble(round(dateMilliseconds - positionMilliseconds));
}
752
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    // When rendering through the AVPlayerLayer, rely on the cached
    // readyForDisplay state maintained via KVO.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return m_cachedIsReadyForDisplay;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Otherwise a frame is available if the video output already handed us a
    // pixel buffer, or has a new one for the current item time.
    if (m_videoOutput && (m_lastPixelBuffer || [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]]))
        return true;
#endif

    return m_videoFrameHasDrawn;
}
765
766 #if ENABLE(AVF_CAPTIONS)
// Returns the AVMediaCharacteristic array describing an out-of-band text
// track of the given kind (used for AVOutOfBandAlternateTrackMediaCharactersticsKey).
// Uses modern array literals throughout instead of the legacy
// arrayWithObjects:...nil form the non-manual branches previously used.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // With MediaToolbox's 2015 caption behavior, every track is marked
    // auxiliary and selection is handled manually.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Caption)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Subtitle)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Description)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];

    if (kind == PlatformTextTrack::Forced)
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];

    return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
}
788     
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    // Thin hook invoked when a platform text track's mode changes; defers to
    // the shared trackModeChanged() handling.
    trackModeChanged();
}
793     
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    // Push the mode (hidden/disabled/showing) requested for each out-of-band
    // track source onto the matching out-of-band InbandTextTrackPrivateAVF.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        // Tracks and sources are paired by the unique ID stored in the media
        // selection option's outOfBandIdentifier.
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual:(__bridge NSString *)uniqueID.get()])
                continue;
            
            // Map the platform-level mode onto the in-band track enum;
            // unknown modes fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
824 #endif
825
826
// Run the URL through NSURLProtocol canonicalization so it matches what the
// URL loading system would ultimately request; fall back to the original URL
// whenever canonicalization is not possible.
static NSURL *canonicalURL(const URL& url)
{
    NSURL *originalURL = url;
    if (url.isEmpty())
        return originalURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:originalURL]);
    if (!request)
        return originalURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    return canonicalRequest ? [canonicalRequest URL] : originalURL;
}
843
844 #if PLATFORM(IOS)
// Converts a WebCore Cookie into the NSHTTPCookie form AVFoundation consumes
// (see AVURLAssetHTTPCookiesKey in createAVAssetForURL). Builds the property
// dictionary directly from a literal rather than allocating an empty mutable
// dictionary and replacing its contents via -setDictionary:.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    // cookie.expires is in milliseconds since the epoch; NSHTTPCookieExpires
    // takes an NSDate in seconds, hence the division by 1000.
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);
    // Boolean cookie properties are presence-keyed: only set them when true.
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
862 #endif
863
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
{
    // Build the AVURLAsset creation options (reference restrictions, HTTP
    // headers, out-of-band tracks, cookies, cache policy) and create the
    // asset, wiring up the resource loader delegate when available.
    if (m_avAsset)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid cross-origin local<->remote references within the asset.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Propagate the page's Referer and User-Agent onto the asset's requests.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    // Prefer routing loads through WebKit's own URL loading when the key is
    // available; the canLoad* helpers probe for soft-linked symbols.
    if (canLoadAVURLAssetUseClientURLLoadingExclusively())
        [options setObject:@YES forKey:AVURLAssetUseClientURLLoadingExclusively];
#if PLATFORM(IOS)
    else if (canLoadAVURLAssetRequiresCustomURLLoadingKey())
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && canLoadAVURLAssetClientBundleIdentifierKey())
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

    // Pass the declared MIME type (with codecs, when known) out-of-band, but
    // only when it was explicitly provided rather than guessed from extension.
    auto type = player()->contentMIMEType();
    if (canLoadAVURLAssetOutOfBandMIMETypeKey() && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track so AVFoundation can offer it as an
    // alternate track; tracks are keyed by their uniqueId (see
    // synchronizeTextTrackState()).
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: (__bridge NSString *)label.get(),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: (__bridge NSString *)language.get(),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: (__bridge NSString *)uniqueID.get(),
                AVOutOfBandAlternateTrackSourceKey: (__bridge NSString *)url.get(),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the page's cookies for this URL to the asset so media requests
    // carry the same credentials as the page.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(url, cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        if (canLoadAVURLAssetHTTPCookiesKey())
            [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
    
    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // Route resource loading through WebCoreNSURLSession when the (private)
    // URLSession hooks exist on the resource loader.
    if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
985
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    // Install `item` as the player's current item. The replacement is always
    // performed on the main thread; off-main callers bounce through
    // dispatch_async, with RetainPtr captures keeping the player and item
    // alive until the block runs.
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
1002
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    // Create the AVPlayer, register KVO observers, and re-apply any state
    // (playback target, mute, item) that was set before the player existed.
    if (m_avPlayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebKit drives media selection itself; don't let AVFoundation choose.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR) && !PLATFORM(IOSMAC)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything.
        m_muted = false;
        [m_avPlayer.get() setMuted:m_muted];
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    // If the item was created first (see createAVPlayerItem), attach it now.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1051
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    // Create the AVPlayerItem for the current asset, register for its
    // notifications/KVO, and attach outputs (legible output, audio provider,
    // video output) used elsewhere in this class.
    if (m_avPlayerItem)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Prior notifications let the observer see the old value before a change.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    // If the player was created first (see createAVPlayer), attach the item now.
    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    // Ask for WebVTT-native cue payloads; WebKit renders captions itself, so
    // the player's own rendering is suppressed.
    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the new item/track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#endif

    setDelayCallbacks(false);
}
1101
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Kick off an asynchronous load of the asset's "playable" and "tracks"
    // keys, at most once per asset; completion is reported back on the main
    // thread as an AssetPlayabilityKnown notification.
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    auto weakThis = makeWeakPtr(*this);

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            // The player may have been destroyed while the load was in flight.
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1118
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    INFO_LOG(LOGIDENTIFIER);

    // A dispatch group joins the asset-level metadata load with one per-track
    // load; metadataLoaded is delivered only after all of them complete.
    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = makeWeakPtr(*this);
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only fan out to per-track loads if "tracks" actually loaded
            // (and the player still exists).
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balances the enter performed before the asset-level load.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1148
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    // Translate the cached AVPlayerItem state into the engine-neutral status.
    // Order matters: terminal states (unknown/failed) win, then buffering
    // signals in decreasing specificity, then plain ready-to-play.
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1167
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    // The inline video layer is owned by the fullscreen layer manager.
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
1172
void MediaPlayerPrivateAVFoundationObjC::updateVideoFullscreenInlineImage()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Synchronously grab the latest frame so the inline image handed to the
    // fullscreen layer manager is current.
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->updateVideoFullscreenInlineImage(m_lastImage);
#endif
}
1180
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, Function<void()>&& completionHandler)
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Capture the current frame first and pass it along — presumably used as
    // a placeholder while the video layer is reparented (see
    // VideoFullscreenLayerManager).
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
#else
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), nil);
#endif
    // Fullscreen state can affect whether external playback must be disabled.
    updateDisableExternalPlayback();
}
1191
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    // Fullscreen geometry is fully delegated to the layer manager.
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1196
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    // Remember the requested gravity even before a layer exists.
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    // Map the cross-platform gravity onto the AVPlayerLayer constant,
    // defaulting to aspect-fit for unexpected values.
    NSString *layerGravity = AVLayerVideoGravityResizeAspect;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        layerGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        layerGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
    }

    // Skip the layer mutation (and the text track resync) when unchanged.
    // Pointer comparison is sufficient for these framework string constants.
    if ([m_videoLayer videoGravity] == layerGravity)
        return;

    [m_videoLayer setVideoGravity:layerGravity];
    syncTextTrackBounds();
}
1220
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS) && !PLATFORM(WATCHOS)
    // setPIPModeEnabled: may be unavailable on some OS versions; probe first.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1231
1232 #if PLATFORM(IOS)
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // Return the cached timed metadata, or nil when none has been received.
    if (!m_currentMetaData)
        return nil;
    return m_currentMetaData.get();
}
1239
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    // The access log only exists once a player item does.
    if (!m_avPlayerItem)
        return emptyString();
    
    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1250
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    // The error log only exists once a player item does.
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1261 #endif
1262
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Toggle layer visibility inside a CATransaction with implicit animations
    // disabled so the change applies immediately.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1271     
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    // Start playback by setting the player's rate to the requested rate,
    // delaying callbacks so the resulting KVO notifications are deferred.
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    // Query the requested rate once so the cached value and the rate handed
    // to AVPlayer cannot diverge between the two calls.
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1283
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    // Pausing is expressed as a zero rate; the cached rate and the player are
    // updated together while callbacks are delayed.
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1295
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return PAL::toMediaTime(cmDuration);

    // An indefinite duration is how AVFoundation reports live streams.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();
}
1320
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    // Without loaded metadata and a player item there is no meaningful
    // playback position.
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerTime))
        return MediaTime::zeroTime();

    // Never report a negative time; clamp to zero.
    return std::max(PAL::toMediaTime(playerTime), MediaTime::zeroTime());
}
1332
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Cues built from partially-delivered metadata are stale once we seek.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;
    
    auto weakThis = makeWeakPtr(*this);

    // The completion handler may run on an arbitrary queue; hop back to the
    // main thread and bail if the player was destroyed in the meantime.
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1363
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    // Volume changes are deliberately ignored on iOS.
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1376
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    // Cache the muted state even before the AVPlayer exists; createAVPlayer()
    // re-applies it once the player is created.
    if (m_muted == muted)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", muted);

    m_muted = muted;

    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setMuted:m_muted];
}
1391
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // NOTE(review): this override only logs; caption visibility appears to be
    // handled elsewhere (e.g. via track selection) — confirm against callers.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", closedCaptionsVisible);
}
1401
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    // Cache the rate and forward it to AVPlayer while callbacks generated by
    // the change are delayed. Messaging a nil m_avPlayer is a harmless no-op.
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1409
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    // Report the rate cached by setRateDouble()/platformPlay() rather than
    // querying AVPlayer; zero until metadata is available.
    return metaDataAvailable() ? m_cachedRate : 0;
}
1417
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
    // -seekableTimeRangesLastModifiedTime only exists on macOS 10.13+/iOS 11+;
    // older targets report 0.
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1426
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
    // -liveUpdateInterval only exists on macOS 10.13+/iOS 11+; older targets
    // report 0.
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1435
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    // Spectral preserves pitch across rate changes; Varispeed does not.
    // Same mapping as in createAVPlayerItem().
    if (m_avPlayerItem)
        [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1441
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    // Convert the cached loaded-range snapshot into PlatformTimeRanges,
    // dropping invalid or empty CMTimeRanges. Empty result when no item.
    auto timeRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return timeRanges;

    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange))
            timeRanges->add(PAL::toMediaTime(timeRange.start), PAL::toMediaTime(CMTimeRangeGetEnd(timeRange)));
    }
    return timeRanges;
}
1456
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    // The earliest seekable position is the smallest start among the item's
    // valid, non-empty seekable ranges; zero when there are none.
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    bool foundValidRange = false;
    MediaTime earliest = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliest = std::min(earliest, PAL::toMediaTime(range.start));
    }
    return foundValidRange ? earliest : MediaTime::zeroTime();
}
1476
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // NOTE(review): unlike platformMinTimeSeekable(), this lazily fetches the
    // seekable ranges when the cache is empty (so m_cachedSeekableRanges is
    // presumably a mutable member) — confirm the asymmetry is intentional.
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    // The latest seekable position is the largest end among valid, non-empty
    // ranges; a default-constructed MediaTime (zero) when there are none.
    MediaTime maxTimeSeekable;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        MediaTime endOfRange = PAL::toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeSeekable < endOfRange)
            maxTimeSeekable = endOfRange;
    }
    return maxTimeSeekable;
}
1494
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    // The furthest buffered position is the largest end among the cached
    // loaded ranges, skipping invalid or empty entries; zero when no ranges
    // have been cached.
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime furthestLoaded;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        furthestLoaded = std::max(furthestLoaded, PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }

    return furthestLoaded;
}
1513
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    // Size is unknowable until metadata has arrived.
    if (!metaDataAvailable())
        return 0;

    // Sum the per-track sample data lengths once; subsequent calls reuse the
    // cached total.
    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1527
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    // The parameter is a by-value sink: move it into the member instead of
    // copying, avoiding a needless retain/release pair.
    m_avAsset = WTFMove(asset);
}
1532
// Maps the loading state of the asynchronously-inspected AVAsset keys onto
// the engine's AssetStatus enumeration, and additionally gates "playable" on
// a (cached) hardware-decode policy check of every track.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    // Inspect each metadata key we asked AVFoundation to load; the weakest
    // status across all keys wins.
    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];

        if (error)
            ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // When hardware-support checking is disabled, consider every track playable.
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    // Evaluate the hardware-decode requirement at most once; m_tracksArePlayable
    // appears to be an optional used as a cache (unset means "not yet checked").
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    // "Playable" requires both AVFoundation's own playable flag and our
    // hardware-decode policy result.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1573
// Returns the Cocoa error code from loading the asset's "playable" key, or 0
// when there is no asset or no error (messaging nil — [error code] — yields 0).
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return [error code];
}
1583
// Paints the current video frame into the given graphics context, preferring
// the AVPlayerItemVideoOutput path when it has a frame available and falling
// back to the AVAssetImageGenerator path otherwise.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Delay callbacks while painting, and keep any Objective-C exceptions
    // thrown by the media APIs from propagating into WebCore.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that at least one frame has been drawn for this media.
    m_videoFrameHasDrawn = true;
}
1604
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    // paint() is best effort. Bail out when there is nothing useful to do:
    // metadata has not arrived or painting is disabled…
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // …a layer is already rendering the video, or no image generator /
    // video output exists yet.
    if (currentRenderingMode() == MediaRenderingToLayer || !hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1620
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    INFO_LOG(LOGIDENTIFIER);

    // Snapshot the frame at the current time; nothing to draw if the
    // generator produced no image.
    RetainPtr<CGImageRef> frameImage = createImageForTimeInRect(currentTime(), rect);
    if (!frameImage)
        return;

    // CGContextDrawImage draws bottom-up, so flip the context vertically
    // about the destination rect before drawing.
    GraphicsContextStateSaver stateSaver(context);
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect destinationRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, destinationRect.width(), destinationRect.height()), frameImage.get());
}
1635
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    // Lazily create the AVAssetImageGenerator on first use.
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    // Constrain the generated image to the paint rect, snapshot the frame
    // nearest to `time` (timescale 600), then convert the result into the
    // sRGB color space used by the rest of the pipeline.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> generatedImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> convertedImage = adoptCF(CGImageCreateCopyWithColorSpace(generatedImage.get(), sRGBColorSpaceRef()));

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating image took ", (MonotonicTime::now() - start).seconds());
#endif

    return convertedImage;
}
1656
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    // Report every MIME type AVFoundation can handle, as gathered by the
    // shared MIME type cache. (The closing brace was missing here, which
    // left the function body unterminated before the following #if block.)
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
}

1662 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
static bool keySystemIsSupported(const String& keySystem)
{
    // FairPlay Streaming (both identifiers) and Clear Key are the only key
    // systems this engine understands.
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1669 #endif
1670
// Engine-selection entry point: answers whether this backend can play the
// given content type, following the canPlayType() tri-state contract
// (IsSupported / MayBeSupported / IsNotSupported).
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    // MSE playback is handled by a dedicated engine, not this one.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    // Likewise for MediaStream playback.
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    // The container must be on the static allow list or decodable by
    // AVFoundation on this system.
    if (!staticMIMETypeList().contains(containerType) && !AVFoundationMIMETypeCache::singleton().canDecodeType(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // With codecs present, ask AVFoundation for a definitive answer on the
    // full "container; codecs=..." string.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1700
// Returns whether this engine supports the given key system (optionally
// constrained by MIME type). Only meaningful with legacy EME enabled.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS. (This previously returned
        // MediaPlayer::IsNotSupported — an enum value — from a bool function;
        // it only worked because that enumerator happens to be 0.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // The MIME type, when given, must be decodable by this engine.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().canDecodeType(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1726
1727 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1728 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Satisfies an AVAssetResourceLoadingRequest directly from in-memory key
// data: publishes the content length, responds with the byte range the
// request asked for (clamped to the key size), and finishes the load.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        // Clamp the requested window to the available key bytes.
        long long start = [dataRequest currentOffset];
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A window that lies entirely outside the key data cannot be satisfied.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1754 #endif
1755
// AVAssetResourceLoader delegate hook: decides whether WebCore will service
// this loading request. Returns true when the request is (or will be)
// handled asynchronously by us, false to hand it back to AVFoundation.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
    // "skd" URLs are FairPlay Streaming key requests.
    if (scheme == "skd") {
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        // NOTE(review): the element count divides by sizeof(unsigned char)
        // (== 1, a no-op); it reads as if a UTF-16 unit count was intended —
        // confirm the intended divisor.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        RefPtr<Uint8Array> initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        // Offer the initData to the page; if no key is needed, do not wait.
        if (!player()->keyNeeded(initData.get()))
            return false;
#endif
        // Remember the request so the key can be attached once it arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
        // With a modern CDM instance attached, key delivery goes through
        // AVContentKeySession instead; do not wait here.
        if (m_cdmInstance)
            return false;

        RetainPtr<NSData> keyURIData = [keyURI dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
        auto keyURIBuffer = SharedBuffer::create(keyURIData.get());
        player()->initializationDataEncountered("skd"_s, keyURIBuffer->tryCreateArrayBuffer());
#endif
        return true;
    }

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // "clearkey" URLs carry the key id in the URL's resource specifier.
    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        auto encodedKeyId = UTF8Encoding().encode(keyID, UnencodableHandling::URLEncodedEntities);

        auto initData = Uint8Array::create(encodedKeyId.size());
        initData->setRange(encodedKeyId.data(), encodedKeyId.size(), 0);

        // Serve the request immediately when the key is already cached.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif
#endif

    // All other schemes are serviced through WebCore's resource loader; the
    // map keeps the loader alive until the request stops or is cancelled.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add((__bridge CFTypeRef)avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1819
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // Stop the resource loader servicing this request, if one was created by
    // shouldWaitForLoadingOfResource(). The map entry itself is removed in
    // didStopLoadingRequest(). (An unused local computing the URL scheme was
    // removed here.)
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get((__bridge CFTypeRef)avRequest))
        resourceLoader->stopLoading();
}
1829
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    // Drop the loader associated with this request; the bridged request
    // pointer is the map key.
    auto requestKey = (__bridge CFTypeRef)avRequest;
    m_resourceLoaderMap.remove(requestKey);
}
1834 #endif
1835
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    // Both the AVFoundation and CoreMedia frameworks must be loadable for
    // this engine to be usable.
    if (!AVFoundationLibrary())
        return false;
    return isCoreMediaFrameworkAvailable();
}
1840
// Would snap a requested time to the nearest sample boundary; no conversion
// is currently possible (see FIXME), so the input is returned unchanged in
// every case.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1849
// How long (in seconds) a cached currentTime value may be reused before
// being refreshed: never on iOS / macOS 10.10+, five seconds on older macOS.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1858
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;

    // Apply the new gravity inside a transaction with implicit actions
    // disabled so the change is not animated.
    NSString* newGravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:newGravity];
    [CATransaction commit];
}
1875
// Returns the first track in the array whose isEnabled flag is set, or nil
// when none is.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger enabledIndex = [tracks indexOfObjectPassingTest:^(id candidate, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(candidate) isEnabled];
    }];
    return enabledIndex == NSNotFound ? nil : [tracks objectAtIndex:enabledIndex];
}
1885
// Re-derives hasVideo/hasAudio/caption state whenever the track collection
// changes: from the AVAsset before an AVPlayerItem exists, and from the
// cached item tracks (plus media-selection groups) afterwards.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary-audio language so a change can be
    // reported as a characteristics change at the end.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic-change notifications until this method finishes.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
        // Apply the preferred transform so rotated media reports its
        // display-oriented size.
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify each enabled item track by its asset track's media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Selected media-selection options count toward audio/video presence too.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the media-selection API for caption discovery when available.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Fall back to legacy closed-caption tracks when no legible options exist.
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());

    sizeChanged();

    // A change in the primary audio language is a characteristics change.
    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the current audio track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1996
1997 #if ENABLE(VIDEO_TRACK)
1998
// Diffs the current AVPlayerItemTracks of a given media type against the
// previously known track wrappers in `oldItems`, notifying the MediaPlayer of
// removed and added tracks and rebuilding `oldItems` to match the new state.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Filter the incoming tracks down to those of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    // Collect the platform tracks we already have wrappers for.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the old wrappers into those to keep and those to remove.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly appeared platform track.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    // Publish the rebuilt list before notifying, so callbacks observe the
    // up-to-date state.
    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2042
2043 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2044
// Media-selection-group variant of the diff above: refreshes the group's
// options for the given characteristics, diffs them against the previously
// known wrappers in `oldItems`, notifies the MediaPlayer of removals and
// additions, and rebuilds `oldItems` to match.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Collect the group's current options that still have a live platform
    // AVMediaSelectionOption behind them.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Collect the options our existing wrappers refer to.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition existing wrappers: drop those whose option disappeared (or
    // was never set), keep the rest.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly appeared option.
    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    // Publish the rebuilt list before notifying, so callbacks observe the
    // up-to-date state.
    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2104
2105 #endif
2106
// Synchronizes m_audioTracks with the platform's current audio tracks,
// preferring the media-selection-group path when available and falling back
// to diffing the cached AVPlayerItemTracks otherwise.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the audible selection group the first time it is needed.
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    // Note: the `else` below pairs with this `if` across the #endif, making
    // the track-diff fall back to the AVPlayerItemTrack path when no
    // selection group exists.
    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh the cached properties (language, label, …) of each wrapper.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
#endif
}
2133
// Synchronizes m_videoTracks with the platform's current video tracks, then
// augments the list from the visual media-selection group when available.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection group the first time it is needed.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh the cached properties of the *video* tracks. (This previously
    // iterated m_audioTracks — a copy/paste slip from updateAudioTracks() —
    // leaving video track properties stale.)
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
#endif
}
2159
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
    // Delegated entirely to the fullscreen layer manager.
    auto& layerManager = *m_videoFullscreenLayerManager;
    return layerManager.requiresTextTrackRepresentation();
}
2164
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
    // Delegated entirely to the fullscreen layer manager.
    auto& layerManager = *m_videoFullscreenLayerManager;
    layerManager.syncTextTrackBounds();
}
2169
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    // Delegated entirely to the fullscreen layer manager.
    auto& layerManager = *m_videoFullscreenLayerManager;
    layerManager.setTextTrackRepresentation(representation);
}
2174
2175 #endif // ENABLE(VIDEO_TRACK)
2176
2177 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2178
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    // Lazily create the Web Audio provider and point it at the first enabled
    // audible track on first request.
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    return m_provider.get();
}
2187
2188 #endif
2189
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    // Publish the cached presentation size as the natural size, but only once
    // an asset exists.
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2197
void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
{
    // Propagate the asset's resolved URL, falling back to an empty URL when
    // no asset is loaded.
    if (m_avAsset)
        setResolvedURL(URL([m_avAsset resolvedURL]));
    else
        setResolvedURL(URL());
}
2202
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // CORS status is only knowable when media loading goes through WebKit's
    // own NSURLSession; otherwise assume the check did not pass.
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled() && [resourceLoader respondsToSelector:@selector(URLSession)]) {
        WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
        if ([session isKindOfClass:[WebCoreNSURLSession class]])
            return session.didPassCORSAccessChecks;
    }
#endif
    return false;
}
2217
std::optional<bool> MediaPlayerPrivateAVFoundationObjC::wouldTaintOrigin(const SecurityOrigin& origin) const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    // Without WebKit's own NSURLSession in play, answer a definite "no".
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled() || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *loaderSession = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([loaderSession isKindOfClass:[WebCoreNSURLSession class]])
        return [loaderSession wouldTaintOrigin:origin];
#endif
    // Indeterminate: let the caller decide how to treat the unknown case.
    return std::nullopt;
}
2232
2233
2234 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2235
// Creates the AVPlayerItemVideoOutput used to pull decoded frames, wires up
// its pull delegate, and attaches it to the player item. No-op when there is
// no player item yet or an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox, let the output pick its preferred pixel format.
    NSDictionary* attributes = nil;
#else
    // Otherwise request BGRA buffers. Uses the same dictionary-literal style
    // as the attributes built in updateLastImage().
    NSDictionary* attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
}
2255
// Detaches the video output from the player item (if any) and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    // Remove the output from the item before dropping our reference.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    INFO_LOG(LOGIDENTIFIER);

    // Clear the RetainPtr with nullptr rather than the integer literal 0.
    m_videoOutput = nullptr;
}
2268
bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
{
    if (!m_avPlayerItem)
        return false;

    // Make sure there is a video output to pull frames from.
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    // Only copy a buffer when the output has a frame newer than the one we
    // already hold.
    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:itemTime])
        return false;

    m_lastPixelBuffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil]);
    // The cached CGImage no longer matches the new buffer; invalidate it.
    m_lastImage = nullptr;
    return true;
}
2287
// Returns whether a frame can be displayed right now: either an image is already
// cached, or the video output has a new pixel buffer for the current item time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously decoded image counts as an available frame.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2301
// Refreshes m_lastImage from the most recent pixel buffer. When called with
// UpdateType::UpdateSynchronously and no frame is available yet, blocks (bounded,
// see waitForVideoOutputMediaDataWillChange) until one arrives or the wait times out.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage(UpdateType type)
{
#if HAVE(CORE_VIDEO)
    if (!m_avPlayerItem)
        return;

    if (type == UpdateType::UpdateSynchronously && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
        return;

    // Lazily create the conformer that converts CVPixelBuffers into drawable images.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        NSDictionary *attributes = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((__bridge CFDictionaryRef)attributes);
    }

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating buffer took ", (MonotonicTime::now() - start).seconds());
#endif
#endif // HAVE(CORE_VIDEO)
}
2337
// Draws the current frame into the given graphics context, applying the video
// track's preferred transform so rotated/flipped media renders correctly.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    updateLastImage(UpdateType::UpdateSynchronously);
    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    INFO_LOG(LOGIDENTIFIER);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination through the inverse transform so that, after concatCTM
    // below, the image lands exactly in outputRect with the correct orientation.
    FloatRect transformedOutputRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2364
// Copies the current video frame's pixel buffer into the given GL texture via the
// (lazily created) VideoTextureCopierCV. Returns false when no frame exists.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    ASSERT(context);

    // Refresh the cached buffer; bail out if no frame has ever been produced.
    updateLastPixelBuffer();
    if (!m_lastPixelBuffer)
        return false;

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    size_t bufferWidth = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t bufferHeight = CVPixelBufferGetHeight(m_lastPixelBuffer.get());
    return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), bufferWidth, bufferHeight, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2381
// Returns the (possibly just refreshed) native image for the current playback time.
// May return null if no frame has been produced yet.
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2387
// Blocks the calling thread for up to one second until the video output signals
// (via outputMediaDataWillChange on the pull-delegate queue) that new media data
// is available. Logs an error on timeout instead of waiting forever.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Ask for a notification as soon as any new media data becomes available.
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    if (!m_videoOutputSemaphore.waitFor(1_s))
        ERROR_LOG(LOGIDENTIFIER, "timed out");
}
2397
// Video-output delegate callback (pull-delegate queue): wakes any thread blocked
// in waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    m_videoOutputSemaphore.signal();
}
2402
2403 #endif
2404
2405 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2406
// Removes and returns the pending resource-loading request for the given key URI,
// or a null RetainPtr if none is queued.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2411
// Called when a decryption key becomes available: fulfills every pending loading
// request whose key is now cached, then drops those entries from the map.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    // Collect satisfied identifiers first; removing while iterating the map
    // would invalidate the iteration.
    Vector<String> satisfiedKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        satisfiedKeyIds.append(entry.key);
    }

    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2431
// Clears the weak reference to the legacy CDM session; asserts the caller is
// removing the session we actually hold.
void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2437
// Creates a legacy EME session for a supported key system. Ownership transfers to
// the caller; we retain only a weak pointer (cleared via removeSession()).
std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = makeWeakPtr(*session);
    return WTFMove(session);
}
2446 #endif
2447
2448 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Called when output becomes obscured because the display path lacks sufficient
// content protection (e.g. HDCP); forwards the state to whichever CDM is active.
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // 'HDCP' four-char code is the error surfaced to legacy sessions.
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_cdmInstance)
        m_cdmInstance->outputObscuredDueToInsufficientExternalProtectionChanged(newValue);
#elif !ENABLE(LEGACY_ENCRYPTED_MEDIA)
    UNUSED_PARAM(newValue);
#endif
}
2463 #endif
2464
2465 #if ENABLE(ENCRYPTED_MEDIA)
// Attaches a FairPlay Streaming CDM instance: detaches any previous instance and
// registers the current asset as a recipient of its content key session.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    // Only FairPlay Streaming instances are supported here.
    if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
        return;

    auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    if (&fpsInstance == m_cdmInstance)
        return;

    if (m_cdmInstance)
        cdmInstanceDetached(*m_cdmInstance);

    m_cdmInstance = &fpsInstance;
    [m_cdmInstance->contentKeySession() addContentKeyRecipient:m_avAsset.get()];
#else
    UNUSED_PARAM(instance);
#endif
}
2485
// Detaches the current CDM instance, unregistering the asset from its content key
// session. Asserts the instance being detached is the one we hold.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceDetached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);
    [m_cdmInstance->contentKeySession() removeContentKeyRecipient:m_avAsset.get()];
    m_cdmInstance = nullptr;
#else
    UNUSED_PARAM(instance);
#endif
}
2496
// Finishes every queued key-loading request, marking its content-information
// request (if any) as streaming key delivery so AVFoundation can proceed.
void MediaPlayerPrivateAVFoundationObjC::attemptToDecryptWithInstance(CDMInstance&)
{
    // Move the map into a local, leaving the member empty before iterating.
    auto keyURIToRequestMap = WTFMove(m_keyURIToRequestMap);
    for (auto& request : keyURIToRequestMap.values()) {
        if (auto *infoRequest = request.get().contentInformationRequest)
            infoRequest.contentType = AVStreamingKeyDeliveryContentKeyType;
        [request finishLoading];
    }
}
2506 #endif
2507
2508 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2509
// Rebuilds the list of legacy closed-caption tracks from the cached
// AVPlayerItemTracks, reusing existing track objects where possible and reporting
// new/removed tracks via processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Deselect any legible option so caption rendering stays under WebKit's control.
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Assume every current track was removed; tracks matched below are spared.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove() doesn't disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once an element has been
            // removed above, the two vectors no longer share indices, so indexing
            // m_textTracks here could compare against the wrong track. This matches the
            // parallel logic in processMediaSelectionOptions().
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2544
2545 #endif
2546
// Returns the asset's audible tracks, or nil if the asset is missing or its
// "tracks" key has not finished loading (querying earlier is unsafe).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2557
2558 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2559
// Returns whether the asset's media selection groups are safe to query, i.e. the
// corresponding asynchronous key has finished loading.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2570
// Legible (subtitle/caption) selection group, or nil until groups have loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2578
// Audible (audio) selection group, or nil until groups have loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2586
// Visual (video) selection group, or nil until groups have loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2594
// Synchronizes m_textTracks with the asset's legible media selection options:
// creates tracks for newly appeared options, spares existing matches, and reports
// the rest as removed via processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every current track was removed; options matched below are spared.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove() doesn't disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are matched elsewhere; skip them here.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2650
// Lazily creates the single in-band metadata text track and registers it with the
// player. Subsequent calls are no-ops.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2660
// Forwards cue data from the legible output to the currently selected text track;
// dropped when no track is selected. `time` must be non-negative.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue((__bridge CFArrayRef)attributedStrings, (__bridge CFArrayRef)nativeSamples, time);
}
2670
// Resets all cue state on the currently selected text track (e.g. after a seek).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_currentTextTrack)
        return;
    
    m_currentTextTrack->resetCueValues();
}
2680
2681 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2682
// Switches the active text track, updating AVFoundation's caption display and
// legible media selection to match. Passing null deselects captions entirely.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");

    m_currentTextTrack = track;

    // NOTE(review): in the first branch below, the ALLOW_DEPRECATED_DECLARATIONS_*
    // macros sit between the `if` and its intended body; this relies on the macros
    // expanding to pragmas rather than statements — confirm before restructuring.
    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
            ALLOW_DEPRECATED_DECLARATIONS_END
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        ALLOW_DEPRECATED_DECLARATIONS_BEGIN
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
        ALLOW_DEPRECATED_DECLARATIONS_END
    }

}
2715
// Returns the locale identifier of the primary audio track, caching the result in
// the mutable member m_languageOfPrimaryAudioTrack (this method is const).
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Serve the cached answer when we have one.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    ALLOW_DEPRECATED_DECLARATIONS_BEGIN
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    ALLOW_DEPRECATED_DECLARATIONS_END
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

    INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");

    return m_languageOfPrimaryAudioTrack;
}
2754
2755 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Returns whether playback is currently routed to a wireless target (e.g. AirPlay).
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    if (m_playbackTarget) {
        // AVFoundation targets report activity via the player; other target types
        // (e.g. mock) via the target object itself.
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    INFO_LOG(LOGIDENTIFIER, "- ", wirelessTarget);

    return wirelessTarget;
}
2775
// Maps the player's external playback type onto MediaPlayer's target-type enum.
// On non-iOS platforms only AirPlay is reported.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    if (!AVFoundationLibrary())
        return MediaPlayer::TargetTypeNone;

    switch ([m_avPlayer externalPlaybackType]) {
    case AVPlayerExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case AVPlayerExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case AVPlayerExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2801     
2802 #if PLATFORM(IOS)
// Returns a user-visible name for the device the player is externally playing to,
// or nil when unavailable. iOS + HAVE(CELESTIAL) only.
// NOTE(review): "exernal" is a long-standing typo for "external"; renaming requires
// updating the caller in wirelessPlaybackTargetName() as well.
static NSString *exernalDeviceDisplayNameForPlayer(AVPlayerType *player)
{
#if HAVE(CELESTIAL)
    if (!AVFoundationLibrary())
        return nil;

#if __IPHONE_OS_VERSION_MAX_ALLOWED >= 110000
    // Prefer the modern AVOutputContext path when available.
    if ([getAVOutputContextClass() respondsToSelector:@selector(sharedAudioPresentationOutputContext)]) {
        AVOutputContext *outputContext = [getAVOutputContextClass() sharedAudioPresentationOutputContext];

        // Older contexts without multi-device support expose a single device name.
        if (![outputContext respondsToSelector:@selector(supportsMultipleOutputDevices)]
            || ![outputContext supportsMultipleOutputDevices]
            || ![outputContext respondsToSelector:@selector(outputDevices)])
            return [outputContext deviceName];

        // Multiple simultaneous devices: join all their names.
        auto outputDeviceNames = adoptNS([[NSMutableArray alloc] init]);
        for (AVOutputDevice *outputDevice in [outputContext outputDevices]) {
ALLOW_DEPRECATED_DECLARATIONS_BEGIN
            auto outputDeviceName = adoptNS([[outputDevice name] copy]);
ALLOW_DEPRECATED_DECLARATIONS_END
            [outputDeviceNames addObject:outputDeviceName.get()];
        }

        return [outputDeviceNames componentsJoinedByString:@" + "];
    }
#endif

    // Legacy MediaRemote path: only meaningful for AirPlay.
    if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
        return nil;

    NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
    if (!pickableRoutes.count)
        return nil;

    NSString *displayName = nil;
    for (NSDictionary *pickableRoute in pickableRoutes) {
        if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
            continue;

        displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];

        NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
        if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
            break;

        // The route is a speaker or HDMI out, override the name to be the localized device model.
        NSString *localizedDeviceModel = [[UIDevice currentDevice] localizedModel];

        // In cases where a route with that name already exists, prefix the name with the model.
        BOOL includeLocalizedDeviceModelName = NO;
        for (NSDictionary *otherRoute in pickableRoutes) {
            if (otherRoute == pickableRoute)
                continue;

            if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
                includeLocalizedDeviceModelName = YES;
                break;
            }
        }

        if (includeLocalizedDeviceModelName)
            displayName =  [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
        else
            displayName = localizedDeviceModel;

        break;
    }

    return displayName;
#else
    UNUSED_PARAM(player);
    return nil;
#endif
}
2877 #endif
2878
// Returns the display name of the current wireless playback target, or an empty
// string when no player exists. May return a null String when no target is known.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

#if PLATFORM(IOS)
    // On iOS the name comes from the route the player is actually playing to.
    return exernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#else
    // Elsewhere the name comes from the explicitly set playback target, if any.
    return m_playbackTarget ? m_playbackTarget->deviceName() : String();
#endif
}
2894
// Returns whether wireless (external) video playback is disabled, refreshing the
// mutable cache m_allowsWirelessVideoPlayback from the player when one exists.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    // Without a player, answer from the last cached value.
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    INFO_LOG(LOGIDENTIFIER, "- ", !m_allowsWirelessVideoPlayback);

    return !m_allowsWirelessVideoPlayback;
}
2905
// Enables/disables wireless (external) video playback, caching the state even
// when no player exists yet.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    INFO_LOG(LOGIDENTIFIER, "- ", disabled);

    bool allowsExternalPlayback = !disabled;
    m_allowsWirelessVideoPlayback = allowsExternalPlayback;
    if (!m_avPlayer)
        return;

    // Suppress re-entrant notifications while poking the player.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:allowsExternalPlayback];
    setDelayCallbacks(false);
}
2917
2918 #if !PLATFORM(IOS)
2919
// Adopts a new wireless playback target. For AVFoundation targets, captures its
// AVOutputContext for later use in setShouldPlayToPlaybackTarget().
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    INFO_LOG(LOGIDENTIFIER);

    // A target without an active route cannot be played to.
    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2931
// Starts/stops routing playback to the current target. For AVFoundation targets
// this sets the player's outputContext; for mock targets it just notifies that
// the wireless state changed.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", shouldPlay);

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        if (!m_avPlayer)
            return;

        // Avoid redundant context churn when nothing actually changes.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    // Mock target: notify asynchronously on the main thread, guarding against
    // this object being destroyed in the meantime.
    setDelayCallbacks(true);
    auto weakThis = makeWeakPtr(*this);
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2972
2973 #endif // !PLATFORM(IOS)
2974
// iOS only: use external playback while an external screen is active only when the
// element is in standard fullscreen mode.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS)
    if (!m_avPlayer)
        return;

    // Guarded with respondsToSelector: since the setter may not exist everywhere.
    if ([m_avPlayer respondsToSelector:@selector(setUsesExternalPlaybackWhileExternalScreenIsActive:)])
        [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
2985
2986 #endif
2987
// KVO handler: caches the AVPlayerItem status and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2994
// Marks a likelyToKeepUp change as in flight; balanced by playbackLikelyToKeepUpDidChange().
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2999
// Caches the new likelyToKeepUp value; re-evaluates state once no status changes remain in flight.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3008
// Marks a bufferEmpty change as in flight; balanced by playbackBufferEmptyDidChange().
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
3013
// Caches the new bufferEmpty value; re-evaluates state once no status changes remain in flight.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3022
// Marks a bufferFull change as in flight; balanced by playbackBufferFullDidChange().
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
3027
// Caches the new bufferFull value; re-evaluates state once no status changes remain in flight.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3036
// KVO handler: caches the item's new seekable ranges and propagates the change.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    // The parameter is our own by-value copy, so move it into the cache instead of
    // copying — avoids an unnecessary retain/release pair on the NSArray.
    m_cachedSeekableRanges = WTFMove(seekableRanges);

    seekableTimeRangesChanged();
    updateStates();
}
3044
// KVO handler: caches the item's new loaded (buffered) ranges and propagates the change.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    // Move the by-value parameter into the cache instead of copying — avoids an
    // unnecessary retain/release pair on the NSArray.
    m_cachedLoadedRanges = WTFMove(loadedRanges);

    loadedTimeRangesChanged();
    updateStates();
}
3052
// Caches readiness-for-display; becoming ready can reveal a video track that has
// not been reported yet, so re-run track processing in that case.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
3060
3061 void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
3062 {