Fix runtime errors in simulator while playing media
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "DeprecatedGlobalSettings.h"
41 #import "Extensions3D.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "OutOfBandTextTrackPrivateAVF.h"
54 #import "PixelBufferConformerCV.h"
55 #import "PlatformTimeRanges.h"
56 #import "SecurityOrigin.h"
57 #import "SerializedPlatformRepresentationMac.h"
58 #import "SharedBuffer.h"
59 #import "TextEncoding.h"
60 #import "TextTrackRepresentation.h"
61 #import "TextureCacheCV.h"
62 #import "URL.h"
63 #import "VideoFullscreenLayerManagerObjC.h"
64 #import "VideoTextureCopierCV.h"
65 #import "VideoTrackPrivateAVFObjC.h"
66 #import "WebCoreAVFResourceLoader.h"
67 #import "WebCoreCALayerExtras.h"
68 #import "WebCoreNSURLSession.h"
69 #import <JavaScriptCore/DataView.h>
70 #import <JavaScriptCore/JSCInlines.h>
71 #import <JavaScriptCore/TypedArrayInlines.h>
72 #import <JavaScriptCore/Uint16Array.h>
73 #import <JavaScriptCore/Uint32Array.h>
74 #import <JavaScriptCore/Uint8Array.h>
75 #import <functional>
76 #import <objc/runtime.h>
77 #import <pal/avfoundation/MediaTimeAVFoundation.h>
78 #import <pal/spi/cocoa/QuartzCoreSPI.h>
79 #import <pal/spi/mac/AVFoundationSPI.h>
80 #import <wtf/BlockObjCExceptions.h>
81 #import <wtf/ListHashSet.h>
82 #import <wtf/NeverDestroyed.h>
83 #import <wtf/OSObjectPtr.h>
84 #import <wtf/text/CString.h>
85
86 #if ENABLE(AVF_CAPTIONS)
87 #include "TextTrack.h"
88 #endif
89
90 #import <AVFoundation/AVAssetImageGenerator.h>
91 #import <AVFoundation/AVAssetTrack.h>
92 #import <AVFoundation/AVMediaSelectionGroup.h>
93 #import <AVFoundation/AVMetadataItem.h>
94 #import <AVFoundation/AVPlayer.h>
95 #import <AVFoundation/AVPlayerItem.h>
96 #import <AVFoundation/AVPlayerItemOutput.h>
97 #import <AVFoundation/AVPlayerItemTrack.h>
98 #import <AVFoundation/AVPlayerLayer.h>
99 #import <AVFoundation/AVTime.h>
100
101 #if PLATFORM(IOS)
102 #import "WAKAppKitStubs.h"
103 #import <CoreImage/CoreImage.h>
104 #import <UIKit/UIDevice.h>
105 #import <mach/mach_port.h>
106 #else
107 #import <Foundation/NSGeometry.h>
108 #import <QuartzCore/CoreImage.h>
109 #endif
110
111 #if USE(VIDEOTOOLBOX)
112 #import <CoreVideo/CoreVideo.h>
113 #import <VideoToolbox/VideoToolbox.h>
114 #endif
115
116 #import "CoreVideoSoftLink.h"
117 #import "MediaRemoteSoftLink.h"
118
// Specialize std::iterator_traits for HashSet's iterator over
// MediaSelectionOptionAVFObjC refs so that standard algorithms can deduce
// value_type for this iterator.
// NOTE(review): only value_type is provided — presumably no call site needs
// difference_type/iterator_category; confirm before using algorithms that do.
119 namespace std {
120 template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
121     typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
122 };
123 }
124
125 #if ENABLE(AVF_CAPTIONS)
126 // Note: This must be defined before our SOFT_LINK macros:
127 @class AVMediaSelectionOption;
128 @interface AVMediaSelectionOption (OutOfBandExtensions)
129 @property (nonatomic, readonly) NSString* outOfBandSource;
130 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
131 @end
132 #endif
133
134 @interface AVURLAsset (WebKitExtensions)
135 @property (nonatomic, readonly) NSURL *resolvedURL;
136 @end
137
138 typedef AVPlayer AVPlayerType;
139 typedef AVPlayerItem AVPlayerItemType;
140 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
141 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
142 typedef AVMetadataItem AVMetadataItemType;
143 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
144 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
145 typedef AVAssetCache AVAssetCacheType;
146
147 #pragma mark - Soft Linking
148
149 // Soft-linking headers must be included last since they #define functions, constants, etc.
150 #import <pal/cf/CoreMediaSoftLink.h>
151
152 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
153
154 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
155
156 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
157 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
158 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
159 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
160 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
161 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
162 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
163 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)
164
165 SOFT_LINK_CLASS(CoreImage, CIContext)
166 SOFT_LINK_CLASS(CoreImage, CIImage)
167
168 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
169 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
170 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
171 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
172 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
173 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
174 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
175 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
176 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
177 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
178 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
179 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
180 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
182 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
183
184 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
185 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetRequiresCustomURLLoadingKey, NSString *)
186 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetOutOfBandMIMETypeKey, NSString *)
187 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetUseClientURLLoadingExclusively, NSString *)
188
189 #define AVPlayer initAVPlayer()
190 #define AVPlayerItem initAVPlayerItem()
191 #define AVPlayerLayer initAVPlayerLayer()
192 #define AVURLAsset initAVURLAsset()
193 #define AVAssetImageGenerator initAVAssetImageGenerator()
194 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
195 #define AVMetadataItem initAVMetadataItem()
196 #define AVAssetCache initAVAssetCache()
197
198 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
199 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
200 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
201 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
202 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
203 #define AVMediaTypeVideo getAVMediaTypeVideo()
204 #define AVMediaTypeAudio getAVMediaTypeAudio()
205 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
206 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
207 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
208 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
209 #define AVURLAssetRequiresCustomURLLoadingKey getAVURLAssetRequiresCustomURLLoadingKey()
210 #define AVURLAssetOutOfBandMIMETypeKey getAVURLAssetOutOfBandMIMETypeKey()
211 #define AVURLAssetUseClientURLLoadingExclusively getAVURLAssetUseClientURLLoadingExclusively()
212 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
213 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
214 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
215 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
216 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
217
218 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
219 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
220 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
221
222 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
223 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
224 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
225 SOFT_LINK_CLASS(AVFoundation, AVOutputContext)
226
227 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
228 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
229 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
230 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
231
232 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
233 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
234 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
235 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
236 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
237 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
238 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
239 #endif
240
241 #if ENABLE(AVF_CAPTIONS)
242 SOFT_LINK_POINTER(AVFoundation, AVURLAssetCacheKey, NSString*)
243 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
244 SOFT_LINK_POINTER(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString*)
245 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
246 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
247 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
248 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
249 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
250 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
254 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
255
256 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
257 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
258 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
259 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
260 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
261 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
262 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
263 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
264 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
265 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
266 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
267 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
268 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
269 #endif
270
271 #if ENABLE(DATACUE_VALUE)
272 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
273 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
274 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
275 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
276 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
277
278 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
279 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
280 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
281 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
282 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
283 #endif
284
285 #if PLATFORM(IOS)
286 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
287 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
288 #endif
289
290 SOFT_LINK_FRAMEWORK(MediaToolbox)
291 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
292
293 #if PLATFORM(IOS)
294 #if HAVE(CELESTIAL)
295 SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
296 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
297 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
298 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
299 #define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
300 #define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
301 #define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
302 #endif // HAVE(CELESTIAL)
303
304 SOFT_LINK_FRAMEWORK(UIKit)
305 SOFT_LINK_CLASS(UIKit, UIDevice)
306 #define UIDevice getUIDeviceClass()
307 #endif // PLATFORM(IOS)
308
309 using namespace WebCore;
310
311 enum MediaPlayerAVFoundationObservationContext {
312     MediaPlayerAVFoundationObservationContextPlayerItem,
313     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
314     MediaPlayerAVFoundationObservationContextPlayer,
315     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
316 };
317
// Objective-C observer that forwards AVFoundation KVO, notification, and
// (when available) legible-output callbacks to the C++ player object.
// When legible output is supported, it also acts as the
// AVPlayerItemLegibleOutputPushDelegate.
318 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
319 @interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
320 #else
321 @interface WebCoreAVFMovieObserver : NSObject
322 #endif
323 {
    // Raw back-pointer to the owning player. NOTE(review): -disconnect
    // presumably clears this before the player is destroyed — confirm in the
    // @implementation (not visible in this chunk).
324     MediaPlayerPrivateAVFoundationObjC* m_callback;
325     int m_delayCallbacks;
326 }
327 -(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
// Severs the link to the player so late callbacks become no-ops.
328 -(void)disconnect;
329 -(void)metadataLoaded;
// Invoked for AVPlayerItemDidPlayToEndTimeNotification (registration not in view).
330 -(void)didEnd:(NSNotification *)notification;
// KVO entry point. NOTE(review): keyPath is untyped (implicit id) and context
// is declared as the observation-context enum rather than the standard void* —
// unusual but matches the registration casts in this file.
331 -(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
332 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Legible-output (caption/subtitle cue) delivery and flush notifications.
333 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
334 - (void)outputSequenceWasFlushed:(id)output;
335 #endif
336 @end
337
338 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Resource-loader delegate that routes AVAssetResourceLoader requests back to
// the C++ player (custom URL loading, e.g. for media-source/blob URLs).
339 @interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    // Raw back-pointer; cleared via -setCallback: during player teardown
    // (see the destructor, which calls setCallback:0).
340     MediaPlayerPrivateAVFoundationObjC* m_callback;
341 }
342 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
343 - (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
344 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
345 @end
346 #endif
347
348 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for AVPlayerItemVideoOutput: notifies the C++ player when new
// video frames are available or the output sequence was flushed.
349 @interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    // Raw back-pointer; cleared via -setCallback: during player teardown.
350     MediaPlayerPrivateAVFoundationObjC *m_callback;
    // NOTE(review): unmanaged dispatch_semaphore_t — ownership/lifetime is not
    // visible in this chunk (the player also keeps m_videoOutputSemaphore and
    // releases it in its destructor); confirm which object owns it.
351     dispatch_semaphore_t m_semaphore;
352 }
353 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
354 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
355 - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
356 - (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
357 @end
358 #endif
359
360 namespace WebCore {
361 using namespace PAL;
362
363 static NSArray *assetMetadataKeyNames();
364 static NSArray *itemKVOProperties();
365 static NSArray *assetTrackMetadataKeyNames();
366 static NSArray *playerKVOProperties();
367 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
368
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the process-wide serial queue on which AVAssetResourceLoaderDelegate
// callbacks are delivered. Created lazily, exactly once.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
#endif
380
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the process-wide serial queue used for AVPlayerItemVideoOutput pull
// delegate callbacks. Created lazily, exactly once.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
#endif
392
// Registers this AVFoundation-backed engine with the MediaPlayer factory.
// No-op when AVFoundation cannot be soft-linked; also kicks off async loading
// of the supported MIME type cache.
393 void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
394 {
395     if (!isAvailable())
396         return;
397
    // The lambda is the per-player constructor; the remaining parameters are
    // the engine's static capability/cache-management hooks.
398     registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
399             getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
400     AVFoundationMIMETypeCache::singleton().loadTypes();
401 }
402
// Returns the AVAssetCache rooted at |path|. An empty path falls back to a
// "MediaCache" directory inside the system temporary directory.
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    NSURL *cacheDirectoryURL;
    if (path.isEmpty())
        cacheDirectoryURL = [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES];
    else
        cacheDirectoryURL = [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:cacheDirectoryURL];
}
414
// Collects the security origins of every entry in the media cache at |path|.
// Cache keys are URL strings; keys that do not parse as valid URLs are skipped.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> cachedOrigins;
    for (NSString *cacheKey in [assetCacheForPath(path) allKeys]) {
        URL keyURL = URL(URL(), cacheKey);
        if (!keyURL.isValid())
            continue;
        cachedOrigins.add(SecurityOrigin::create(keyURL));
    }
    return cachedOrigins;
}
425
// Converts an NSDate into a WallTime (seconds since the Unix epoch).
// |date| must be non-nil; a nil date would silently yield epoch time because
// messaging nil returns 0.
static WallTime toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    return WallTime::fromRawSeconds([date timeIntervalSince1970]);
}
431
// Removes media cache entries (and their backing files) modified after
// |modifiedSince| from the cache at |path|. A |modifiedSince| at or before the
// epoch means "clear everything": the entire cache directory is deleted.
432 void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, WallTime modifiedSince)
433 {
434     AVAssetCacheType* assetCache = assetCacheForPath(path);
435     
    // First drop matching entries through the AVAssetCache API itself.
436     for (NSString *key in [assetCache allKeys]) {
437         if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
438             [assetCache removeEntryForKey:key];
439     }
440
441     NSFileManager *fileManager = [NSFileManager defaultManager];
442     NSURL *baseURL = [assetCache URL];
443
    // "Clear all" fast path: nuke the whole cache directory and stop.
444     if (modifiedSince <= WallTime::fromRawSeconds(0)) {
445         [fileManager removeItemAtURL:baseURL error:nil];
446         return;
447     }
448     
    // Then sweep the on-disk files AVAssetCache leaves behind. Only regular
    // files with the "CachedMedia-" prefix modified after the cutoff qualify.
449     NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
450     NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
451         propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
452         errorHandler:nil];
453     
    // Collect first, delete after: deleting while fast-enumerating the
    // directory enumerator would mutate what it is iterating.
454     RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
455     for (NSURL *fileURL : enumerator) {
456         NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
457     
458         if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
459             continue;
460         
461         if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
462             continue;
463         
464         if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
465             continue;
466         
467         [urlsToDelete addObject:fileURL];
468     }
469     
470     for (NSURL *fileURL in urlsToDelete.get())
471         [fileManager removeItemAtURL:fileURL error:nil];
472 }
473
// Removes every cache entry whose key (a URL string) belongs to one of the
// given security origins. Keys that do not parse as valid URLs are left alone.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (keyAsURL.isValid() && origins.contains(SecurityOrigin::create(keyAsURL)))
            [assetCache removeEntryForKey:key];
    }
}
485
// Constructor: wires up the Objective-C helper objects (KVO observer, video
// output pull delegate, resource loader delegate) that forward AVFoundation
// callbacks into this C++ object, and zero-initializes the cached player/item
// state. No AVPlayer/AVPlayerItem is created here; that happens during load.
// NOTE: initializer order must continue to match the member declaration order
// in the header (not visible here).
486 MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
487     : MediaPlayerPrivateAVFoundation(player)
488     , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
489     , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
490     , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
491     , m_videoFrameHasDrawn(false)
492     , m_haveCheckedPlayability(false)
493 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
494     , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
495     , m_videoOutputSemaphore(nullptr)
496 #endif
497 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
498     , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
499 #endif
500     , m_currentTextTrack(0)
501     , m_cachedRate(0)
502     , m_cachedTotalBytes(0)
503     , m_pendingStatusChanges(0)
504     , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
505     , m_cachedLikelyToKeepUp(false)
506     , m_cachedBufferEmpty(false)
507     , m_cachedBufferFull(false)
508     , m_cachedHasEnabledAudio(false)
509     , m_shouldBufferData(true)
510     , m_cachedIsReadyForDisplay(false)
511     , m_haveBeenAskedToCreateLayer(false)
512 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
513     , m_allowsWirelessVideoPlayback(true)
514 #endif
515 {
516 }
517
// Destructor: detaches every Objective-C delegate from this object before any
// further teardown, so no AVFoundation callback can land on a dead player,
// then destroys the video layer and cancels any in-flight load.
518 MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
519 {
520 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Null out the loader delegate's back-pointer and detach it from the asset.
521     [m_loaderDelegate.get() setCallback:0];
522     [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];
523
    // Fail any outstanding custom resource loads.
524     for (auto& pair : m_resourceLoaderMap)
525         pair.value->invalidate();
526 #endif
527 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
528     [m_videoOutputDelegate setCallback:0];
529     [m_videoOutput setDelegate:nil queue:0];
    // The semaphore is a manually-retained dispatch object; release it here.
530     if (m_videoOutputSemaphore)
531         dispatch_release(m_videoOutputSemaphore);
532 #endif
533
534     if (m_videoLayer)
535         destroyVideoLayer();
536
    // cancelLoad() removes KVO observers and clears the remaining AV objects.
537     cancelLoad();
538 }
539
// Cancels any in-progress load and tears down all AVFoundation state:
// rendering, notifications, the asset, text tracks, the player item, the
// player itself, and all cached per-item properties. Safe to call repeatedly
// (every step is guarded); also called from the destructor.
540 void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
541 {
542     INFO_LOG(LOGIDENTIFIER);
543     tearDownVideoRendering();
544
    // Stop all notification and delegate callbacks into the observer first.
545     [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
546     [m_objcObserver.get() disconnect];
547
548     // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
549     setIgnoreLoadStateChanges(true);
550     if (m_avAsset) {
551         [m_avAsset.get() cancelLoading];
552         m_avAsset = nil;
553     }
554
555     clearTextTracks();
556
557 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
558     if (m_legibleOutput) {
559         if (m_avPlayerItem)
560             [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
561         m_legibleOutput = nil;
562     }
563 #endif
564
    // KVO observers must be removed before the observed objects are released.
565     if (m_avPlayerItem) {
566         for (NSString *keyName in itemKVOProperties())
567             [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
568         
569         m_avPlayerItem = nil;
570     }
571     if (m_avPlayer) {
572         if (m_timeObserver)
573             [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
574         m_timeObserver = nil;
575
576         for (NSString *keyName in playerKVOProperties())
577             [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
578
579         [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
580         m_avPlayer = nil;
581     }
582
583     // Reset cached properties
584     m_pendingStatusChanges = 0;
585     m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
586     m_cachedSeekableRanges = nullptr;
587     m_cachedLoadedRanges = nullptr;
588     m_cachedHasEnabledAudio = false;
589     m_cachedPresentationSize = FloatSize();
590     m_cachedDuration = MediaTime::zeroTime();
591
    // Each cached track carries an "enabled" KVO registration; remove them.
592     for (AVPlayerItemTrack *track in m_cachedTracks.get())
593         [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
594     m_cachedTracks = nullptr;
595
596 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Detach the Web Audio source provider from the now-gone item/track.
597     if (m_provider) {
598         m_provider->setPlayerItem(nullptr);
599         m_provider->setAudioTrack(nullptr);
600     }
601 #endif
602
    // Re-enable load-state change processing now that cancellation is done.
603     setIgnoreLoadStateChanges(false);
604 }
605
// True once layer creation has been requested (see createVideoLayer()), even
// if the AVPlayerLayer itself is created asynchronously on the main thread.
606 bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
607 {
608     return m_haveBeenAskedToCreateLayer;
609 }
610
// True when a context-based renderer exists: either a video output (when the
// platform supports AVPlayerItemVideoOutput) or an image generator.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
619
// Creates the renderer used for painting into a graphics context: the video
// output when available, otherwise the (fallback) image generator.
620 void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
621 {
622 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
623     createVideoOutput();
624 #else
625     createImageGenerator();
626 #endif
627 }
628
// Creates the fallback AVAssetImageGenerator used to paint frames when no
// video output is available. Requires a loaded asset; no-op when the
// generator already exists. Note: "AVAssetImageGenerator" below is a macro
// expanding to the soft-linked class getter.
629 void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
630 {
631     using namespace PAL;
632     INFO_LOG(LOGIDENTIFIER);
633
634     if (!m_avAsset || m_imageGenerator)
635         return;
636
637     m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];
638
    // Zero tolerances force exact-time frame extraction (slower but accurate),
    // and the preferred track transform keeps rotated video upright.
639     [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
640     [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
641     [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
642     [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];
643 }
644
// Tears down both possible context renderers; each destroy helper is a no-op
// when its renderer does not exist.
645 void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
646 {
647 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
648     destroyVideoOutput();
649 #endif
650     destroyImageGenerator();
651 }
652
// Releases the fallback image generator, if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Clear with nullptr rather than the literal 0, matching how the other
    // smart-pointer members are reset elsewhere in this file (e.g. cancelLoad()).
    m_imageGenerator = nullptr;
}
662
// Requests creation of the AVPlayerLayer. The actual work is bounced to the
// main thread; m_haveBeenAskedToCreateLayer is only set inside the hop so the
// pre-hop guard can coalesce repeated requests.
663 void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
664 {
665     if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
666         return;
667
    // weakThis guards against the player being destroyed before the main
    // thread runs this block.
668     callOnMainThread([this, weakThis = createWeakPtr()] {
669         if (!weakThis)
670             return;
671
        // Re-check on the main thread: state may have changed since the hop.
672         if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
673             return;
674         m_haveBeenAskedToCreateLayer = true;
675
676         if (!m_videoLayer)
677             createAVPlayerLayer();
678
679 #if USE(VIDEOTOOLBOX) && HAVE(AVFOUNDATION_VIDEO_OUTPUT)
        // With VideoToolbox we also want a video output alongside the layer.
680         if (!m_videoOutput)
681             createVideoOutput();
682 #endif
683
        // Let the client know painting now goes through the layer.
684         player()->client().mediaPlayerRenderingModeChanged(player());
685     });
686 }
687
// Creates the AVPlayerLayer, attaches it to the player, registers KVO for
// readyForDisplay, and hands the layer to the fullscreen layer manager.
// Runs on the main thread (called from createVideoLayer()'s main-thread hop).
688 void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
689 {
690     if (!m_avPlayer)
691         return;
692
693     m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
694     [m_videoLayer setPlayer:m_avPlayer.get()];
695
696 #ifndef NDEBUG
697     [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
698 #endif
    // readyForDisplay drives hasAvailableVideoFrame() via the cached flag;
    // destroyVideoLayer() removes this observer.
699     [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
700     updateVideoLayerGravity();
701     [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
702     IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
703     INFO_LOG(LOGIDENTIFIER);
704
705     m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);
706
707 #if PLATFORM(IOS) && !ENABLE(EXTRA_ZOOM_MODE)
    // setPIPModeEnabled: is a private-API selector; guard its availability.
708     if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
709         [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
710 #endif
711 }
712
// Destroys the AVPlayerLayer. The KVO observer added in createAVPlayerLayer()
// must be removed before the layer is detached and released.
713 void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
714 {
715     if (!m_videoLayer)
716         return;
717
718     INFO_LOG(LOGIDENTIFIER);
719
720     [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
721     [m_videoLayer setPlayer:nil];
722     m_videoFullscreenLayerManager->didDestroyVideoLayer();
723
724     m_videoLayer = nil;
725 }
726
// Returns the media's start date in milliseconds since the epoch, or an
// invalid time when the media has no embedded date.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // The item's currentDate advances with playback, so the playback offset
    // must be subtracted back out to recover the start date.
    double dateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the file has no start date; no live stream
    // was made during the epoch (1970), so 0 unambiguously means "none".
    if (!dateMilliseconds)
        return MediaTime::invalidTime();

    double playbackOffsetMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round away the sub-second error the subtraction can introduce.
    return MediaTime::createWithDouble(round(dateMilliseconds - playbackOffsetMilliseconds));
}
741
// Returns true once at least one video frame is available for display on
// whichever rendering path (layer or pixel-buffer output) is active.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    if (currentRenderingMode() == MediaRenderingToLayer)
        return m_cachedIsReadyForDisplay;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Either a pixel buffer was already pulled, or the output has a fresh one
    // for the current item time.
    if (m_videoOutput && (m_lastPixelBuffer || [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]]))
        return true;
#endif

    return m_videoFrameHasDrawn;
}
754
755 #if ENABLE(AVF_CAPTIONS)
// Maps an out-of-band text track kind to the AVFoundation media
// characteristics used when registering the track on the asset.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // With the 2015 manual-selection captioning behavior, every out-of-band
    // track is tagged as auxiliary content regardless of kind.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        // Captions, subtitles, and any unrecognized kind share one characteristic.
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
777     
// Out-of-band caption hook: forwards a platform track mode change to the
// shared trackModeChanged() handling.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
782     
// Pushes the mode (showing/hidden/disabled) of each out-of-band track source
// onto the matching OutOfBandTextTrackPrivateAVF. Sources and tracks are
// paired via the unique ID stored in the selection option's
// outOfBandIdentifier when the asset was created.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        // Safe downcast: category check above guarantees the concrete type.
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Default to Hidden if the source mode is unrecognized.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
813 #endif
814
815
// Runs the URL through NSURLProtocol canonicalization so the asset is created
// with the same URL form the loading machinery will use; falls back to the
// unmodified URL whenever canonicalization is unavailable.
static NSURL *canonicalURL(const URL& url)
{
    NSURL *originalURL = url;
    if (url.isEmpty())
        return originalURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:originalURL]);
    if (!request)
        return originalURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    return canonicalRequest ? [canonicalRequest URL] : originalURL;
}
832
833 #if PLATFORM(IOS)
// Converts a WebCore Cookie into the NSHTTPCookie form AVFoundation accepts
// for AVURLAssetHTTPCookiesKey.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([[NSMutableDictionary alloc] init]);
    [properties setObject:cookie.name forKey:NSHTTPCookieName];
    [properties setObject:cookie.value forKey:NSHTTPCookieValue];
    [properties setObject:cookie.domain forKey:NSHTTPCookieDomain];
    [properties setObject:cookie.path forKey:NSHTTPCookiePath];
    // Cookie expiry is in milliseconds; NSDate expects seconds.
    [properties setObject:[NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)] forKey:NSHTTPCookieExpires];
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
851 #endif
852
// Builds the AVURLAsset for the given URL, assembling the creation-options
// dictionary (reference restrictions, HTTP headers, custom-loading keys,
// MIME type hint, out-of-band tracks, cookies, cache policy) and wiring the
// resource-loader delegate. Idempotent: returns early if an asset exists.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
{
    if (m_avAsset)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid cross-origin local<->remote references inside the asset.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    // These keys are soft-linked; the truthiness checks below guard against
    // running on systems where a key is unavailable.
    if (AVURLAssetUseClientURLLoadingExclusively)
        [options setObject:@YES forKey:AVURLAssetUseClientURLLoadingExclusively];
#if PLATFORM(IOS)
    else if (AVURLAssetRequiresCustomURLLoadingKey)
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

    // Pass the declared MIME type (with codecs when known) so AVFoundation
    // need not sniff, but only when the type wasn't merely inferred from the
    // file extension.
    auto type = player()->contentMIMEType();
    if (AVURLAssetOutOfBandMIMETypeKey && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    // Register out-of-band text tracks on the asset; the unique ID stored
    // here is later matched in synchronizeTextTrackState().
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Forward the page's cookies so media requests authenticate like the page.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(url, cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        if (AVURLAssetHTTPCookiesKey)
            [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
    
    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // Route media loads through WebKit's network stack via WebCoreNSURLSession
    // when the SPI is present (selector checks) and the setting is enabled.
    if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
974
// Swaps the AVPlayer's current item. The replacement must happen on the main
// thread, so when called elsewhere the work is dispatched there, with the
// player and item retained across the hop.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Retain both objects so they outlive this call until the block runs.
    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
991
// Creates the AVPlayer, registers KVO observers, and replays any state
// (playback target, muted, item) that was requested before the player
// existed. Idempotent: returns early if a player already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebKit drives media selection itself; disable AVFoundation's automatic choice.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR) && !ENABLE(MINIMAL_SIMULATOR)
    // Sleep assertions are device-only; the simulator lacks the support.
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything.
        m_muted = false;
        [m_avPlayer.get() setMuted:m_muted];
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1040
// Creates the AVPlayerItem from the current asset, registers notification and
// KVO observers, attaches the legible (caption) output, and connects the item
// to the player and audio provider. Idempotent: returns early if one exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Prior notifications let cached values be invalidated before they change.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebKit renders captions itself: take WebVTT cues from the legible
    // output but suppress AVFoundation's own rendering of them.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Reconnect the Web Audio source provider to the new item.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

#if ENABLE(EXTRA_ZOOM_MODE)
    createVideoOutput();
#endif

    setDelayCallbacks(false);
}
1090
// Asynchronously loads the asset's "playable" and "tracks" keys exactly once,
// then posts AssetPlayabilityKnown back on the main thread. The weak pointer
// guards against this object being destroyed before the completion runs.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1107
// Kicks off asynchronous loading of asset-level metadata, then of each
// track's metadata. A dispatch group joins all the loads: metadataLoaded is
// delivered (on the main thread) only after every completion has fired.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    INFO_LOG(LOGIDENTIFIER);

    // Entered once for the asset-level load; entered again per track below.
    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only fan out to per-track loads once the tracks key is known good.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1137
// Maps the cached AVPlayerItem KVO state onto WebKit's ItemStatus enum.
// The checks are ordered by priority: hard states (missing/unknown/failed)
// first, then buffering states, with ReadyToPlay as the fallback.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1156
// Exposes the native AVPlayer to clients that need direct access to it.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    INFO_LOG(LOGIDENTIFIER);
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationMediaPlayerType;
    platformMedia.media.avfMediaPlayer = m_avPlayer.get();
    return platformMedia;
}
1165
// Returns the inline video layer managed by the fullscreen layer manager.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
1170
// Refreshes the still image shown inline while video plays fullscreen.
// No-op on builds without pixel-buffer video output.
void MediaPlayerPrivateAVFoundationObjC::updateVideoFullscreenInlineImage()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Synchronous update so the image reflects the current frame.
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->updateVideoFullscreenInlineImage(m_lastImage);
#endif
}
1178
// Moves the video layer into (or out of) the given fullscreen hosting layer,
// passing the most recent frame as a placeholder image when available.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, Function<void()>&& completionHandler)
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
#else
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), nil);
#endif
    // Fullscreen state can change whether external (AirPlay) playback is allowed.
    updateDisableExternalPlayback();
}
1189
// Forwards the fullscreen layout rect to the fullscreen layer manager.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1194
// Applies the requested fullscreen video gravity to the AVPlayerLayer,
// caching it so a layer created later can pick it up. Keeps the caption
// bounds in sync when the gravity actually changes.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    NSString *videoGravity = AVLayerVideoGravityResizeAspect;
    if (gravity == MediaPlayer::VideoGravityResize)
        videoGravity = AVLayerVideoGravityResize;
    else if (gravity == MediaPlayer::VideoGravityResizeAspect)
        videoGravity = AVLayerVideoGravityResizeAspect;
    else if (gravity == MediaPlayer::VideoGravityResizeAspectFill)
        videoGravity = AVLayerVideoGravityResizeAspectFill;
    else
        ASSERT_NOT_REACHED();

    // Compare string values, not pointers: `==` only works when the getter
    // happens to return the identical constant instance, which AVFoundation
    // does not guarantee.
    if ([[m_videoLayer videoGravity] isEqualToString:videoGravity])
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1218
// Updates the layer's picture-in-picture state from the new fullscreen mode.
// Only meaningful on iOS builds that expose the setPIPModeEnabled: SPI.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS) && !ENABLE(EXTRA_ZOOM_MODE)
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1229
1230 #if PLATFORM(IOS)
// Returns the most recently cached timed-metadata items, or nil when none
// have been received yet.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // RetainPtr::get() already yields nil for a null pointer, so no explicit
    // null check is needed.
    return m_currentMetaData.get();
}
1237
// Returns the AVPlayerItem's access log rendered as text, or the empty
// string when there is no item. A nil log falls through harmlessly:
// messaging nil yields nil data, and the resulting String is null.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();
    
    AVPlayerItemAccessLog *log = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1248
// Returns the AVPlayerItem's error log rendered as text, or the empty string
// when there is no item. Mirrors accessLog() above.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *log = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1259 #endif
1260
// Shows or hides the video layer. Wrapped in a CATransaction with implicit
// animations disabled so visibility flips immediately rather than fading.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1269     
// Starts playback by setting the AVPlayer's rate to the requested rate.
// Callbacks are delayed around the change because setting the rate fires
// several KVO notifications.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1281
// Pauses playback by setting the AVPlayer's rate to 0, mirroring
// platformPlay() including the cached-rate bookkeeping.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1293
// Returns the media duration, preferring the player item's value (some
// assets never report a duration) and falling back to the asset's.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return PAL::toMediaTime(cmDuration);

    // Indefinite duration is how AVFoundation reports live streams.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();
}
1318
// Returns the current playback position, clamped to be non-negative, or zero
// whenever no numeric time is available yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    // Clamp: the item can briefly report a slightly negative time.
    return std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());
}
1330
// Performs an asynchronous seek with the given tolerances, reporting
// completion back to the main thread via seekCompleted(). Partial metadata
// cues are flushed first so stale cues don't survive the jump.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;
    
    // Weak pointer: the player may be destroyed before the seek completes.
    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1361
// Sets the player volume. On iOS the system controls media volume, so this
// is deliberately a no-op there.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1374
// Caches the muted state and forwards it to the AVPlayer when one exists;
// createAVPlayer() replays the cached value for players created later.
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    if (muted == m_muted)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", muted);

    m_muted = muted;

    if (m_avPlayer)
        [m_avPlayer.get() setMuted:m_muted];
}
1389
// Caption visibility is handled elsewhere in this player; this override only
// logs the request. The parameter is intentionally unused.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", closedCaptionsVisible);
}
1399
// Applies a new playback rate, caching it first so rate() reflects the
// requested value even before KVO confirms it. Messaging a nil player is a
// harmless no-op.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1407
// Returns the most recently requested playback rate, or 0 before metadata
// is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1415
// Returns when the item's seekable ranges last changed, or 0 on OS versions
// that predate the AVPlayerItem API (macOS < 10.13 / iOS < 11).
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1424
// Returns the live-stream update interval, or 0 on OS versions that predate
// the AVPlayerItem API (macOS < 10.13 / iOS < 11).
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1433
// Chooses the audio time-pitch algorithm: Spectral preserves pitch across
// rate changes, Varispeed does not.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1439
// Converts the cached loadedTimeRanges (NSValue-wrapped CMTimeRanges) into
// WebKit PlatformTimeRanges, skipping invalid or empty ranges. Returns an
// empty set when there is no player item.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto timeRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return timeRanges;

    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange))
            timeRanges->add(PAL::toMediaTime(timeRange.start), PAL::toMediaTime(CMTimeRangeGetEnd(timeRange)));
    }
    return timeRanges;
}
1454
// Returns the earliest start among the cached seekable ranges, or zero when
// no valid range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    // Track whether any range was valid so an all-invalid list yields zero
    // instead of positive infinity.
    MediaTime minTimeSeekable = MediaTime::positiveInfiniteTime();
    bool hasValidRange = false;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;

        hasValidRange = true;
        MediaTime startOfRange = PAL::toMediaTime(timeRange.start);
        if (minTimeSeekable > startOfRange)
            minTimeSeekable = startOfRange;
    }
    return hasValidRange ? minTimeSeekable : MediaTime::zeroTime();
}
1474
// Returns the latest end among the seekable ranges. Lazily refetches the
// ranges from the player item when the cache is empty (hence the mutable
// member despite this being a const method).
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime maxTimeSeekable;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        MediaTime endOfRange = PAL::toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeSeekable < endOfRange)
            maxTimeSeekable = endOfRange;
    }
    return maxTimeSeekable;
}
1492
// Returns the latest end among the cached loaded (buffered) ranges, or zero
// when nothing has been buffered. Mirrors platformMaxTimeSeekable() above.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime maxTimeLoaded;
    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        MediaTime endOfRange = PAL::toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeLoaded < endOfRange)
            maxTimeLoaded = endOfRange;
    }

    return maxTimeLoaded;   
}
1511
// Total sample data size across all tracks, memoized in m_cachedTotalBytes.
// Returns 0 until metadata is available.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    // Sum the tracks only on the first call; reuse the cached total afterwards.
    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1525
// Stores the AVAsset backing this player; a null RetainPtr clears it.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = asset;
}
1530
// Maps the load state of the asset's metadata keys onto the engine's
// AssetStatus, and caches whether every track satisfies the hardware-decode
// policy — that check feeds into the "Playable" result below.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    // Inspect each metadata key; the least-loaded key determines the status.
    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];

        if (error)
            ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.

        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // When the player says no hardware-support check is needed (presumably
    // e.g. in the simulator — see shouldCheckHardwareSupport()), treat all
    // tracks as playable without inspecting them.
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    // Otherwise compute (once) whether every track meets the hardware decode
    // requirements; any failing track makes the asset non-playable.
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    // All keys loaded: "Playable" only when AVFoundation agrees and our own
    // track check passed; otherwise merely "Loaded".
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1571
// Returns the NSError code reported for the asset's "playable" key, or 0
// when there is no asset or no load error.
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *loadError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&loadError];
    // Messaging nil yields 0, so a missing error maps to "no error code".
    return loadError ? [loadError code] : 0;
}
1581
// Draws the current video frame into the context, preferring the
// AVPlayerItemVideoOutput path and falling back to the image-generator path
// when no output frame is available yet.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Delay callbacks while calling into AVFoundation so notifications cannot
    // reenter this object mid-paint.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1602
// Best-effort paint entry point: draws only when painting is possible, we are
// not already rendering to a layer, and a context renderer already exists.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Rendering to a layer already presents the video; painting would be redundant.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if we already have an image
    // generator or video output available.
    if (hasContextRenderer())
        paintCurrentFrameInContext(context, rect);
}
1618
// Paints the frame for the current time using the AVAssetImageGenerator path.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    INFO_LOG(LOGIDENTIFIER);

    RetainPtr<CGImageRef> frame = createImageForTimeInRect(currentTime(), rect);
    if (!frame)
        return;

    GraphicsContextStateSaver stateSaver(context);
    // CG draws with a flipped y-axis relative to the WebCore context, so move
    // the origin to the rect's bottom-left and flip vertically before drawing.
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect destination(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, destination.width(), destination.height()), frame.get());
}
1633
// Synchronously generates a CGImage for the given time, constrained to the
// rect's size, then copies it into the sRGB color space. Creates the image
// generator on first use.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    // Timescale 600 is evenly divisible by common frame rates (24/25/30/60).
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating image took ", (MonotonicTime::now() - start).seconds());
#endif

    return image;
}
1654
// Returns the set of MIME types AVFoundation reports as playable, via the
// process-wide AVFoundationMIMETypeCache.
// Fix: the closing brace of this function was missing, leaving the body
// unterminated and breaking compilation of everything that follows.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
}

1660 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// FairPlay Streaming (both historical spellings) and Clear Key are the only
// key systems this engine recognizes.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1667 #endif
1668
// Implements the engine's canPlayType() answer for the given parameters,
// returning IsSupported / MayBeSupported / IsNotSupported.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    // MSE content is handled by a different media engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    // MediaStream content is handled by a different media engine.
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    // The container must be known either to the static list or to AVFoundation.
    if (!staticMIMETypeList().contains(containerType) && !AVFoundationMIMETypeCache::singleton().types().contains(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // With codecs present, ask AVFoundation about the full extended MIME type.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1698
// Reports whether this engine supports the given legacy-EME key system,
// optionally restricted to a MIME type. Always false when the feature is off.
// Fix: one branch returned MediaPlayer::IsNotSupported — an enum value — from
// this bool-returning function; it only behaved correctly because that
// enumerator happens to convert to false. Return false explicitly.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        // An explicit MIME type must be one the engine can actually play.
        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().types().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1724
1725 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1726 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Completes an AVAssetResourceLoadingRequest by responding with the slice of
// |keyData| covering the request's current offset and requested length.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    // Describe the "resource" (the key) so AVFoundation knows its total size.
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the size of the key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A range outside the key data fails the load (nil = no specific error).
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1752 #endif
1753
// AVAssetResourceLoader delegate hook. Returns true when WebKit will service
// the request itself (key requests or ordinary media loads) and AVFoundation
// should wait; false when AVFoundation should not expect a response.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
    // "skd" URLs are FairPlay Streaming key requests.
    if (scheme == "skd") {
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        initDataView->set<uint32_t>(0, keyURISize, true);

        // NOTE(review): the element count below divides by sizeof(unsigned char)
        // (i.e. 1) although the array holds UTF-16 code units — confirm this
        // matches the initData layout consumers expect.
        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        RefPtr<Uint8Array> initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        if (!player()->keyNeeded(initData.get()))
            return false;
#endif
        // Remember the request so the key can be supplied when it arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
        // With a modern CDM instance attached, the key session handles this.
        if (m_cdmInstance)
            return false;

        RetainPtr<NSData> keyURIData = [keyURI dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
        auto keyURIBuffer = SharedBuffer::create(keyURIData.get());
        player()->initializationDataEncountered(ASCIILiteral("skd"), keyURIBuffer->tryCreateArrayBuffer());
#endif
        return true;
    }

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // "clearkey" URLs carry the key ID in the URL's resource specifier.
    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        auto encodedKeyId = UTF8Encoding().encode(keyID, UnencodableHandling::URLEncodedEntities);

        auto initData = Uint8Array::create(encodedKeyId.size());
        initData->setRange(encodedKeyId.data(), encodedKeyId.size(), 0);

        // If the key is already cached, answer the request immediately.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif
#endif

    // Ordinary media data: hand the request to a WebCore resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1817
// AVAssetResourceLoader cancelled a request: stop the resource loader that
// was servicing it, if one is still active.
// Fix: the URL scheme was computed into a local that was never used; drop it.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1827
// A resource load finished or was torn down: drop its bookkeeping entry.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1832 #endif
1833
// This engine is usable only when both the AVFoundation and CoreMedia
// frameworks can be (soft-)loaded at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1838
// Maps a presentation time onto the media timeline. Currently an identity
// mapping in both branches (see the FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1847
// How long (in seconds) a cached current-time value may be reused before
// re-querying AVFoundation; zero on platforms where querying is cheap enough.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1856
// Applies the current aspect-ratio policy to the video layer's gravity.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;

    NSString *newGravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;

    // Disable implicit animations so the layer snaps to the new scaling mode.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:newGravity];
    [CATransaction commit];
}
1873
// Returns the first enabled track in |tracks|, or nil when none is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger location = [tracks indexOfObjectPassingTest:^(id obj, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(obj) isEnabled];
    }];
    return location == NSNotFound ? nil : [tracks objectAtIndex:location];
}
1883
// Recomputes hasVideo/hasAudio/hasClosedCaptions and the presentation size
// whenever the asset's or player item's track collection changes, then fires
// characteristicsChanged() when the primary audio language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the old primary audio language so a change can be detected below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic notifications until the end of this function.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        // With a player item, derive the characteristics from its enabled tracks.
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option also counts as having that medium.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media-selection group for caption discovery.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio provider pointed at the current audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1994
1995 #if ENABLE(VIDEO_TRACK)
1996
// Diffs the AVPlayerItemTracks of |trackType| in |tracks| against the existing
// wrappers in |oldItems|, creating wrappers for newly appearing tracks via
// |itemFactory| and notifying the MediaPlayer of each removal and addition.
// |oldItems| is updated in place to the new wrapper set.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    // Tracks backing the existing wrappers.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the existing wrappers into surviving and removed ones.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly added track.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player only after oldItems is consistent again.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2040
2041 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2042
// Media-selection variant of the track-diffing helper: refreshes |group|'s
// options for |characteristics|, diffs them against the wrappers in
// |oldItems|, creates wrappers for new options via |itemFactory|, and
// notifies the MediaPlayer of each removal and addition.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Collect the currently valid selection options (skip null wrappers and
    // wrappers whose underlying AVMediaSelectionOption is gone).
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Options backing the existing wrappers.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition existing wrappers into surviving and removed ones; a wrapper
    // with no backing option is treated as removed.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player only after oldItems is consistent again.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2102
2103 #endif
2104
// Rebuilds m_audioTracks from either the audible media-selection group (when
// available) or the raw AVPlayerItemTracks, notifying the MediaPlayer of any
// additions/removals and refreshing each track's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the wrapper for the audible media-selection group.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
#endif
}
2131
// Rebuilds m_videoTracks from the raw AVPlayerItemTracks and (when available)
// the visual media-selection group, notifying the MediaPlayer of any
// additions/removals and refreshing each track's cached properties.
// Fix: the property-reset loop iterated m_audioTracks — a copy/paste error
// from updateAudioTracks() — leaving video track properties stale; it must
// iterate m_videoTracks.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the wrapper for the visual media-selection group.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
#endif
}
2157
// Whether rendered text tracks must be mirrored into a platform layer;
// delegated to the fullscreen layer manager.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
    return m_videoFullscreenLayerManager->requiresTextTrackRepresentation();
}
2162
// Keeps the text-track representation layer's bounds in sync with the video;
// delegated to the fullscreen layer manager.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
    m_videoFullscreenLayerManager->syncTextTrackBounds();
}
2167
// Installs (or clears, when null) the layer-backed text-track representation;
// delegated to the fullscreen layer manager.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
}
2172
2173 #endif // ENABLE(VIDEO_TRACK)
2174
2175 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2176
// Lazily creates the Web Audio source provider for this player item, wiring
// it to the first enabled audible track on creation.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    return m_provider.get();
}
2185
2186 #endif
2187
// Pushes the cached presentation size to the base class as the natural size.
// A size is only meaningful once an asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2195
// Pushes the asset's resolved (post-redirect) URL to the base class; clears
// it when there is no asset.
void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
{
    setResolvedURL(m_avAsset ? URL([m_avAsset resolvedURL]) : URL());
}
2200
// True only when the media data was fetched through WebKit's own
// WebCoreNSURLSession and every response passed CORS checks. When
// AVFoundation loaded the data itself we cannot know, so answer false.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session isKindOfClass:[WebCoreNSURLSession class]])
        return session.didPassCORSAccessChecks;
#endif
    return false;
}
2215
2216 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2217
// Creates an AVPlayerItemVideoOutput for frame-by-frame pixel buffer access
// and attaches it to the player item. No-op when there is no item or an
// output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox conversion available, don't constrain the pixel format.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Deliver "media data ready" callbacks on the shared pull-delegate queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
}
2237
// Detaches the video output from the player item (when one still exists) and
// releases our reference. No-op when no output was ever created.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    INFO_LOG(LOGIDENTIFIER);

    m_videoOutput = nil;
}
2250
// Pulls the pixel buffer for the item's current time from the video output.
// Returns true when a new buffer was captured into m_lastPixelBuffer.
bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
{
    if (!m_avPlayerItem)
        return false;

    // Make sure there is a video output to pull frames from.
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:itemTime])
        return false;

    m_lastPixelBuffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil]);
    // A fresh buffer invalidates any image built from the previous one.
    m_lastImage = nullptr;
    return true;
}
2269
// Whether a frame can be painted right now: either an image was already
// converted, or the video output (created on demand) has a new pixel buffer.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;
    if (m_lastImage)
        return true;
    if (!m_videoOutput)
        createVideoOutput();
    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2283
// Refreshes m_lastImage from the newest video pixel buffer. When called with
// UpdateType::UpdateSynchronously and no frame is available yet, blocks (up to
// one second) until the video output reports new media data.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage(UpdateType type)
{
#if HAVE(CORE_VIDEO)
    if (!m_avPlayerItem)
        return;

    if (type == UpdateType::UpdateSynchronously && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
        return;

    // Lazily create the converter that turns CVPixelBuffers into CGImages.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        // VideoToolbox-backed buffers can be in any format; ask the conformer for BGRA.
        NSDictionary *attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        // Buffers were already requested as 32BGRA in createVideoOutput().
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)attributes);
    }

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating buffer took ", (MonotonicTime::now() - start).seconds());
#endif
#endif // HAVE(CORE_VIDEO)
}
2319
// Paints the current video frame into |context| at |outputRect|, honoring the
// video track's preferred transform (e.g. rotation metadata).
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    updateLastImage(UpdateType::UpdateSynchronously);
    if (!m_lastImage)
        return;

    AVAssetTrack* videoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!videoTrack)
        return;

    INFO_LOG(LOGIDENTIFIER);

    GraphicsContextStateSaver stateSaver(context);

    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [videoTrack preferredTransform];
    // Draw in the track's coordinate space: concatenate the transform onto the
    // context, then map the destination rect back through its inverse.
    FloatRect destRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), destRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();
}
2346
// Copies the most recent video pixel buffer into |outputTexture| using the
// VideoTextureCopierCV, which is created on first use. Returns false when no
// pixel buffer is available.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    ASSERT(context);

    updateLastPixelBuffer();
    if (!m_lastPixelBuffer)
        return false;

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    size_t bufferWidth = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t bufferHeight = CVPixelBufferGetHeight(m_lastPixelBuffer.get());

    return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), bufferWidth, bufferHeight, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2363
// Returns the CGImage for the current playback time, refreshing the cached
// image (asynchronously) first. May be null if no frame has been produced yet.
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2369
// Blocks the calling thread until the video output reports that new media data
// will be available, giving up after one second. Used for synchronous paints.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // outputMediaDataWillChange() signals the semaphore from the pull queue.
    if (dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC)))
        ERROR_LOG(LOGIDENTIFIER, "timed out");
}
2383
// Delegate callback (delivered on the global pull queue) when the video output
// has new media data; wakes any thread blocked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2388
2389 #endif
2390
2391 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2392
// Removes and returns the pending resource-loading request for |keyURI|, or a
// null RetainPtr if none is queued.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2397
// A new decryption key became available: fulfill every pending
// AVAssetResourceLoadingRequest whose key data is now cached, then drop those
// entries from the pending map.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> fulfilledKeyIds;

    for (auto& pair : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(pair.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(pair.value.get(), keyData.get());
        fulfilledKeyIds.append(pair.key);
    }

    // Remove after iterating; mutating the map during enumeration is unsafe.
    for (auto& keyId : fulfilledKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2417
// Clears the weak reference to the legacy CDM session; asserts the caller is
// removing the session this player actually holds.
void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2423
// Creates a legacy CDM session for |keySystem|, keeping only a weak pointer to
// it; ownership passes to the caller. Returns null for unsupported key systems.
std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = session->createWeakPtr();
    return WTFMove(session);
}
2432 #endif
2433
2434 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Notifies the active DRM session/instance when the output becomes obscured
// because the external display lacks sufficient content protection (HDCP).
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // 'HDCP' four-char code is used as the error code for legacy sessions.
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_cdmInstance)
        m_cdmInstance->outputObscuredDueToInsufficientExternalProtectionChanged(newValue);
#elif !ENABLE(LEGACY_ENCRYPTED_MEDIA)
    UNUSED_PARAM(newValue);
#endif
}
2449 #endif
2450
2451 #if ENABLE(ENCRYPTED_MEDIA)
// Attaches a FairPlay Streaming CDM instance, detaching any previous one, and
// registers the current asset as a recipient of its content key session.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    // Only FairPlay Streaming instances are supported by this player.
    if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
        return;

    auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    if (&fpsInstance == m_cdmInstance)
        return;

    if (m_cdmInstance)
        cdmInstanceDetached(*m_cdmInstance);

    m_cdmInstance = &fpsInstance;
    [m_cdmInstance->contentKeySession() addContentKeyRecipient:m_avAsset.get()];
#else
    UNUSED_PARAM(instance);
#endif
}
2471
// Detaches the current CDM instance, unregistering the asset from its content
// key session. |instance| must be the instance previously attached.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceDetached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);
    [m_cdmInstance->contentKeySession() removeContentKeyRecipient:m_avAsset.get()];
    m_cdmInstance = nullptr;
#else
    UNUSED_PARAM(instance);
#endif
}
2482
// A CDM instance can now decrypt: any queued key requests can simply finish
// loading, since decryption is routed through the attached content key session.
void MediaPlayerPrivateAVFoundationObjC::attemptToDecryptWithInstance(CDMInstance&)
{
    // Take the whole map so the member is cleared before the requests complete.
    auto pendingRequests = WTFMove(m_keyURIToRequestMap);
    for (auto& request : pendingRequests.values())
        [request finishLoading];
}
2489 #endif
2490
2491 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2492
// Builds text tracks for legacy (CEA-608 style) closed-caption tracks on
// platforms without AVPlayerItemLegibleOutput support. Tracks that no longer
// appear in m_cachedTracks are handed to processNewAndRemovedTextTracks() for
// removal.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Deselect any automatically chosen legible option; caption display is
    // driven by WebCore's track selection instead.
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once entries have
            // been removed from removedTextTracks (or appended to m_textTracks
            // in the outer loop), the two vectors no longer line up, so indexing
            // m_textTracks here could compare the wrong track.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2527
2528 #endif
2529
// Returns the asset's audible tracks, or nil when the asset's track information
// has not finished loading (querying it earlier could block).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset)
        return nil;

    if ([m_avAsset statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2540
2541 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2542
// True when the asset's media-selection metadata has loaded and the selection
// groups can be queried without blocking.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    if (!m_avAsset)
        return false;

    return [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2553
// The asset's legible (caption/subtitle) selection group, or nil until the
// selection metadata has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2561
// The asset's audible selection group, or nil until the selection metadata has
// loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2569
// The asset's visual selection group, or nil until the selection metadata has
// loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2577
// Reconciles WebCore's text tracks with the asset's legible media-selection
// options: options already represented are kept, new options become new tracks,
// and tracks whose option disappeared are reported as removed.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Start by assuming every existing track was removed; matching options
    // below take tracks back out of this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }

            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2633
// Lazily creates the single in-band metadata text track (used for HLS timed
// metadata) and registers it with the MediaPlayer.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2643
// Forwards cue data (attributed strings and native samples) arriving at |time|
// to the currently selected in-band text track, if any.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (!m_currentTextTrack)
        return;

    // NSArray is toll-free bridged to CFArray; the track consumes CF types.
    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2653
// Discards any partially accumulated cue state on the current text track, e.g.
// after a seek.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->resetCueValues();
}
2663
2664 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2665
// Selects |track| (or deselects all text tracks when null), routing the choice
// to the appropriate AVFoundation mechanism: legacy closed-caption display,
// an out-of-band media-selection option, or an in-band option.
// NOTE(review): the if/else-if chain is interleaved with #if blocks; the
// dangling else binds across the preprocessor boundaries, so keep the exact
// brace/indent structure when editing.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");

    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#pragma clang diagnostic pop
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
#pragma clang diagnostic pop
    }

}
2700
// Determines (and caches in the mutable member m_languageOfPrimaryAudioTrack)
// the BCP-47-ish language of the primary audio track: first from the selected
// audible media-selection option, otherwise from a sole audio asset track.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
#pragma clang diagnostic pop
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single "primary" language exists.
        m_languageOfPrimaryAudioTrack = emptyString();
        INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

    INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");

    return m_languageOfPrimaryAudioTrack;
}
2740
2741 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Whether playback is currently routed to a wireless target (AirPlay, etc.).
// On Mac this consults the explicit playback target; on iOS it asks AVPlayer.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            // Mock targets: honor the should-play flag plus route availability.
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    INFO_LOG(LOGIDENTIFIER, "- ", wirelessTarget);

    return wirelessTarget;
}
2761
// Maps the AVPlayer's external playback type to the MediaPlayer target type.
// Non-iOS platforms only support AirPlay targets.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    // AVFoundation is soft-linked; bail out if it is not loadable.
    if (!AVFoundationLibrary())
        return MediaPlayer::TargetTypeNone;

    switch ([m_avPlayer externalPlaybackType]) {
    case AVPlayerExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case AVPlayerExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case AVPlayerExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2787     
2788 #if PLATFORM(IOS)
// Returns a human-readable name for the external playback device (the AirPlay
// route), or nil when no suitable name can be determined.
// NOTE(review): the "exernal" spelling is a long-standing typo; it is kept
// because the caller in wirelessPlaybackTargetName() uses this exact name.
static NSString *exernalDeviceDisplayNameForPlayer(AVPlayerType *player)
{
#if HAVE(CELESTIAL)
    if (!AVFoundationLibrary())
        return nil;

#if __IPHONE_OS_VERSION_MAX_ALLOWED >= 110000
    // Prefer the shared audio presentation AVOutputContext when available; it
    // can describe one or several simultaneously picked output devices.
    if ([getAVOutputContextClass() respondsToSelector:@selector(sharedAudioPresentationOutputContext)]) {
        AVOutputContext *outputContext = [getAVOutputContextClass() sharedAudioPresentationOutputContext];

        if (![outputContext respondsToSelector:@selector(supportsMultipleOutputDevices)]
            || ![outputContext supportsMultipleOutputDevices]
            || ![outputContext respondsToSelector:@selector(outputDevices)])
            return [outputContext deviceName];

        auto outputDeviceNames = adoptNS([[NSMutableArray alloc] init]);
        for (AVOutputDevice *outputDevice in [outputContext outputDevices]) {
            // adoptNS balances the +1 reference returned by -copy; without it the
            // copied name string leaked each time (the array holds its own reference).
            [outputDeviceNames addObject:adoptNS([[outputDevice name] copy]).get()];
        }

        return [outputDeviceNames componentsJoinedByString:@" + "];
    }
#endif

    // Fallback path: consult MediaRemote's pickable routes.
    if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
        return nil;

    NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
    if (!pickableRoutes.count)
        return nil;

    NSString *displayName = nil;
    for (NSDictionary *pickableRoute in pickableRoutes) {
        if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
            continue;

        displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];

        NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
        if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
            break;

        // The route is a speaker or HDMI out, override the name to be the localized device model.
        NSString *localizedDeviceModel = [[UIDevice currentDevice] localizedModel];

        // In cases where a route with that name already exists, prefix the name with the model.
        BOOL includeLocalizedDeviceModelName = NO;
        for (NSDictionary *otherRoute in pickableRoutes) {
            if (otherRoute == pickableRoute)
                continue;

            if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
                includeLocalizedDeviceModelName = YES;
                break;
            }
        }

        if (includeLocalizedDeviceModelName)
            displayName =  [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
        else
            displayName = localizedDeviceModel;

        break;
    }

    return displayName;
#else
    UNUSED_PARAM(player);
    return nil;
#endif
}
2859 #endif
2860
// Human-readable name of the current wireless playback target, or the empty
// string when there is no player or no target.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = exernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif

    return wirelessTargetName;
}
2876
// Whether wireless (external) video playback is disabled. Refreshes the cached
// mutable flag from AVPlayer when a player exists.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    INFO_LOG(LOGIDENTIFIER, "- ", !m_allowsWirelessVideoPlayback);

    return !m_allowsWirelessVideoPlayback;
}
2887
// Applies the "wireless video playback disabled" setting by toggling
// AVPlayer.allowsExternalPlayback; the flag is cached even without a player.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    INFO_LOG(LOGIDENTIFIER, "- ", disabled);

    bool allowed = !disabled;
    m_allowsWirelessVideoPlayback = allowed;

    if (!m_avPlayer)
        return;

    // Suppress re-entrant KVO callbacks while mutating the player.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:allowed];
    setDelayCallbacks(false);
}
2899
2900 #if !PLATFORM(IOS)
2901
// Adopts a new wireless playback target. For AVFoundation targets the AVOutputContext
// is captured for later use by setShouldPlayToPlaybackTarget(); routeless targets
// immediately stop playback-to-target.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    INFO_LOG(LOGIDENTIFIER);

    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2913
// Starts or stops routing playback to the current target. AVFoundation targets
// are driven through AVPlayer.outputContext; mock targets just notify WebCore
// asynchronously that the wireless state changed.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", shouldPlay);

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        if (!m_avPlayer)
            return;

        // Avoid redundant assignment when the context is effectively unchanged.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    // Mock target: notify on the main thread, guarding against destruction.
    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2954
2955 #endif // !PLATFORM(IOS)
2956
// iOS only: keeps AVPlayer's "use external playback while an external screen is
// active" behavior in sync with standard-fullscreen mode.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS)
    if (!m_avPlayer)
        return;

    // SPI selector; guard with respondsToSelector: for older OS versions.
    if ([m_avPlayer respondsToSelector:@selector(setUsesExternalPlaybackWhileExternalScreenIsActive:)])
        [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
2967
2968 #endif
2969
// KVO callback: caches the AVPlayerItem status and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2976
// Prior-to-change KVO callback; balanced by playbackLikelyToKeepUpDidChange(),
// which decrements the pending counter and updates state at zero.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2981
// KVO callback: caches likely-to-keep-up and updates state once all pending
// will/did-change pairs have resolved.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2990
// Prior-to-change KVO callback; balanced by playbackBufferEmptyDidChange().
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
2995
// KVO callback: caches buffer-empty and updates state once all pending
// will/did-change pairs have resolved.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3004
// Prior-to-change KVO callback; balanced by playbackBufferFullDidChange().
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
3009
// KVO callback: caches buffer-full and updates state once all pending
// will/did-change pairs have resolved.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3018
// KVO callback: caches the item's seekable time ranges and notifies WebCore.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
3026
// KVO callback: caches the item's loaded (buffered) time ranges and notifies
// WebCore.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
3034
// KVO callback for the layer's readyForDisplay: the first displayable frame is
// proof of a video track, so re-run track detection if none was known.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
3042
// KVO callback: a track's enabled state flipped; re-evaluate tracks and state.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
3048
// Starts or stops buffering by attaching or detaching the AVPlayerItem from the
// AVPlayer; detaching releases the item's buffered media data.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    INFO_LOG(LOGIDENTIFIER, "- ", shouldBuffer);

    if (shouldBuffer == m_shouldBufferData)
        return;

    m_shouldBufferData = shouldBuffer;

    if (!m_avPlayer)
        return;

    if (shouldBuffer)
        setAVPlayerItem(m_avPlayerItem.get());
    else
        setAVPlayerItem(nil);
}
3063