[iOS] AirPlay device name is sometimes wrong
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "DeprecatedGlobalSettings.h"
41 #import "Extensions3D.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "OutOfBandTextTrackPrivateAVF.h"
54 #import "PixelBufferConformerCV.h"
55 #import "PlatformTimeRanges.h"
56 #import "SecurityOrigin.h"
57 #import "SerializedPlatformRepresentationMac.h"
58 #import "SharedBuffer.h"
59 #import "TextEncoding.h"
60 #import "TextTrackRepresentation.h"
61 #import "TextureCacheCV.h"
62 #import "URL.h"
63 #import "VideoFullscreenLayerManagerObjC.h"
64 #import "VideoTextureCopierCV.h"
65 #import "VideoTrackPrivateAVFObjC.h"
66 #import "WebCoreAVFResourceLoader.h"
67 #import "WebCoreCALayerExtras.h"
68 #import "WebCoreNSURLSession.h"
69 #import <JavaScriptCore/DataView.h>
70 #import <JavaScriptCore/JSCInlines.h>
71 #import <JavaScriptCore/TypedArrayInlines.h>
72 #import <JavaScriptCore/Uint16Array.h>
73 #import <JavaScriptCore/Uint32Array.h>
74 #import <JavaScriptCore/Uint8Array.h>
75 #import <functional>
76 #import <objc/runtime.h>
77 #import <pal/avfoundation/MediaTimeAVFoundation.h>
78 #import <pal/spi/cocoa/QuartzCoreSPI.h>
79 #import <pal/spi/mac/AVFoundationSPI.h>
80 #import <wtf/BlockObjCExceptions.h>
81 #import <wtf/ListHashSet.h>
82 #import <wtf/NeverDestroyed.h>
83 #import <wtf/OSObjectPtr.h>
84 #import <wtf/text/CString.h>
85
86 #if ENABLE(AVF_CAPTIONS)
87 #include "TextTrack.h"
88 #endif
89
90 #import <AVFoundation/AVAssetImageGenerator.h>
91 #import <AVFoundation/AVAssetTrack.h>
92 #import <AVFoundation/AVMediaSelectionGroup.h>
93 #import <AVFoundation/AVMetadataItem.h>
94 #import <AVFoundation/AVPlayer.h>
95 #import <AVFoundation/AVPlayerItem.h>
96 #import <AVFoundation/AVPlayerItemOutput.h>
97 #import <AVFoundation/AVPlayerItemTrack.h>
98 #import <AVFoundation/AVPlayerLayer.h>
99 #import <AVFoundation/AVTime.h>
100
101 #if PLATFORM(IOS)
102 #import "WAKAppKitStubs.h"
103 #import <CoreImage/CoreImage.h>
104 #import <UIKit/UIDevice.h>
105 #import <mach/mach_port.h>
106 #else
107 #import <Foundation/NSGeometry.h>
108 #import <QuartzCore/CoreImage.h>
109 #endif
110
111 #if USE(VIDEOTOOLBOX)
112 #import <CoreVideo/CoreVideo.h>
113 #import <VideoToolbox/VideoToolbox.h>
114 #endif
115
116 #import "CoreVideoSoftLink.h"
117 #import "MediaRemoteSoftLink.h"
118
namespace std {
// WTF::HashSet's iterator does not define the nested typedefs std::iterator_traits
// expects, so provide the value_type specialization needed by standard algorithms
// that operate on collections of MediaSelectionOptionAVFObjC.
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
124
125 #if ENABLE(AVF_CAPTIONS)
126 // Note: This must be defined before our SOFT_LINK macros:
127 @class AVMediaSelectionOption;
128 @interface AVMediaSelectionOption (OutOfBandExtensions)
129 @property (nonatomic, readonly) NSString* outOfBandSource;
130 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
131 @end
132 #endif
133
// Declares AVURLAsset's -resolvedURL property so this file can use it without
// compiler warnings (the property is not in the public AVFoundation headers).
@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end
137
// Concrete aliases for AVFoundation classes. The soft-link #defines later in this
// file rewrite the bare class names (e.g. AVPlayer) into soft-link accessor calls,
// so code that needs to *name* these types uses the *Type aliases instead.
typedef AVPlayer AVPlayerType;
typedef AVPlayerItem AVPlayerItemType;
typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
typedef AVMetadataItem AVMetadataItemType;
typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
typedef AVMediaSelectionOption AVMediaSelectionOptionType;
typedef AVAssetCache AVAssetCacheType;
146
147 #pragma mark - Soft Linking
148
149 // Soft-linking headers must be included last since they #define functions, constants, etc.
150 #import <pal/cf/CoreMediaSoftLink.h>
151
152 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
153
154 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
155
156 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
157 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
158 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
159 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
160 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
161 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
162 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
163 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)
164
165 SOFT_LINK_CLASS(CoreImage, CIContext)
166 SOFT_LINK_CLASS(CoreImage, CIImage)
167
168 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
169 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
170 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
171 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
172 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
173 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
174 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
175 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
176 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
177 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
178 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
179 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
180 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
182 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
183
184 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
185 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetRequiresCustomURLLoadingKey, NSString *)
186 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetOutOfBandMIMETypeKey, NSString *)
187 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetUseClientURLLoadingExclusively, NSString *)
188
189 #define AVPlayer initAVPlayer()
190 #define AVPlayerItem initAVPlayerItem()
191 #define AVPlayerLayer initAVPlayerLayer()
192 #define AVURLAsset initAVURLAsset()
193 #define AVAssetImageGenerator initAVAssetImageGenerator()
194 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
195 #define AVMetadataItem initAVMetadataItem()
196 #define AVAssetCache initAVAssetCache()
197
198 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
199 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
200 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
201 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
202 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
203 #define AVMediaTypeVideo getAVMediaTypeVideo()
204 #define AVMediaTypeAudio getAVMediaTypeAudio()
205 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
206 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
207 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
208 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
209 #define AVURLAssetRequiresCustomURLLoadingKey getAVURLAssetRequiresCustomURLLoadingKey()
210 #define AVURLAssetOutOfBandMIMETypeKey getAVURLAssetOutOfBandMIMETypeKey()
211 #define AVURLAssetUseClientURLLoadingExclusively getAVURLAssetUseClientURLLoadingExclusively()
212 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
213 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
214 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
215 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
216 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
217
218 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
219 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
220 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
221
222 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
223 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
224 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
225 SOFT_LINK_CLASS(AVFoundation, AVOutputContext)
226
227 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
228 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
229 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
230 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
231
232 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
233 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
234 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
235 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
236 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
237 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
238 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
239 #endif
240
241 #if ENABLE(AVF_CAPTIONS)
242 SOFT_LINK_POINTER(AVFoundation, AVURLAssetCacheKey, NSString*)
243 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
244 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
245 SOFT_LINK_POINTER(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString*)
246 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
247 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
248 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
249 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
250 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
255
256 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
257 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
258 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
259 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
260 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
261 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
262 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
263 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
264 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
265 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
266 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
267 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
268 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
269 #endif
270
271 #if ENABLE(DATACUE_VALUE)
272 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
273 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
274 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
275 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
276 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
277
278 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
279 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
280 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
281 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
282 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
283 #endif
284
285 #if PLATFORM(IOS)
286 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
287 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
288 #endif
289
290 SOFT_LINK_FRAMEWORK(MediaToolbox)
291 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
292
293 #if PLATFORM(IOS)
294 #if HAVE(CELESTIAL)
295 SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
296 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
297 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
298 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
299 #define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
300 #define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
301 #define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
302 #endif // HAVE(CELESTIAL)
303
304 SOFT_LINK_FRAMEWORK(UIKit)
305 SOFT_LINK_CLASS(UIKit, UIDevice)
306 #define UIDevice getUIDeviceClass()
307 #endif // PLATFORM(IOS)
308
309 using namespace WebCore;
310
// KVO context values passed to -addObserver:forKeyPath:options:context: so the
// observer callback can tell which kind of object produced the change notification.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
317
// Helper object that forwards AVFoundation notifications, KVO changes and (when
// supported) legible-output caption callbacks to the owning C++ player object.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer; cleared via -disconnect before the player is destroyed.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
337
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Resource-loader delegate that routes AVAssetResourceLoader requests back to the
// owning player so WebKit's own networking stack can satisfy them.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer; reset with -setCallback: during player teardown.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
347
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate that tells the player when new video frames can be pulled from an
// AVPlayerItemVideoOutput.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Raw back-pointer; reset with -setCallback: during player teardown.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
359
360 namespace WebCore {
361 using namespace PAL;
362
363 static NSArray *assetMetadataKeyNames();
364 static NSArray *itemKVOProperties();
365 static NSArray *assetTrackMetadataKeyNames();
366 static NSArray *playerKVOProperties();
367 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
368
369 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the process-wide serial queue on which loader-delegate callbacks run.
// Created exactly once, on first use.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderDelegateQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        loaderDelegateQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderDelegateQueue;
}
379 #endif
380
381 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the process-wide serial queue used for video-output pull-delegate
// callbacks; lazily created once.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullDelegateQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        pullDelegateQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullDelegateQueue;
}
391 #endif
392
// Registers this media engine with the MediaPlayer factory, then kicks off the
// (asynchronous) load of the supported MIME-type list.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    // Bail out if AVFoundation cannot be soft-linked on this system.
    if (!isAvailable())
        return;

    auto constructor = [](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); };
    registrar(constructor, getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    AVFoundationMIMETypeCache::singleton().loadTypes();
}
402
// Returns an AVAssetCache rooted at the given directory path, falling back to a
// "MediaCache" directory under NSTemporaryDirectory() when no path is provided.
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    NSURL *cacheDirectoryURL = path.isEmpty()
        ? [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES]
        : [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:cacheDirectoryURL];
}
414
// Collects the security origins represented in the media cache at |path|.
// Cache keys are URL strings; every key that parses to a valid URL contributes
// its origin to the result.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> cachedOrigins;
    for (NSString *cacheKey in [assetCacheForPath(path) allKeys]) {
        URL keyURL = URL(URL(), cacheKey);
        if (!keyURL.isValid())
            continue;
        cachedOrigins.add(SecurityOrigin::create(keyURL));
    }
    return cachedOrigins;
}
425
// Converts an NSDate to WTF's wall-clock time (seconds since the Unix epoch).
static WallTime toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    return WallTime::fromRawSeconds([date timeIntervalSince1970]);
}
431
// Clears media-cache entries (both AVAssetCache entries and on-disk files) that
// were modified after |modifiedSince|. A non-positive cutoff clears everything.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, WallTime modifiedSince)
{
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    
    // First purge AVAssetCache entries newer than the cutoff.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // A cutoff at or before the epoch means "clear everything": remove the whole
    // cache directory and stop.
    if (modifiedSince <= WallTime::fromRawSeconds(0)) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }
    
    // Otherwise scan only the top level of the cache directory for files named
    // "CachedMedia-*" whose modification date is after the cutoff.
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];
    
    // Collect matches first and delete afterwards, rather than deleting while the
    // directory enumerator is still iterating.
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
    
        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;
        
        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;
        
        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;
        
        [urlsToDelete addObject:fileURL];
    }
    
    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
473
// Removes every cache entry whose key (a URL string) belongs to one of the given
// security origins.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (!keyAsURL.isValid())
            continue;
        if (origins.contains(SecurityOrigin::create(keyAsURL)))
            [assetCache removeEntryForKey:key];
    }
}
485
// Constructor: wires up the Objective-C helper observers/delegates (which hold raw
// back-pointers to |this|) and initializes the cached-state members to their
// "nothing loaded yet" values. Initializer order must match declaration order.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this])) // KVO/notification bridge back to this object.
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
517
// Destructor: detach every Objective-C helper that holds a raw back-pointer to
// this object *before* tearing anything else down, so no late callback can reach
// a dying player.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    // Cancel any in-flight custom resource loads.
    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // cancelLoad() performs the remaining teardown (observers, player, item, caches).
    cancelLoad();
}
539
// Stops any in-progress load and releases the AVFoundation object graph (asset,
// player item, player, outputs) plus all cached state derived from it. The
// teardown is strictly ordered: observers must be removed before the objects they
// observe are released.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    INFO_LOG(LOGIDENTIFIER);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the caption (legible) output from the item before dropping either.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every KVO observation on the item before releasing it.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayerItem) {
    }
    if (m_avPlayer) {
        // Remove the periodic time observer and KVO observations, then detach the
        // current item before releasing the player itself.
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was observed for "enabled" changes; unregister before dropping.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
605
// True once createVideoLayer() has been asked to create a layer renderer (the
// AVPlayerLayer itself may still be created asynchronously on the main thread).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
610
// True when a context-based (non-layer) renderer exists: either an
// AVPlayerItemVideoOutput (when supported) or an AVAssetImageGenerator.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
619
// Creates whichever context renderer this build supports: a video output when
// AVPlayerItemVideoOutput is available, otherwise an image generator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
628
// Lazily creates the AVAssetImageGenerator used for context (software) painting.
// No-op until an asset exists, and never re-creates an existing generator.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    using namespace PAL; // For the soft-linked CoreMedia kCMTimeZero.
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance so generated frames correspond exactly to the requested time.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];
}
644
// Tears down both possible context renderers; each destroy call is a no-op if
// the corresponding renderer was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
652
// Releases the AVAssetImageGenerator; createImageGenerator() will lazily make a
// new one if context rendering is needed again.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Clear with nullptr, matching how the other cached smart pointers are reset
    // in this file (see cancelLoad()), rather than the literal 0.
    m_imageGenerator = nullptr;
}
662
// Schedules creation of the AVPlayerLayer (and, where supported, the video
// output) on the main thread. Guarded so repeated calls are harmless.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    // Hop to the main thread; the weak pointer protects against the player being
    // destroyed before the task runs.
    callOnMainThread([this, weakThis = createWeakPtr()] {
        if (!weakThis)
            return;

        // Re-check the guards: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX) && HAVE(AVFOUNDATION_VIDEO_OUTPUT)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Let the client know rendering switched from context to layer mode.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
687
// Creates the AVPlayerLayer, attaches it to the AVPlayer, registers KVO for
// readyForDisplay, and hands the layer to the fullscreen layer manager.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Watch readyForDisplay so hasAvailableVideoFrame() can report layer readiness.
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    INFO_LOG(LOGIDENTIFIER);

    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS) && !ENABLE(EXTRA_ZOOM_MODE)
    // Propagate the current picture-in-picture state to layers that support it.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
}
712
// Tears down the AVPlayerLayer: unregister KVO and detach the player before
// releasing the layer, then notify the fullscreen layer manager.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];
    m_videoFullscreenLayerManager->didDestroyVideoLayer();

    m_videoLayer = nil;
}
726
// Computes the media's start date in milliseconds since the epoch. The item's
// currentDate advances with playback, so the current playback offset is
// subtracted back out to recover the start date.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    double dateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media file has no start date; no live stream
    // dates from the 1970 epoch, so treat 0 as "no start date".
    if (!dateMilliseconds)
        return MediaTime::invalidTime();

    double playbackOffsetMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round to absorb the sub-second error introduced by the subtraction.
    return MediaTime::createWithDouble(round(dateMilliseconds - playbackOffsetMilliseconds));
}
741
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    // When rendering to a layer, rely on the KVO-cached readyForDisplay state.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return m_cachedIsReadyForDisplay;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // When a video output exists, a retained pixel buffer or a new one available
    // for the current item time counts as an available frame.
    if (m_videoOutput && (m_lastPixelBuffer || [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]]))
        return true;
#endif

    return m_videoFrameHasDrawn;
}
754
755 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore text-track kind to the array of AVFoundation media
// characteristics used when registering out-of-band tracks with the asset.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // Under the 2015 manual caption-selection behavior, every out-of-band track
    // is tagged as auxiliary content regardless of its kind.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Caption)
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];

    if (kind == PlatformTextTrack::Subtitle)
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];

    if (kind == PlatformTextTrack::Description)
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];

    if (kind == PlatformTextTrack::Forced)
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];

    return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
}
777     
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    // Forwards the out-of-band track mode change to trackModeChanged().
    trackModeChanged();
}
782     
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    // Pushes the requested mode (showing/hidden/disabled) of each out-of-band
    // track source onto the matching platform text track. Tracks are paired by
    // the unique identifier stored in the media selection option.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            // The option's outOfBandIdentifier was populated from the source's
            // uniqueId when the asset options were built; use it to match.
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Default to Hidden when the source mode is unrecognized.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
813 #endif
814
815
// Returns the NSURLProtocol-canonicalized form of |url|, falling back to the
// plain Cocoa conversion whenever canonicalization is not possible.
static NSURL *canonicalURL(const URL& url)
{
    NSURL *convertedURL = url;
    if (url.isEmpty())
        return convertedURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:convertedURL]);
    if (!request)
        return convertedURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    return canonicalRequest ? [canonicalRequest URL] : convertedURL;
}
832
833 #if PLATFORM(IOS)
// Translates a WebCore Cookie into its Foundation NSHTTPCookie equivalent.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> cookieProperties = adoptNS([[NSMutableDictionary alloc] init]);
    // cookie.expires is in milliseconds; NSDate wants seconds.
    [cookieProperties setDictionary:@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    }];
    // Secure and session-only flags are optional; only set them when present.
    if (cookie.secure)
        [cookieProperties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [cookieProperties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:cookieProperties.get()];
}
851 #endif
852
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
{
    // Builds the AVURLAsset for |url|, assembling its options dictionary
    // (reference restrictions, HTTP headers, out-of-band text tracks, cookies,
    // caching policy) before creating the asset and wiring up its resource loader.
    if (m_avAsset)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Disallow mixed local/remote references within the asset.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Forward the page's Referer and User-Agent headers, when present.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    // Prefer routing media loads through WebKit's own loader when the
    // corresponding AVFoundation option keys are available at runtime.
    if (AVURLAssetUseClientURLLoadingExclusively)
        [options setObject:@YES forKey:AVURLAssetUseClientURLLoadingExclusively];
#if PLATFORM(IOS)
    else if (AVURLAssetRequiresCustomURLLoadingKey)
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

    // Pass the page-supplied MIME type (plus codecs string, if any) out of band,
    // but not when the type was merely inferred from the file extension.
    auto type = player()->contentMIMEType();
    if (AVURLAssetOutOfBandMIMETypeKey && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    // Register the page's out-of-band text tracks with the asset.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    // Bind media loads to a specific network interface when one is requested.
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the document's cookies for this URL to AVFoundation.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(url, cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    // Select between persistent and non-persistent media caching.
    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
    
    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // If the resource loader supports it, give it an NSURLSession backed by
    // WebKit's PlatformMediaResourceLoader so media loads go through our stack.
    if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    // A fresh asset has not had its playability checked yet.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
973
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    // Replacing the player's current item must happen on the main thread; hop
    // there asynchronously when called from elsewhere.
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Retain both objects so they stay alive until the dispatched block runs.
    RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> protectedItem = item;
    dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
        [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
    });
}
990
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    // Lazily creates the AVPlayer, registers KVO observers for the player-level
    // properties, and re-applies state (external playback, playback target,
    // sleep policy, mute) that may have been set before the player existed.
    if (m_avPlayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebKit performs its own media selection; disable AVFoundation's automatic one.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything, then
        // re-apply the cached muted state to the newly created player. (The old
        // code sent setMuted:m_muted after clearing the flag, which unmuted the
        // player instead of muting it.)
        m_muted = false;
        setMuted(true);
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1039
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    // Lazily creates the AVPlayerItem for the loaded asset, registers KVO and
    // did-play-to-end observers, and attaches the legible (caption) output and
    // the Web Audio provider when those features are enabled.
    if (m_avPlayerItem)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // NSKeyValueObservingOptionPrior gives us will-change callbacks as well.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Ask for cue delivery slightly ahead of their presentation time.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    // WebKit renders cues itself; keep AVFoundation from drawing them too.
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the new item and its track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

#if ENABLE(EXTRA_ZOOM_MODE)
    createVideoOutput();
#endif

    setDelayCallbacks(false);
}
1089
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Asynchronously loads the asset's "playable" and "tracks" keys, at most
    // once per asset, then posts AssetPlayabilityKnown on the main thread.
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            // The player may have been destroyed while the keys were loading.
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1106
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    // Loads the asset-level metadata keys, then each track's metadata keys, and
    // tells the observer once everything has finished. The dispatch group holds
    // one count for the asset load plus one per track load.
    INFO_LOG(LOGIDENTIFIER);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Fan out to per-track loads only if the player is still alive and
            // the "tracks" key actually loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balance the enter taken before the asset-level load.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    // Runs once every enter above has been matched by a leave.
    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1136
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    // Maps the KVO-cached AVPlayerItem state onto the cross-platform ItemStatus
    // enum, checking the most specific conditions first.
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;

    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;

    // Buffer state refines a ready item's status, in decreasing order of strength.
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;

    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;

    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1155
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    // Exposes the underlying AVPlayer to clients needing the native media object.
    INFO_LOG(LOGIDENTIFIER);
    PlatformMedia result;
    result.type = PlatformMedia::AVFoundationMediaPlayerType;
    result.media.avfMediaPlayer = m_avPlayer.get();
    return result;
}
1164
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    // The fullscreen layer manager owns the inline layer used for page rendering.
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
1169
void MediaPlayerPrivateAVFoundationObjC::updateVideoFullscreenInlineImage()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Grab the current frame synchronously so the inline placeholder can show
    // the latest video image.
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->updateVideoFullscreenInlineImage(m_lastImage);
#endif
}
1177
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, Function<void()>&& completionHandler)
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Capture the current frame synchronously so the layer transition can use it.
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
#else
    // Without a video output there is no captured frame to hand over.
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), nil);
#endif
    updateDisableExternalPlayback();
}
1188
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    // Forward the new fullscreen geometry to the layer manager.
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1193
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    // Records the requested gravity and applies it to the video layer, if any.
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    // Translate the cross-platform gravity value to the AVFoundation constant.
    NSString *videoGravity = AVLayerVideoGravityResizeAspect;
    if (gravity == MediaPlayer::VideoGravityResize)
        videoGravity = AVLayerVideoGravityResize;
    else if (gravity == MediaPlayer::VideoGravityResizeAspect)
        videoGravity = AVLayerVideoGravityResizeAspect;
    else if (gravity == MediaPlayer::VideoGravityResizeAspectFill)
        videoGravity = AVLayerVideoGravityResizeAspectFill;
    else
        ASSERT_NOT_REACHED();
    
    // Compare string values, not pointers: the layer is not guaranteed to hand
    // back the very same constant instance we assigned earlier.
    if ([[m_videoLayer videoGravity] isEqualToString:videoGravity])
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1217
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS) && !ENABLE(EXTRA_ZOOM_MODE)
    // Keep the layer's picture-in-picture flag and the external-playback policy
    // in sync with the new fullscreen mode.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1228
1229 #if PLATFORM(IOS)
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // Returns the most recently cached timed metadata, or nil when none exists.
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1236
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    // Serializes the player item's access log in its extended textual form.
    if (!m_avPlayerItem)
        return emptyString();
    
    AVPlayerItemAccessLog *itemLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> serializedLog = adoptNS([[NSString alloc] initWithData:[itemLog extendedLogData] encoding:[itemLog extendedLogDataStringEncoding]]);

    return serializedLog.get();
}
1247
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    // Serializes the player item's error log in its extended textual form.
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *itemLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> serializedLog = adoptNS([[NSString alloc] initWithData:[itemLog extendedLogData] encoding:[itemLog extendedLogDataStringEncoding]]);

    return serializedLog.get();
}
1258 #endif
1259
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Toggle layer visibility inside a transaction with implicit animations off,
    // so the change takes effect without a fade.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1268     
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    // Playback starts by applying the requested rate; defer observer callbacks
    // while the rate change fans out.
    setDelayCallbacks(true);
    double rateToApply = requestedRate();
    m_cachedRate = rateToApply;
    [m_avPlayer.get() setRate:rateToApply];
    setDelayCallbacks(false);
}
1280
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    // Pausing is expressed as a rate of zero; defer observer callbacks while
    // the rate change fans out.
    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1292
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Asking the asset for its duration before loading finishes would fetch the
    // answer synchronously, so bail out until the asset is loaded.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    // Prefer the player item's duration when it is ready to play; some assets
    // never report a duration of their own.
    CMTime duration;
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        duration = [m_avPlayerItem.get() duration];
    else
        duration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(duration))
        return PAL::toMediaTime(duration);

    if (CMTIME_IS_INDEFINITE(duration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();
}
1317
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    // Without metadata or a player item there is no meaningful playhead position.
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerItemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerItemTime))
        return MediaTime::zeroTime();

    // Clamp to zero; the reported time can be slightly negative.
    return std::max(PAL::toMediaTime(playerItemTime), MediaTime::zeroTime());
}
1329
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partially delivered metadata cues become invalid once the playhead moves.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;
    
    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        // Hop back to the main thread; the player may be gone by then.
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1360
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    // On iOS, per-player volume is intentionally not applied here.
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1373
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    // Cache the muted state even before the AVPlayer exists, so it can be
    // applied when the player is created.
    if (muted == m_muted)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", muted);

    m_muted = muted;

    if (m_avPlayer)
        [m_avPlayer.get() setMuted:m_muted];
}
1388
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // Caption visibility is not applied here; this override only logs the
    // request once metadata is available.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", closedCaptionsVisible);
}
1398
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    // Cache the rate, then push it to the AVPlayer with callbacks deferred so
    // the resulting KVO notifications are delivered afterwards.
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1406
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    // Without metadata there is no meaningful rate; otherwise report the cache.
    return metaDataAvailable() ? m_cachedRate : 0;
}
1414
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
    // The AVPlayerItem property only exists on macOS 10.13+ / iOS 11+;
    // report 0 on older targets.
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1423
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
    // The AVPlayerItem property only exists on macOS 10.13+ / iOS 11+;
    // report 0 on older targets.
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1432
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    // Spectral keeps pitch constant across rate changes; Varispeed lets it shift.
    if (!m_avPlayerItem)
        return;

    NSString *pitchAlgorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:pitchAlgorithm];
}
1438
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    // Converts the KVO-cached loaded ranges into a PlatformTimeRanges object.
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        // Skip invalid or zero-length ranges.
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        bufferedRanges->add(PAL::toMediaTime(range.start), PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1453
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    // The minimum seekable time is the earliest start among the valid cached
    // seekable ranges; zero when there are none.
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        MediaTime rangeStart = PAL::toMediaTime(range.start);
        if (rangeStart < earliestStart)
            earliestStart = rangeStart;
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1473
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Lazily refresh the cached seekable ranges, then return the latest end
    // time among the valid ones.
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = PAL::toMediaTime(CMTimeRangeGetEnd(range));
        if (latestEnd < rangeEnd)
            latestEnd = rangeEnd;
    }
    return latestEnd;
}
1491
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    // The maximum loaded time is the latest end among the valid cached loaded
    // ranges; zero when nothing is cached.
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = PAL::toMediaTime(CMTimeRangeGetEnd(range));
        if (latestEnd < rangeEnd)
            latestEnd = rangeEnd;
    }

    return latestEnd;
}
1510
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    // Total size is the sum of every cached track's sample data length,
    // computed lazily and memoized in m_cachedTotalBytes.
    if (!metaDataAvailable())
        return 0;

    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    unsigned long long totalSampleBytes = 0;
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        totalSampleBytes += [[track assetTrack] totalSampleDataLength];
    m_cachedTotalBytes = totalSampleBytes;

    return m_cachedTotalBytes;
}
1524
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    // Take ownership of the by-value RetainPtr instead of copying it,
    // avoiding an unnecessary retain/release pair. (WTFMove is already
    // used elsewhere in this file.)
    m_avAsset = WTFMove(asset);
}
1529
// Maps the loading state of the asset's metadata keys onto the engine's
// AssetStatus. Loading/Failed/Cancelled are reported as soon as any single
// key is in that state; otherwise the result is Playable or Loaded depending
// on the asset's "playable" value and hardware decode support for its tracks.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];

        if (error)
            ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // When the player does not require a hardware-support check, treat the
    // tracks as playable without inspecting them.
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    // Lazily evaluate hardware decode requirements once; any failing track
    // marks the whole asset as not hardware-playable.
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1570
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    // Report the error code associated with the asset's "playable" key,
    // or 0 when there is no asset or no error.
    if (!m_avAsset)
        return 0;

    NSError *playableError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&playableError];
    // Messaging nil returns 0, so a missing error yields no error code.
    return [playableError code];
}
1580
// Paints the current video frame into the given graphics context, preferring
// the video output path when a frame is available there and falling back to
// the image generator otherwise.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Defer AVFoundation callbacks while painting; ObjC exceptions from the
    // painting calls are trapped by the BEGIN/END block macros.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that at least one frame has been drawn.
    m_videoFrameHasDrawn = true;
}
1601
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    // paint() is best effort: bail out unless painting is possible and useful.
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Already rendering to a layer, so a context paint is unnecessary.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // Only paint when an image generator or video output already exists.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1617
// Paints the frame for the current time using the AVAssetImageGenerator path.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    INFO_LOG(LOGIDENTIFIER);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (image) {
        GraphicsContextStateSaver stateSaver(context);
        // Flip the context vertically so the CG image draws right side up.
        context.translate(rect.x(), rect.y() + rect.height());
        context.scale(FloatSize(1.0f, -1.0f));
        context.setImageInterpolationQuality(InterpolationLow);
        IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
        CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    }
}
1632
// Creates a CGImage for the given media time, sized to fit the given rect,
// via the lazily-created AVAssetImageGenerator. The raw image is copied into
// the sRGB color space before being returned.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    // A 600 timescale is the conventional CMTime timescale for video.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating image took ", (MonotonicTime::now() - start).seconds());
#endif

    return image;
}
1653
// Reports the MIME types playable by this engine, as cached by
// AVFoundationMIMETypeCache.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
}
1658
1659 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// A key system is supported when it is FairPlay Streaming (either identifier)
// or Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1666 #endif
1667
// Static capability query: decides whether this engine can (definitely,
// maybe, or not) play content described by the given parameters.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    // MSE and MediaStream content is handled by other private players.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    // The container must be known either to the static list or to the
    // AVFoundation-backed MIME type cache.
    if (!staticMIMETypeList().contains(containerType) && !AVFoundationMIMETypeCache::singleton().types().contains(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // Let AVFoundation judge the full "type; codecs=..." string.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1697
// Returns true when the given key system (and optional MIME type) is
// supported for legacy encrypted media playback.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Was `return MediaPlayer::IsNotSupported;` — an enum constant in a
        // bool-returning function; it happened to convert to false.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        // When a MIME type is given, it must also be one this engine plays.
        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().types().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1723
1724 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1725 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Satisfies an AVAssetResourceLoadingRequest directly from in-memory key
// data: fills in the content information, responds with the requested byte
// range of keyData, and finishes the request.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the requested end to the available key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // An out-of-range request is failed (nil error) rather than answered.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        // slice() takes ints; key data is assumed small enough to fit.
        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1751 #endif
1752
// Decides whether WebCore will service an AVAssetResourceLoadingRequest.
// "skd" (FairPlay) and "clearkey" scheme requests are routed through the
// encrypted-media key machinery; everything else is handed to a
// WebCoreAVFResourceLoader. Returns true when loading will be handled
// asynchronously by WebCore, false to let AVFoundation fail/handle it.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
    if (scheme == "skd") {
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        // Little-endian 32-bit length prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        // Copy the UTF-16 key URI after the length prefix.
        // NOTE(review): the element count is written as
        // `keyURI.length() / sizeof(unsigned char)`, which equals
        // keyURI.length(); the divisor looks like it was meant to be
        // something else — confirm against the initData consumer.
        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        RefPtr<Uint8Array> initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        // If the page does not want the key, do not wait for loading.
        if (!player()->keyNeeded(initData.get()))
            return false;
#endif
        // Remember the pending request so the key response can fulfill it.
        m_keyURIToRequestMap.set(keyURI, avRequest);
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
        // With a CDM instance attached, AVContentKeySession handles the key.
        if (m_cdmInstance)
            return false;

        RetainPtr<NSData> keyURIData = [keyURI dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
        auto keyURIBuffer = SharedBuffer::create(keyURIData.get());
        player()->initializationDataEncountered(ASCIILiteral("skd"), keyURIBuffer->tryCreateArrayBuffer());
#endif
        return true;
    }

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        auto encodedKeyId = UTF8Encoding().encode(keyID, UnencodableHandling::URLEncodedEntities);

        auto initData = Uint8Array::create(encodedKeyId.size());
        initData->setRange(encodedKeyId.data(), encodedKeyId.size(), 0);

        // A cached key can satisfy the request immediately.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif
#endif

    // Ordinary resource: service it with a WebCore loader tracked per request.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1816
// AVFoundation cancelled a resource loading request: stop the WebCore-side
// loader servicing it, if any. (The map entry itself is removed by
// didStopLoadingRequest().) The previously computed `scheme` local was
// unused and has been removed.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1826
// A resource loading request finished or was stopped: drop its loader from
// the tracking map.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1831 #endif
1832
// This engine is available only when both AVFoundation and CoreMedia can be
// soft-linked at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1837
// Would snap a time value to the media timescale; currently a pass-through.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1846
// How long (in seconds) a cached current-time value may be trusted. Modern
// platforms report time cheaply, so no caching is needed there; older macOS
// uses a 5 second window.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1855
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;

    NSString *newGravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;

    // Apply without implicit CA animations.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:newGravity];
    [CATransaction commit];
}
1872
// Returns the first enabled track in the array, or nil when none is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack *track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1882
// Recomputes everything derived from the current track collection: hasVideo /
// hasAudio / captions flags, presentation size, audio/video track lists,
// metadata tracks, and the audio source provider's track. Called whenever the
// tracks collection changes.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so a change can trigger
    // characteristicsChanged() at the end.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch all characteristic change notifications until this update is done.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly fequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        // With a player item, derive the flags from the enabled cached tracks.
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option also counts as audio/video.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for diaplay to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media selection group for caption detection.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the current audio track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1993
1994 #if ENABLE(VIDEO_TRACK)
1995
// Diffs the AVPlayerItemTracks of the given media type against the existing
// track-private items: items whose AVPlayerItemTrack disappeared are removed
// from `oldItems` and reported via `removedFunction`; new tracks get items
// built with `itemFactory` and are reported via `addedFunction`.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // New set: only the tracks whose asset track matches the requested type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Old set: the AVPlayerItemTracks backing the existing items.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the existing items into kept and removed, then append
    // freshly-created items for the added tracks.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player after oldItems is consistent again.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2039
2040 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2041
// Media-selection-group flavor of the track diff: refreshes the group's
// options for the given characteristics, then diffs the group's current
// MediaSelectionOptions against the options backing the existing items,
// reporting removals and additions through the given MediaPlayer callbacks.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Current options, skipping entries without a backing AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Options backing the items we already have.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition existing items into kept and removed; items with no backing
    // option are treated as removed.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Notify the player after oldItems is consistent again.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2101
2102 #endif
2103
// Synchronizes m_audioTracks with the current audio tracks, preferring the
// audible media selection group when available and falling back to the raw
// AVPlayerItemTrack list otherwise.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    // Lazily create the audible selection group wrapper on first use.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh cached per-track properties after the diff.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
#endif
}
2130
// Synchronizes m_videoTracks with the current video tracks, diffing both the
// raw AVPlayerItemTrack list and (when available) the visual media selection
// group.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection group wrapper on first use.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh cached per-track properties of the *video* tracks. (Iterating
    // m_audioTracks here was a copy/paste error from updateAudioTracks().)
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
#endif
}
2156
// Delegates to the fullscreen layer manager: a text track representation is
// required when captions must be rendered into the fullscreen layer tree.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
    return m_videoFullscreenLayerManager->requiresTextTrackRepresentation();
}
2161
// Delegates caption-bounds synchronization to the fullscreen layer manager.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
    m_videoFullscreenLayerManager->syncTextTrackBounds();
}
2166
// Delegates installing the text track representation to the fullscreen layer
// manager.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
}
2171
2172 #endif // ENABLE(VIDEO_TRACK)
2173
2174 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2175
// Lazily creates the Web Audio source provider for the current player item,
// initializing it with the first enabled audible track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    return m_provider.get();
}
2184
2185 #endif
2186
// Propagates the cached presentation size as the player's natural size.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    setNaturalSize(m_cachedPresentationSize);
}
2194
// Publishes the asset's resolved URL (post-redirect), or an empty URL when
// there is no asset.
void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
{
    setResolvedURL(m_avAsset ? URL([m_avAsset resolvedURL]) : URL());
}
2199
// Reports whether all loads for this asset passed CORS checks. Only
// answerable when loading went through a WebCoreNSURLSession; otherwise the
// conservative answer is false.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    // The URLSession SPI may be unavailable or the feature disabled.
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session isKindOfClass:[WebCoreNSURLSession class]])
        return session.didPassCORSAccessChecks;
#endif
    return false;
}
2214
2215 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2216
// Creates the AVPlayerItemVideoOutput used for frame capture and attaches it
// to the player item. No-op without a player item or when one already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avPlayerItem || m_videoOutput)
        return;

    // With VideoToolbox, let the output choose its own pixel format;
    // otherwise request 32BGRA explicitly.
#if USE(VIDEOTOOLBOX)
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Deliver output callbacks on the shared pull-delegate queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
}
2236
// Detaches the video output from the player item (when present) and releases
// it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    INFO_LOG(LOGIDENTIFIER);

    // Clear the RetainPtr with nil rather than 0 — idiomatic for an
    // Objective-C object pointer.
    m_videoOutput = nil;
}
2249
// Pulls the pixel buffer for the item's current time into m_lastPixelBuffer,
// creating the video output on demand. Returns true when a new buffer was
// captured; a captured buffer also invalidates the cached m_lastImage.
bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
{
    if (!m_avPlayerItem)
        return false;

    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return false;

    m_lastPixelBuffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    // The cached converted image no longer matches the new pixel buffer.
    m_lastImage = nullptr;
    return true;
}
2268
// True when a frame can be painted from the video output path: either a
// previously converted image is cached, or the output has a new pixel buffer
// for the item's current time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2282
// Converts the latest captured pixel buffer into m_lastImage. With
// UpdateSynchronously, blocks until the video output reports a frame when
// none is available yet.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage(UpdateType type)
{
#if HAVE(CORE_VIDEO)
    if (!m_avPlayerItem)
        return;

    if (type == UpdateType::UpdateSynchronously && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
        return;

    // Lazily create the conformer that converts CVPixelBuffers to CGImages.
    // With VideoToolbox the conformer converts to 32BGRA explicitly.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        NSDictionary *attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)attributes);
    }

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating buffer took ", (MonotonicTime::now() - start).seconds());
#endif
#endif // HAVE(CORE_VIDEO)
}
2318
// Paints the current video frame into the given graphics context, applying
// the track's preferred transform (e.g. rotation) so the frame appears
// upright within outputRect.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    updateLastImage(UpdateType::UpdateSynchronously);
    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Restores the context's CTM and state when this scope exits.
    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect through the inverse transform so that after
    // concatenating videoTransform below the frame lands in outputRect.
    // A non-invertible transform falls back to identity.
    FloatRect transformedOutputRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2345
// Copies the current video frame into a GL texture owned by the caller.
// Returns false when no pixel buffer is available.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Dtype type, bool premultiplyAlpha, bool flipY)
{
    ASSERT(context);

    updateLastPixelBuffer();
    auto* pixelBuffer = m_lastPixelBuffer.get();
    if (!pixelBuffer)
        return false;

    // The copier is created once and reused for subsequent frames.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyImageToPlatformTexture(pixelBuffer, CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer), outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2362
// Returns the CGImage for the current playback time, refreshing the cached
// image first (asynchronous update: may return the previous frame).
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2368
// Blocks the calling thread until the video output reports new media data
// (signalled via outputMediaDataWillChange()) or one second elapses.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Lazily create the semaphore signalled by outputMediaDataWillChange().
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    // Ask for a notification as soon as any new media data is available.
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second. Use DISPATCH_TIME_NOW (rather than a bare 0) as the
    // documented base for dispatch_time().
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC));

    // dispatch_semaphore_wait() returns non-zero on timeout.
    if (result)
        ERROR_LOG(LOGIDENTIFIER, "timed out");
}
2382
// Delegate callback from the video output (on the pull-delegate queue);
// wakes any thread blocked in waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2387
2388 #endif
2389
2390 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2391
// Removes and returns the pending loading request for the given key URI;
// returns a null RetainPtr when no request is pending for that URI.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2396
// Called when a decryption key becomes available: fulfills every pending
// resource-loading request whose key is now cached, then removes the
// fulfilled entries from the map.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> fulfilledKeyIds;

    for (auto& pair : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(pair.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(pair.value.get(), keyData.get());
        fulfilledKeyIds.append(pair.key);
    }

    // Removal is deferred so the map is not mutated while being iterated.
    for (auto& keyId : fulfilledKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2416
// Detaches the legacy CDM session; the caller must pass the session that
// was previously installed via createSession().
void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2422
// Creates a legacy CDM session for the given key system, or nullptr when the
// key system is unsupported. Keeps a weak reference to the session so this
// player can forward errors without extending its lifetime.
std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = session->createWeakPtr();
    return WTFMove(session);
}
2431 #endif
2432
2433 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Forwards HDCP/output-protection changes to whichever CDM machinery is
// active (legacy session and/or modern CDM instance).
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // 'HDCP' is a four-char code used as the NSError code for this condition.
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_cdmInstance)
        m_cdmInstance->outputObscuredDueToInsufficientExternalProtectionChanged(newValue);
#elif !ENABLE(LEGACY_ENCRYPTED_MEDIA)
    UNUSED_PARAM(newValue);
#endif
}
2448 #endif
2449
2450 #if ENABLE(ENCRYPTED_MEDIA)
// Attaches a FairPlay Streaming CDM instance, detaching any previously
// attached instance, and registers the asset as a content key recipient.
// Non-FairPlay instances are ignored.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
        return;

    auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    // Re-attaching the same instance is a no-op.
    if (&fpsInstance == m_cdmInstance)
        return;

    if (m_cdmInstance)
        cdmInstanceDetached(*m_cdmInstance);

    m_cdmInstance = &fpsInstance;
    [m_cdmInstance->contentKeySession() addContentKeyRecipient:m_avAsset.get()];
#else
    UNUSED_PARAM(instance);
#endif
}
2470
// Detaches the current CDM instance; the caller must pass the instance that
// is currently attached. Unregisters the asset as a key recipient.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceDetached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);
    [m_cdmInstance->contentKeySession() removeContentKeyRecipient:m_avAsset.get()];
    m_cdmInstance = nullptr;
#else
    UNUSED_PARAM(instance);
#endif
}
2481
// With a CDM instance attached, the pending key requests can all complete:
// finish each outstanding loading request and clear the map.
void MediaPlayerPrivateAVFoundationObjC::attemptToDecryptWithInstance(CDMInstance&)
{
    // Move the map aside first so it is left empty even if finishLoading
    // re-enters and queues new requests.
    auto pendingRequests = WTFMove(m_keyURIToRequestMap);
    for (auto& request : pendingRequests.values())
        [request finishLoading];
}
2488 #endif
2489
2490 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2491
// Synchronizes m_textTracks with the player item's legacy (pre-media-
// selection) closed-caption tracks: existing tracks still present are kept,
// new ones are created, and the remainder is reported as removed.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Deselect any legible media option; legacy CC tracks are managed here.
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every current track was removed; matching tracks are
    // taken back out of this list below.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate in reverse so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(m_textTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2526
2527 #endif
2528
// Returns the asset's audible tracks, or nil when the asset is absent or
// its "tracks" key has not finished loading (querying earlier can block).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2539
2540 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2541
// True when the asset's media-selection metadata has finished loading and
// the selection groups below may safely be queried.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    if (!m_avAsset)
        return false;

    return [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2552
// Legible (caption/subtitle) selection group, or nil until metadata loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2560
// Audible (audio track) selection group, or nil until metadata loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2568
// Visual (video track) selection group, or nil until metadata loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2576
// Synchronizes m_textTracks with the asset's legible media-selection
// options: keeps tracks whose option still exists, creates tracks for new
// options, and reports the remainder as removed.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Start by assuming every current track was removed; matching tracks are
    // taken back out of this list below.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate in reverse so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy CC tracks are handled by processLegacyClosedCaptionsTracks().
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2632
// Lazily creates the single in-band metadata text track (for HLS timed
// metadata) and registers it with the player. Idempotent.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2642
// Forwards cue payloads (attributed strings and/or native samples) from the
// legible output to the currently selected text track, if any.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (!m_currentTextTrack)
        return;

    // NSArray is toll-free bridged to CFArray.
    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2652
// Discards any partially accumulated cue state on the active text track,
// e.g. after a seek.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    INFO_LOG(LOGIDENTIFIER);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2662
2663 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2664
// Selects (or, with nullptr, deselects) the active text track, routing the
// choice to the appropriate AVFoundation mechanism: legacy closed-caption
// display, an out-of-band selection option, or an in-band selection option.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");

    m_currentTextTrack = track;

    if (track) {
        // setClosedCaptionDisplayEnabled: is deprecated but still required for
        // legacy CC tracks; suppress the deprecation warning around its use.
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#pragma clang diagnostic pop
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselecting: clear both the media-selection choice and legacy CC display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
#pragma clang diagnostic pop
    }

}
2699
// Returns the BCP-47-ish language of the primary audio track, caching the
// answer in m_languageOfPrimaryAudioTrack. Prefers the currently selected
// audible media-selection option; falls back to a lone audio track's
// language; otherwise returns the empty string.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // A null string means "not computed yet"; empty string is a valid cached answer.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
#pragma clang diagnostic pop
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

    INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");

    return m_languageOfPrimaryAudioTrack;
}
2739
2740 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// True when media is currently routed to a wireless target. On iOS this is
// AVPlayer's externalPlaybackActive; on macOS it depends on the configured
// playback target's type and route state.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            // Mock targets: wireless only when playback was explicitly routed
            // there and the route is active.
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    INFO_LOG(LOGIDENTIFIER, "- ", wirelessTarget);

    return wirelessTarget;
}
2760
// Maps AVPlayer's external playback type to the MediaPlayer target-type
// enum. On non-iOS platforms the only wireless target is AirPlay.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    // The AVFoundation framework is soft-linked; without it there is no target.
    if (!AVFoundationLibrary())
        return MediaPlayer::TargetTypeNone;

    switch ([m_avPlayer externalPlaybackType]) {
    case AVPlayerExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case AVPlayerExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case AVPlayerExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2786     
2787 #if PLATFORM(IOS)
// Returns a human-readable name for the external (AirPlay) device(s) the
// player is routed to, or nil when no wireless route is active.
// FIXME: function name is misspelled ("exernal"); kept for the existing caller.
static NSString *exernalDeviceDisplayNameForPlayer(AVPlayerType *player)
{
#if HAVE(CELESTIAL)
    if (!AVFoundationLibrary())
        return nil;

#if __IPHONE_OS_VERSION_MAX_ALLOWED >= 110000
    // Prefer the shared audio-presentation output context when available; it
    // knows about multi-device (grouped) AirPlay routes.
    if ([getAVOutputContextClass() respondsToSelector:@selector(sharedAudioPresentationOutputContext)]) {
        AVOutputContext *outputContext = [getAVOutputContextClass() sharedAudioPresentationOutputContext];

        if (![outputContext respondsToSelector:@selector(supportsMultipleOutputDevices)]
            || ![outputContext supportsMultipleOutputDevices]
            || ![outputContext respondsToSelector:@selector(outputDevices)])
            return [outputContext deviceName];

        // Multiple devices: join their names, e.g. "Kitchen + Living Room".
        // Note: -addObject: retains its argument, so passing an extra -copy
        // here would leak the copied string under manual retain/release.
        auto outputDeviceNames = adoptNS([[NSMutableArray alloc] init]);
        for (AVOutputDevice *outputDevice in [outputContext outputDevices])
            [outputDeviceNames addObject:[outputDevice name]];

        return [outputDeviceNames componentsJoinedByString:@" + "];
    }
#endif

    // Legacy path: consult MediaRemote's pickable routes.
    if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
        return nil;

    NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
    if (!pickableRoutes.count)
        return nil;

    NSString *displayName = nil;
    for (NSDictionary *pickableRoute in pickableRoutes) {
        if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
            continue;

        displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];

        NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
        if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
            break;

        // The route is a speaker or HDMI out, override the name to be the localized device model.
        NSString *localizedDeviceModel = [[UIDevice currentDevice] localizedModel];

        // In cases where a route with that name already exists, prefix the name with the model.
        BOOL includeLocalizedDeviceModelName = NO;
        for (NSDictionary *otherRoute in pickableRoutes) {
            if (otherRoute == pickableRoute)
                continue;

            if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
                includeLocalizedDeviceModelName = YES;
                break;
            }
        }

        if (includeLocalizedDeviceModelName)
            displayName = [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
        else
            displayName = localizedDeviceModel;

        break;
    }

    return displayName;
#else
    UNUSED_PARAM(player);
    return nil;
#endif
}
2858 #endif
2859
// Returns the display name of the wireless playback target, or the empty
// string when there is no player. On macOS the name comes from the
// configured MediaPlaybackTarget; on iOS it is derived from the active route.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = exernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif

    return wirelessTargetName;
}
2875
// Returns whether wireless (external) video playback is disabled. When a
// player exists, refreshes the mutable cache from AVPlayer; otherwise
// answers from the last cached value.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    INFO_LOG(LOGIDENTIFIER, "- ", !m_allowsWirelessVideoPlayback);

    return !m_allowsWirelessVideoPlayback;
}
2886
// Enables or disables wireless (external) video playback, updating both the
// cached flag and, when present, the AVPlayer itself.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    INFO_LOG(LOGIDENTIFIER, "- ", disabled);

    bool allowsExternalPlayback = !disabled;
    m_allowsWirelessVideoPlayback = allowsExternalPlayback;

    if (!m_avPlayer)
        return;

    // Delay KVO-driven callbacks while poking the player to avoid re-entrancy.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:allowsExternalPlayback];
    setDelayCallbacks(false);
}
2898
2899 #if !PLATFORM(IOS)
2900
// Installs a new wireless playback target (macOS only). Caches the target's
// AVOutputContext for AVFoundation targets; mock targets have none.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    INFO_LOG(LOGIDENTIFIER);

    // A target without an active route cannot be played to.
    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2912
// Starts or stops routing playback to the configured target (macOS only).
// AVFoundation targets are driven by setting the player's outputContext;
// mock targets just notify that wireless state changed.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", shouldPlay);

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        if (!m_avPlayer)
            return;

        // Avoid touching the player when the context would not change
        // (both nil, or equal objects).
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        // Delay KVO-driven callbacks while reconfiguring to avoid re-entrancy.
        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    // Mock target: asynchronously notify on the main thread, guarding against
    // this object being destroyed before the notification runs.
    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2953
2954 #endif // !PLATFORM(IOS)
2955
// iOS: keep AVPlayer's "use external playback while an external screen is
// active" behavior in sync with whether the element is in standard fullscreen.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS)
    if (!m_avPlayer)
        return;

    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
2965
2966 #endif
2967
// KVO-driven: cache the AVPlayerItem status and re-derive player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2974
// KVO "will change": count a pending update so updateStates() is deferred
// until the matching DidChange arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2979
// KVO "did change": cache the value; only recompute state once all pending
// will/did pairs have balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2988
// KVO "will change": count a pending update so updateStates() is deferred
// until the matching DidChange arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
2993
// KVO "did change": cache the value; only recompute state once all pending
// will/did pairs have balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3002
// KVO "will change": count a pending update so updateStates() is deferred
// until the matching DidChange arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
3007
// KVO "did change": cache the value; only recompute state once all pending
// will/did pairs have balanced out.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3016
// KVO-driven: cache the item's seekable ranges and propagate the change.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
3024
// KVO-driven: cache the item's loaded (buffered) ranges and propagate.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
3032
// KVO-driven: the layer became (or ceased being) ready for display. Becoming
// ready before any video track was seen forces a track re-scan.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
3040
// KVO-driven: a track's enabled flag flipped; re-scan tracks and state.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
3046
// Controls whether the player should buffer media data: buffering is stopped
// by detaching the item from the player and resumed by re-attaching it.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    INFO_LOG(LOGIDENTIFIER, "- ", shouldBuffer);

    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;
    
    if (!m_avPlayer)
        return;

    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
3061
3062 #if ENABLE(DATACUE_VALUE)
3063
3064 static const AtomicString& metadataType(NSString *avMetadataKeySpace)