[WTF] Move currentCPUTime and sleep(Seconds) to CPUTime.h and Seconds.h respectively
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "DeprecatedGlobalSettings.h"
41 #import "Extensions3D.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "OutOfBandTextTrackPrivateAVF.h"
54 #import "PixelBufferConformerCV.h"
55 #import "PlatformTimeRanges.h"
56 #import "SecurityOrigin.h"
57 #import "SerializedPlatformRepresentationMac.h"
58 #import "SharedBuffer.h"
59 #import "TextEncoding.h"
60 #import "TextTrackRepresentation.h"
61 #import "TextureCacheCV.h"
62 #import "URL.h"
63 #import "VideoTextureCopierCV.h"
64 #import "VideoTrackPrivateAVFObjC.h"
65 #import "WebCoreAVFResourceLoader.h"
66 #import "WebCoreCALayerExtras.h"
67 #import "WebCoreNSURLSession.h"
68 #import <JavaScriptCore/DataView.h>
69 #import <JavaScriptCore/JSCInlines.h>
70 #import <JavaScriptCore/TypedArrayInlines.h>
71 #import <JavaScriptCore/Uint16Array.h>
72 #import <JavaScriptCore/Uint32Array.h>
73 #import <JavaScriptCore/Uint8Array.h>
74 #import <functional>
75 #import <map>
76 #import <objc/runtime.h>
77 #import <pal/avfoundation/MediaTimeAVFoundation.h>
78 #import <pal/spi/cocoa/QuartzCoreSPI.h>
79 #import <pal/spi/mac/AVFoundationSPI.h>
80 #import <wtf/BlockObjCExceptions.h>
81 #import <wtf/ListHashSet.h>
82 #import <wtf/NeverDestroyed.h>
83 #import <wtf/OSObjectPtr.h>
84 #import <wtf/text/CString.h>
85
86 #if ENABLE(AVF_CAPTIONS)
87 #include "TextTrack.h"
88 #endif
89
90 #import <AVFoundation/AVAssetImageGenerator.h>
91 #import <AVFoundation/AVAssetTrack.h>
92 #import <AVFoundation/AVMediaSelectionGroup.h>
93 #import <AVFoundation/AVMetadataItem.h>
94 #import <AVFoundation/AVPlayer.h>
95 #import <AVFoundation/AVPlayerItem.h>
96 #import <AVFoundation/AVPlayerItemOutput.h>
97 #import <AVFoundation/AVPlayerItemTrack.h>
98 #import <AVFoundation/AVPlayerLayer.h>
99 #import <AVFoundation/AVTime.h>
100
101 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
102 #import "VideoFullscreenLayerManager.h"
103 #endif
104
105 #if PLATFORM(IOS)
106 #import "WAKAppKitStubs.h"
107 #import <CoreImage/CoreImage.h>
108 #import <UIKit/UIDevice.h>
109 #import <mach/mach_port.h>
110 #else
111 #import <Foundation/NSGeometry.h>
112 #import <QuartzCore/CoreImage.h>
113 #endif
114
115 #if USE(VIDEOTOOLBOX)
116 #import <CoreVideo/CoreVideo.h>
117 #import <VideoToolbox/VideoToolbox.h>
118 #endif
119
120 #import "CoreVideoSoftLink.h"
121 #import "MediaRemoteSoftLink.h"
122
// WTF::HashSet's iterator does not declare the iterator_traits typedefs that
// some STL algorithms require, so provide a minimal specialization for the
// MediaSelectionOptionAVFObjC iterator used in this file.
namespace std {
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
128
129 #if ENABLE(AVF_CAPTIONS)
130 // Note: This must be defined before our SOFT_LINK macros:
131 @class AVMediaSelectionOption;
132 @interface AVMediaSelectionOption (OutOfBandExtensions)
133 @property (nonatomic, readonly) NSString* outOfBandSource;
134 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
135 @end
136 #endif
137
138 @interface AVURLAsset (WebKitExtensions)
139 @property (nonatomic, readonly) NSURL *resolvedURL;
140 @end
141
142 typedef AVPlayer AVPlayerType;
143 typedef AVPlayerItem AVPlayerItemType;
144 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
145 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
146 typedef AVMetadataItem AVMetadataItemType;
147 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
148 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
149 typedef AVAssetCache AVAssetCacheType;
150
151 #pragma mark - Soft Linking
152
153 // Soft-linking headers must be included last since they #define functions, constants, etc.
154 #import <pal/cf/CoreMediaSoftLink.h>
155
156 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
157
158 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
159
160 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
161 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
162 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
163 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
164 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
165 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
166 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
167 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)
168
169 SOFT_LINK_CLASS(CoreImage, CIContext)
170 SOFT_LINK_CLASS(CoreImage, CIImage)
171
172 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
173 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
174 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
175 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
176 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
177 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
178 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
179 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
180 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
182 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
183 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
184 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
185 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
186 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
187
188 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
189 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetRequiresCustomURLLoadingKey, NSString *)
190 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetOutOfBandMIMETypeKey, NSString *)
191
192 #define AVPlayer initAVPlayer()
193 #define AVPlayerItem initAVPlayerItem()
194 #define AVPlayerLayer initAVPlayerLayer()
195 #define AVURLAsset initAVURLAsset()
196 #define AVAssetImageGenerator initAVAssetImageGenerator()
197 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
198 #define AVMetadataItem initAVMetadataItem()
199 #define AVAssetCache initAVAssetCache()
200
201 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
202 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
203 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
204 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
205 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
206 #define AVMediaTypeVideo getAVMediaTypeVideo()
207 #define AVMediaTypeAudio getAVMediaTypeAudio()
208 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
209 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
210 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
211 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
212 #define AVURLAssetRequiresCustomURLLoadingKey getAVURLAssetRequiresCustomURLLoadingKey()
213 #define AVURLAssetOutOfBandMIMETypeKey getAVURLAssetOutOfBandMIMETypeKey()
214 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
215 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
216 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
217 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
218 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
219
220 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
221 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
222 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
223
224 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
225 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
226 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
227
228 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
229 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
230 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
231 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
232
233 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
234 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
235 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
236 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
237 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
238 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
239 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
240 #endif
241
242 #if ENABLE(AVF_CAPTIONS)
243 SOFT_LINK_POINTER(AVFoundation, AVURLAssetCacheKey, NSString*)
244 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
245 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
246 SOFT_LINK_POINTER(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString*)
247 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
248 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
249 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
250 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
255 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
256
257 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
258 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
259 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
260 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
261 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
262 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
263 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
264 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
265 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
266 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
267 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
268 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
269 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
270 #endif
271
272 #if ENABLE(DATACUE_VALUE)
273 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
274 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
275 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
276 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
277 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
278
279 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
280 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
281 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
282 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
283 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
284 #endif
285
286 #if PLATFORM(IOS)
287 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
288 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
289 #endif
290
291 SOFT_LINK_FRAMEWORK(MediaToolbox)
292 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
293
294 #if PLATFORM(IOS)
295 #if HAVE(CELESTIAL)
296 SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
297 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
298 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
299 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
300 #define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
301 #define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
302 #define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
303 #endif // HAVE(CELESTIAL)
304
305 SOFT_LINK_FRAMEWORK(UIKit)
306 SOFT_LINK_CLASS(UIKit, UIDevice)
307 #define UIDevice getUIDeviceClass()
308 #endif // PLATFORM(IOS)
309
310 using namespace WebCore;
311
// Context values passed through KVO registration so that
// -observeValueForKeyPath:ofObject:change:context: can tell which kind of
// object (player item, item track, player, or player layer) a notification
// pertains to.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
318
// Observer object that receives KVO notifications, NSNotifications and (when
// supported) legible-output callbacks on behalf of a
// MediaPlayerPrivateAVFoundationObjC instance. The C++ owner is held as a raw
// pointer; -disconnect must be called before the owner is destroyed.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer; cleared by -disconnect.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath is declared untyped (implicitly id) — presumably NSString *; confirm against the implementation before tightening.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
338
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Forwards AVAssetResourceLoader requests to the C++ player so WebKit can
// service custom media resource loads. The callback pointer is raw and must
// be cleared (via -setCallback:) before the player is destroyed.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
348
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Receives AVPlayerItemVideoOutput pull notifications for the C++ player.
// The callback pointer is raw and must be cleared (via -setCallback:) before
// the player is destroyed.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback;
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
360
361 namespace WebCore {
362 using namespace PAL;
363
364 static NSArray *assetMetadataKeyNames();
365 static NSArray *itemKVOProperties();
366 static NSArray *assetTrackMetadataKeyNames();
367 static NSArray *playerKVOProperties();
368 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
369
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the single serial queue on which AVAssetResourceLoader delegate
// callbacks are delivered. Created once on first use; intentionally never
// destroyed.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t globalQueue;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        globalQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return globalQueue;
}
#endif
381
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the shared serial queue on which WebCoreAVFPullDelegate callbacks
// are delivered. Lazily created exactly once; intentionally never destroyed.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
#endif
393
// Registers this engine with the MediaPlayer factory machinery. A no-op when
// AVFoundation is unavailable (e.g. the framework failed to soft-link).
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    // Prime the MIME type cache so later supportsType() queries have data.
    AVFoundationMIMETypeCache::singleton().loadTypes();
}
403
// Returns the AVAssetCache rooted at `path`, falling back to a "MediaCache"
// directory under the system temporary directory when no path is given.
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    NSURL *cacheDirectoryURL = path.isEmpty()
        ? [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES]
        : [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:cacheDirectoryURL];
}
415
// Collects the security origins of every entry in the media cache at `path`.
// Cache keys are URL strings; keys that do not parse as valid URLs are skipped.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> origins;
    for (NSString *key in [assetCacheForPath(path) allKeys]) {
        URL url = URL(URL(), key);
        if (!url.isValid())
            continue;
        origins.add(SecurityOrigin::create(url));
    }
    return origins;
}
426
// Converts a (non-null) NSDate into WTF's WallTime representation.
static WallTime toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    return WallTime::fromRawSeconds([date timeIntervalSince1970]);
}
432
// Removes every media cache entry (and its backing file) at `path` that was
// modified more recently than `modifiedSince`. A `modifiedSince` at or before
// the epoch clears the entire cache directory.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, WallTime modifiedSince)
{
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    
    // First evict the entries the asset cache itself knows about.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    if (modifiedSince <= WallTime::fromRawSeconds(0)) {
        // Clearing everything: remove the whole cache directory and stop.
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }
    
    // Then sweep the on-disk directory for cache files newer than the cutoff.
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];
    
    // Collect first, delete after: mutating the directory while enumerating it
    // would confuse the enumerator.
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    // Note: in Objective-C++, range-based for over an Objective-C collection
    // lowers to fast enumeration.
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
    
        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;
        
        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;
        
        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;
        
        [urlsToDelete addObject:fileURL];
    }
    
    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
474
// Removes every cache entry at `path` whose key URL belongs to one of the
// given security origins. Keys that do not parse as valid URLs are left alone.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    AVAssetCacheType* cache = assetCacheForPath(path);
    for (NSString *key in [cache allKeys]) {
        URL url = URL(URL(), key);
        if (!url.isValid())
            continue;
        if (origins.contains(SecurityOrigin::create(url)))
            [cache removeEntryForKey:key];
    }
}
486
// Initializes bookkeeping state and creates the Objective-C helper objects.
// The AVPlayer/AVPlayerItem themselves are created later, once a load is
// actually requested.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManager>())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    // KVO/notification observer; disconnected during cancelLoad()/destruction.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
520
// Severs all delegate/observer links first so the Objective-C helpers cannot
// call back into this (about to be destroyed) object, then tears down
// rendering and any pending load.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    // Abort any in-flight custom resource loads.
    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
542
// Aborts any in-progress load and releases the AVPlayer, AVPlayerItem and all
// observers/outputs attached to them, returning this object to a pristine
// state. Safe to call multiple times; all the nil checks make teardown
// idempotent.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    INFO_LOG(LOGIDENTIFIER);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister KVO before releasing the item/player so no stale
    // notifications are delivered to m_objcObserver.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was KVO-observed for "enabled"; balance those adds.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    // Re-enable load-state notifications now that teardown is complete.
    setIgnoreLoadStateChanges(false);
}
608
// A layer renderer exists once we have committed to creating an AVPlayerLayer,
// even if the layer itself has not yet been created on the main thread
// (see createVideoLayer()).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
613
// Returns whether frames can be painted into a graphics context, either via
// the video output (preferred, when available) or the image-generator
// fallback.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}
622
// Creates whichever context renderer this build supports: the AVPlayerItem
// video output when available, otherwise the image-generator fallback.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
631
// Lazily creates the AVAssetImageGenerator used as a software fallback for
// painting frames when no video output is available. No-op until an asset
// exists, and never re-creates an existing generator.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    using namespace PAL;
    INFO_LOG(LOGIDENTIFIER);

    if (m_imageGenerator || !m_avAsset)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    auto *generator = m_imageGenerator.get();
    [generator setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [generator setAppliesPreferredTrackTransform:YES];
    // Zero tolerance: snapshots must come from the exact requested time, not a
    // nearby keyframe.
    [generator setRequestedTimeToleranceBefore:kCMTimeZero];
    [generator setRequestedTimeToleranceAfter:kCMTimeZero];
}
647
// Tears down both possible context renderers; each destroy helper is a no-op
// if its renderer was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
655
// Releases the software image generator created by createImageGenerator().
// No-op when no generator exists.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Consistency fix: use nil (not 0) for Objective-C object pointers, as the
    // rest of this file does (e.g. m_avAsset = nil in cancelLoad()).
    m_imageGenerator = nil;
}
665
// Requests creation of the AVPlayerLayer. The actual work is deferred to the
// main thread; m_haveBeenAskedToCreateLayer guards against duplicate
// requests while the hop is pending.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = createWeakPtr()] {
        if (!weakThis)
            return;

        // Re-check: state may have changed before the main-thread hop ran.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
690
// Creates the AVPlayerLayer used for layer-backed rendering, registers KVO on
// its readiness, and hands it to the fullscreen layer manager (or sizes it
// directly on platforms without one). Expected to run on the main thread —
// see createVideoLayer().
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Watch readyForDisplay so hasAvailableVideoFrame() can answer from cache.
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    INFO_LOG(LOGIDENTIFIER);

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS)
    // setPIPModeEnabled: is not present on all OS versions; probe first.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
719
// Detaches and releases the AVPlayerLayer, undoing the KVO registration made
// in createAVPlayerLayer(). No-op when no layer exists.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif

    m_videoLayer = nil;
}
736
// Returns the wall-clock date corresponding to media time zero — both values
// are scaled to milliseconds before subtracting — or an invalid MediaTime
// when the stream carries no date metadata.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // Date changes as the track's playback position changes. Must subtract currentTime (offset in seconds) from date offset to get date beginning
    double date = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // No live streams were made during the epoch (1970). AVFoundation returns 0 if the media file doesn't have a start date
    if (!date)
        return MediaTime::invalidTime();

    double currentTime = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Rounding due to second offset error when subtracting.
    return MediaTime::createWithDouble(round(date - currentTime));
}
751
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    // When rendering to a layer, the cached readyForDisplay state is authoritative.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return m_cachedIsReadyForDisplay;

    // Otherwise a frame is available when the video output has already handed us
    // a pixel buffer, or has a fresh one for the item's current time.
    if (m_videoOutput) {
        if (m_lastPixelBuffer)
            return true;
        if ([m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]])
            return true;
    }

    // Fall back to whether a frame was ever painted.
    return m_videoFrameHasDrawn;
}
762
763 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore text track kind to the AVFoundation media characteristics used
// to describe an out-of-band track. Uses modern array literals for consistency
// with the manual-selection branch below (and the rest of this file).
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // In Caption 2015 ("manual selection") behavior, every out-of-band track is
    // exposed as auxiliary content regardless of its kind.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Caption)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Subtitle)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Description)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];

    if (kind == PlatformTextTrack::Forced)
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];

    return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
}
785     
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    // Thin forwarder: route out-of-band track mode changes into the shared
    // trackModeChanged() machinery.
    trackModeChanged();
}
790     
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    // Pushes the mode of each out-of-band track source from the player down to
    // the matching platform text track. Tracks are matched by comparing the
    // source's unique ID against the media selection option's out-of-band
    // identifier.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven by the player's sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Hidden is the default when the source mode is unrecognized.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
821 #endif
822
823
// Runs the URL through NSURLProtocol canonicalization, falling back to the
// original Cocoa URL whenever any intermediate step fails.
static NSURL *canonicalURL(const URL& url)
{
    NSURL *originalURL = url;
    if (url.isEmpty())
        return originalURL;

    RetainPtr<NSURLRequest> originalRequest = adoptNS([[NSURLRequest alloc] initWithURL:originalURL]);
    if (!originalRequest)
        return originalURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:originalRequest.get()];
    return canonicalRequest ? [canonicalRequest URL] : originalURL;
}
840
841 #if PLATFORM(IOS)
// Converts a WebCore Cookie into an NSHTTPCookie. The expiration value is
// converted from milliseconds to seconds since the epoch; the secure and
// session flags are only added when set.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([[NSMutableDictionary alloc] init]);
    [properties setObject:cookie.name forKey:NSHTTPCookieName];
    [properties setObject:cookie.value forKey:NSHTTPCookieValue];
    [properties setObject:cookie.domain forKey:NSHTTPCookieDomain];
    [properties setObject:cookie.path forKey:NSHTTPCookiePath];
    [properties setObject:[NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)] forKey:NSHTTPCookieExpires];

    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
859 #endif
860
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
{
    // Creates the AVURLAsset for the given URL, assembling the options
    // dictionary: reference restrictions, HTTP header fields, inherited query
    // components, MIME type hints, out-of-band text tracks, network interface
    // binding, cookies, and cache policy. No-op if an asset already exists.
    if (m_avAsset)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid the asset from mixing local and remote references.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

    if (AVURLAssetRequiresCustomURLLoadingKey)
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
#endif

    // Pass the content type out-of-band only when it was not merely inferred
    // from the file extension; include codecs when known.
    auto type = player()->contentMIMEType();
    if (AVURLAssetOutOfBandMIMETypeKey && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track source to AVFoundation.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the document's cookies to AVFoundation for this asset's loads.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(url, cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
    
    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route resource loading through our loader delegate, and through an
    // NSURLSession-backed media resource loader when the API is available.
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    // The new asset's playability has not been checked yet.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
980
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    // Installs `item` as the player's current item, always on the main thread.
    if (!m_avPlayer)
        return;

    // Replace inline when already on the main thread.
    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Otherwise hop to the main queue, keeping both the player and the item
    // alive until the block runs.
    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
997
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    // Creates the AVPlayer, registers KVO observers for all player properties,
    // and replays any state (playback target, sleep, mute) that was set before
    // the player existed. No-op if a player already exists.
    if (m_avPlayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // We drive media selection ourselves; don't let AVFoundation choose.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything.
        m_muted = false;
        [m_avPlayer.get() setMuted:m_muted];
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    // Attach a pre-existing item now that the player exists.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1046
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    // Creates the AVPlayerItem from the current asset, registers end-of-playback
    // and KVO observers, configures pitch handling and legible (caption) output,
    // and attaches the item to the player if one exists.
    if (m_avPlayerItem)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Observe with the Prior option so will-change notifications are delivered too.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Set up legible output so WebVTT cues are delivered to our observer instead
    // of being rendered by AVFoundation.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the new item.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1092
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Asynchronously loads the asset's "playable" and "tracks" keys (once per
    // asset) and schedules an AssetPlayabilityKnown notification on completion.
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            // The player may have been destroyed before the keys finished loading.
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1109
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    INFO_LOG(LOGIDENTIFIER);

    // A dispatch group tracks the asset-level metadata load plus one load per
    // track; the observer's metadataLoaded is only invoked once all complete.
    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Once the tracks key is loaded, kick off per-track metadata loads,
            // each holding the group open until it finishes.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balance the enter done before loadValuesAsynchronouslyForKeys.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1139
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    // Maps the cached AVPlayerItem state to the cross-platform item status.
    // Checks are ordered by precedence: unknown/failed first, then buffering
    // states, with ready-to-play as the fallback.
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1158
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    INFO_LOG(LOGIDENTIFIER);
    // Package the underlying AVPlayer so callers can reach the platform object.
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationMediaPlayerType;
    platformMedia.media.avfMediaPlayer = m_avPlayer.get();
    return platformMedia;
}
1167
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    // Returns the rendering layer only after layer creation has been requested.
    // When fullscreen layer management is compiled in, the manager owns the
    // inline layer; otherwise the AVPlayerLayer is returned directly.
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    return m_haveBeenAskedToCreateLayer ? m_videoFullscreenLayerManager->videoInlineLayer() : nullptr;
#else
    return m_haveBeenAskedToCreateLayer ? m_videoLayer.get() : nullptr;
#endif
}
1176
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    // Moves the video layer into (or out of) the given fullscreen layer. The
    // completion handler always runs, even when the layer is unchanged.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer) {
        completionHandler();
        return;
    }

    // Batch the re-parenting in one transaction with implicit animations disabled.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler));

    // Re-attach the text track representation under the new fullscreen layer.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    [CATransaction commit];

    updateDisableExternalPlayback();
}
1199
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    // Forward the new fullscreen frame to the layer manager and keep the text
    // track representation's bounds in sync with it.
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
    syncTextTrackBounds();
}
1205
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    // Remember the requested gravity even when there is no layer to apply it to.
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    // Translate the WebCore gravity into the matching AVFoundation layer gravity.
    NSString *layerGravity = AVLayerVideoGravityResizeAspect;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        layerGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        layerGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        break;
    }

    // Avoid layer churn when the gravity is already what we want.
    if ([m_videoLayer videoGravity] == layerGravity)
        return;

    [m_videoLayer setVideoGravity:layerGravity];
    syncTextTrackBounds();
}
1229
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS)
    // -setPIPModeEnabled: is not available on every OS version; probe first.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    // Only iOS reacts to fullscreen mode changes here.
    UNUSED_PARAM(mode);
#endif
}
1240
1241 #endif // PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
1242
1243 #if PLATFORM(IOS)
1244 NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
1245 {
1246     if (m_currentMetaData)
1247         return m_currentMetaData.get();
1248     return nil;
1249 }
1250
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    // Serialize the item's access log using the log's own preferred encoding.
    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> serializedLog = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return serializedLog.get();
}
1261
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    // Serialize the item's error log using the log's own preferred encoding.
    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> serializedLog = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return serializedLog.get();
}
1272 #endif
1273
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Toggle the video layer's hidden state inside a transaction with implicit
    // animations disabled so the change takes effect immediately.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1282     
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    // Playback is started by setting the player's rate. Callbacks are delayed
    // so KVO notifications triggered by the rate change arrive afterwards.
    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1294
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    // Pausing is a rate of zero; delay callbacks as in platformPlay().
    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1306
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return PAL::toMediaTime(cmDuration);

    // An indefinite CMTime maps to an infinite duration.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();
}
1331
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    // Reports the item's current playback position, clamping non-numeric and
    // negative values to zero.
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (CMTIME_IS_NUMERIC(itemTime))
        return std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());

    return MediaTime::zeroTime();
}
1343
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Flush any partially-delivered metadata cues before moving the playhead.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;
    
    auto weakThis = createWeakPtr();

    // The completion handler may fire after this object is destroyed; the weak
    // pointer guards against that.
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1374
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    // Intentionally a no-op on iOS.
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1387
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    if (m_muted == muted)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", muted);

    // Record the state first so createAVPlayer() can apply it later if the
    // player does not exist yet.
    m_muted = muted;

    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setMuted:m_muted];
}
1402
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // This override only logs; it performs no work beyond the trace below.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", closedCaptionsVisible);
}
1412
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    // Cache the requested rate then apply it; callbacks are delayed so KVO
    // notifications triggered by the rate change arrive afterwards.
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1420
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    // Report the last requested rate; zero until metadata becomes available.
    return metaDataAvailable() ? m_cachedRate : 0;
}
1428
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
    // -seekableTimeRangesLastModifiedTime only exists on macOS 10.13 / iOS 11
    // and later; report 0 on earlier deployment targets.
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1437
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
    // -liveUpdateInterval only exists on macOS 10.13 / iOS 11 and later;
    // report 0 on earlier deployment targets.
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1446
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    // Spectral keeps the pitch constant across rate changes; Varispeed does not.
    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1452
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    // Convert every valid, non-empty cached CMTimeRange into a MediaTime range.
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        bufferedRanges->add(PAL::toMediaTime(range.start), PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1467
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    // Find the earliest start time among all valid, non-empty seekable ranges.
    bool foundValidRange = false;
    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, PAL::toMediaTime(range.start));
    }
    // With no valid ranges, report zero rather than infinity.
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1487
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Lazily pull seekable ranges from the item when nothing has been cached yet.
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    // Find the latest end time among all valid, non-empty seekable ranges.
    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1505
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    // Find the latest end time among all valid, non-empty loaded ranges.
    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }

    return latestEnd;
}
1524
// Sums the sample data length of every cached track. The result is memoized
// in m_cachedTotalBytes; a nonzero cached value is returned directly.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    unsigned long long byteCount = 0;
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        byteCount += [[track assetTrack] totalSampleDataLength];

    m_cachedTotalBytes = byteCount;
    return m_cachedTotalBytes;
}
1538
// Stores the AVAsset (or AVURLAsset) this player operates on.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    // The parameter is taken by value, so move from it instead of copying to
    // avoid an extra retain/release pair on the underlying ObjC object.
    m_avAsset = WTFMove(asset);
}
1543
// Maps the loading state of the asset's metadata keys, plus a per-track
// hardware-decode check, onto MediaPlayerPrivateAVFoundation::AssetStatus.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    // Inspect every metadata key we asked AVFoundation to load; the first
    // loading/failed/cancelled key determines the overall status.
    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];

        if (error)
            ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.

        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // When the client does not require the hardware-support check, treat all
    // tracks as playable without inspecting them.
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    // Compute and cache the per-track hardware-decode result at most once.
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    // "Playable" requires both AVFoundation's playable flag and our own
    // hardware-decode verdict; otherwise the asset is merely "loaded".
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1584
// Returns the NSError code reported while loading the asset's "playable"
// key, or 0 when there is no asset or no error.
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *loadError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&loadError];
    // Messaging -code on a nil NSError yields 0, so no nil check is needed.
    return [loadError code];
}
1594
// Draws the current video frame into |context| at |rect|, preferring the
// video-output path and falling back to the image generator.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Suppress observer callbacks while painting; re-enabled below.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Use the video output only when it already has a decoded frame ready.
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that at least one frame has been painted.
    m_videoFrameHasDrawn = true;
}
1615
// Best-effort paint entry point: draws the current frame unless rendering is
// disabled, already handled by a layer, or no context renderer exists yet.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Ignore the request when a layer is already rendering, and only paint if
    // an image generator or video output is already available.
    if (currentRenderingMode() == MediaRenderingToLayer || !hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1631
// Draws the frame for the current time using AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    INFO_LOG(LOGIDENTIFIER);

    RetainPtr<CGImageRef> frameImage = createImageForTimeInRect(currentTime(), rect);
    if (!frameImage)
        return;

    GraphicsContextStateSaver stateSaver(context);
    // Translate/scale so the CGImage (bottom-left origin) lands in |rect|.
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect destinationRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, destinationRect.width(), destinationRect.height()), frameImage.get());
}
1646
// Generates a CGImage for |time|, sized to fit |rect|, converted to sRGB.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !RELEASE_LOG_DISABLED
    MonotonicTime generationStart = MonotonicTime::now();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // Timescale 600 matches the classic QuickTime movie timescale.
    RetainPtr<CGImageRef> generatedImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> convertedImage = adoptCF(CGImageCreateCopyWithColorSpace(generatedImage.get(), sRGBColorSpaceRef()));

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating image took ", (MonotonicTime::now() - generationStart).seconds());
#endif

    return convertedImage;
}
1667
// Reports every MIME type AVFoundation registers as supported.
// NOTE: the function's closing brace was missing in the source as reviewed,
// which is a syntax error; it is restored here.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
}
1673 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Legacy-EME helper: FairPlay (both identifiers) and W3C Clear Key are the
// only key systems this engine recognizes.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1680 #endif
1681
// Decides whether this engine can play content described by |parameters|.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    // This engine does not handle MediaSource content.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    // This engine does not handle MediaStream content.
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    // The container must appear in the static list or in the set of types
    // AVFoundation registers at runtime.
    if (!staticMIMETypeList().contains(containerType) && !AVFoundationMIMETypeCache::singleton().types().contains(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // Ask AVFoundation about the full "container; codecs=..." string for a
    // definitive answer.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1711
// Returns true when the (keySystem, mimeType) pair can be handled by the
// legacy EME path; always false when LEGACY_ENCRYPTED_MEDIA is disabled.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS.
        // (The original returned MediaPlayer::IsNotSupported — an enum whose
        // value is 0 — from this bool function; `false` is the same behavior
        // with the correct type.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        // An explicit MIME type must be one we can actually play.
        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().types().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1737
1738 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1739 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Serves a byte range of |keyData| to an in-flight resource loading request
// and marks the request finished. Used by the legacy "clearkey" path.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    // Advertise the total key length and byte-range support first.
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the requested end to the available key length.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // Reject ranges that fall entirely outside the key data.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1765 #endif
1766
// Called for every AVAssetResourceLoadingRequest the loader delegate sees.
// Returns true when WebCore will (asynchronously) satisfy the request, false
// when the request is rejected or already satisfied.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
    // "skd" URLs carry FairPlay Streaming key requests.
    if (scheme == "skd") {
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        // Little-endian 32-bit size prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        // NOTE(review): the element count divides by sizeof(unsigned char)
        // (i.e. 1); this looks like it was meant to express a UTF-16 unit
        // count — confirm the intended units before changing.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        RefPtr<Uint8Array> initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        if (!player()->keyNeeded(initData.get()))
            return false;
#endif
        // Remember the request so a later key response can fulfill it.
        m_keyURIToRequestMap.set(keyURI, avRequest);
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
        // With a modern CDM instance attached, the CDM handles key delivery.
        if (m_cdmInstance)
            return false;

        RetainPtr<NSData> keyURIData = [keyURI dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
        auto keyURIBuffer = SharedBuffer::create(keyURIData.get());
        player()->initializationDataEncountered(ASCIILiteral("skd"), keyURIBuffer->tryCreateArrayBuffer());
#endif
        return true;
    }

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        auto encodedKeyId = UTF8Encoding().encode(keyID, UnencodableHandling::URLEncodedEntities);

        auto initData = Uint8Array::create(encodedKeyId.size());
        initData->setRange(encodedKeyId.data(), encodedKeyId.size(), 0);

        // Answer immediately from the key cache when possible.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif
#endif

    // Every other URL is loaded through WebCore's own resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1830
// AVFoundation cancelled this loading request: stop any in-flight
// WebCore-managed load for it. (The map entry itself is removed later by
// didStopLoadingRequest().)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // The original computed the request URL's scheme into an unused local;
    // that dead work has been removed.
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1840
// The loader is done with this request; drop our bookkeeping entry.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1845 #endif
1846
// The engine is usable only when both the AVFoundation and CoreMedia
// frameworks can be loaded at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1851
// Would snap |timeValue| to the media's timescale; currently an identity
// mapping pending the radar below.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1860
// Upper bound, in seconds, on how long a cached media time may be reused
// (0 disables the cache). Presumably consulted by the shared
// MediaPlayerPrivateAVFoundation base class — confirm against its callers.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1869
// Applies the aspect-ratio preference to the video layer's gravity.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;
#endif

    NSString *newGravity;
    if (shouldMaintainAspectRatio())
        newGravity = AVLayerVideoGravityResizeAspect;
    else
        newGravity = AVLayerVideoGravityResize;

    // Apply the change without implicit Core Animation transitions.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:newGravity];
    [CATransaction commit];
}
1888
// Returns the first track in |tracks| whose isEnabled flag is set, or nil
// when no track is enabled (including a nil or empty array).
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack *track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1898
// Recomputes all track-derived cached state (hasVideo/hasAudio/captions,
// presentation size, audio/video track lists) whenever the asset's or the
// player item's tracks collection changes.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so a change can be
    // detected at the bottom of this function.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic-change notifications until the end of this function.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        // With a player item, classify each enabled track by media type.
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option also counts as audio/video.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media-selection group for caption discovery.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Fall back to legacy closed-caption tracks when the selection groups
    // did not yield captions.
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the current audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
2009
2010 #if ENABLE(VIDEO_TRACK)
2011
// Diffs the AVPlayerItemTracks of media type |trackType| in |tracks| against
// the existing |oldItems|: builds new items with |itemFactory|, updates
// |oldItems| in place, and notifies |player| of removals then additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Tracks backing the items we already created.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition the existing items into kept vs. removed.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    // Commit the new item list before notifying.
    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify removals first, then additions.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2055
2056 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2057
// Media-selection-group variant of the diffing helper above: refreshes
// |group|'s options for |characteristics|, then diffs the group's current
// options against |oldItems|, updating |oldItems| in place and notifying
// |player| of removals then additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Current options that are still backed by a live AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Options backing the items we already created.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Items with no option, or whose option went away, are removed.
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    // Commit the new item list before notifying.
    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify removals first, then additions.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2117
2118 #endif
2119
// Rebuilds m_audioTracks from the current player item state, preferring the
// audible media-selection group when one is available.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create our wrapper for the audible selection group.
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        // Fall back to diffing the raw AVPlayerItemTracks of type audio.
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh cached properties on every audio track.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
#endif
}
2146
// Rebuilds m_videoTracks from the current player item state, additionally
// consulting the visual media-selection group when one is available.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create our wrapper for the visual selection group.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh cached properties on every *video* track. (The original
    // iterated m_audioTracks here — a copy/paste slip from
    // updateAudioTracks() that left video track properties stale.)
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
#endif
}
2172
// A separate text-track representation layer is needed only while rendering
// into a fullscreen video layer.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return true;
#endif
    return false;
}
2181
// Keeps the caption layer's frame in sync with the fullscreen video frame.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (!m_videoFullscreenLayerManager->videoFullscreenLayer() || !m_textTrackRepresentationLayer)
        return;

    // Resize without implicit Core Animation transitions.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];

    FloatRect videoFullscreenFrame = m_videoFullscreenLayerManager->videoFullscreenFrame();
    // Prefer the video layer's actual video rect; fall back to the whole
    // fullscreen frame when there is no video layer.
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, videoFullscreenFrame.width(), videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];

    [CATransaction commit];
#endif
}
2198
// Installs (or removes, when |representation| is null) the platform layer
// that renders text tracks over fullscreen video.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just make sure its bounds are current.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    // Swap layers without implicit Core Animation transitions.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Attach the new layer only when a fullscreen layer exists to host it.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    [CATransaction commit];

#else
    UNUSED_PARAM(representation);
#endif
}
2227
2228 #endif // ENABLE(VIDEO_TRACK)
2229
2230 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2231
// Lazily creates the Web Audio source provider for the current player item
// and points it at the first enabled audible asset track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    return m_provider.get();
}
2240
2241 #endif
2242
// Publishes the cached presentation size as the natural size, but only once
// an asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2250
// Propagates the asset's resolved URL (or an empty URL when there is no
// asset) to the base class.
void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
{
    setResolvedURL(m_avAsset ? URL([m_avAsset resolvedURL]) : URL());
}
2255
// Reports whether all loads for this media passed CORS checks. Only
// answerable when loading goes through WebCoreNSURLSession; otherwise we
// conservatively answer false.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    // Guard the URLSession access: the setting must be on and the selector
    // must exist on this OS version.
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session isKindOfClass:[WebCoreNSURLSession class]])
        return session.didPassCORSAccessChecks;
#endif
    return false;
}
2270
2271 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2272
// Creates the AVPlayerItemVideoOutput used for frame capture and attaches it
// to the player item. No-op without a player item or when one already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // nil attributes let the output choose its pixel format.
    NSDictionary* attributes = nil;
#else
    // Request 32-bit BGRA pixel buffers.
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
}
2292
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    // Detach from the item (if it still exists) before dropping our reference.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    INFO_LOG(LOGIDENTIFIER);

    m_videoOutput = nullptr;
}
2305
bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
{
    // Ensure there is an AVPlayerItemVideoOutput to pull frames from.
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime itemTime = [m_avPlayerItem.get() currentTime];

    // Returns false when no frame newer than the cached one is available.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:itemTime])
        return false;

    // Cache the fresh buffer and invalidate the CGImage derived from the old one.
    m_lastPixelBuffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil]);
    m_lastImage = nullptr;
    return true;
}
2321
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously decoded image still counts as an available frame.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2335
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
        return;

    // Lazily create the conformer that turns the CVPixelBuffer into a CGImage.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        // Conformer converts to BGRA here; createVideoOutput() requested BGRA
        // directly on the non-VideoToolbox path instead.
        NSDictionary *attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)attributes);
    }

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating buffer took ", (MonotonicTime::now() - start).seconds());
#endif
}
2363
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    // If the output exists but has produced no frame yet, block briefly so the
    // first paint does not draw nothing.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    INFO_LOG(LOGIDENTIFIER);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Draw honoring the track's preferred orientation: map the destination
    // rect back through the inverse, then apply the transform to the context.
    FloatRect transformedOutputRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2394
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    ASSERT(context);

    // Refresh m_lastPixelBuffer; bail if no frame has ever been produced.
    updateLastPixelBuffer();
    if (!m_lastPixelBuffer)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t height = CVPixelBufferGetHeight(m_lastPixelBuffer.get());

    // The texture copier is created lazily and reused for subsequent frames.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2411
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    // Refresh the cached image before handing it out; may be null if no frame
    // has been produced yet.
    updateLastImage();
    return m_lastImage;
}
2417
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Block until outputMediaDataWillChange() signals, giving up after one second.
    long timedOut = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC));

    if (timedOut)
        ERROR_LOG(LOGIDENTIFIER, "timed out");
}
2431
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    // Wake any thread blocked in waitForVideoOutputMediaDataWillChange().
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2436
2437 #endif
2438
2439 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2440
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    // Removes and returns the pending loading request for keyURI, if any.
    return m_keyURIToRequestMap.take(keyURI);
}
2445
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    // Record the ids we satisfy first; removing entries while iterating the
    // map would invalidate the iteration.
    Vector<String> satisfiedKeyIds;

    for (auto& pair : m_keyURIToRequestMap) {
        // Skip requests whose key has not been cached by the page yet.
        auto keyData = player()->cachedKeyForKeyId(pair.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(pair.value.get(), keyData.get());
        satisfiedKeyIds.append(pair.key);
    }

    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2465
void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
{
    // Only the session this player created may be removed.
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2471
std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
{
    // Refuse key systems this player cannot handle.
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    // Ownership passes to the caller; we keep only a weak reference so the
    // session's lifetime is not extended by this player.
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = session->createWeakPtr();
    return WTFMove(session);
}
2480 #endif
2481
2482 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(LEGACY_ENCRYPTED_MEDIA)
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // Surface an 'HDCP' error to the legacy CDM session when output becomes obscured.
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    // Forward the state change to the modern CDM instance, if attached.
    if (m_cdmInstance)
        m_cdmInstance->outputObscuredDueToInsufficientExternalProtectionChanged(newValue);
#elif !ENABLE(LEGACY_ENCRYPTED_MEDIA)
    UNUSED_PARAM(newValue);
#endif
}
2497 #endif
2498
2499 #if ENABLE(ENCRYPTED_MEDIA)
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    // Only FairPlay Streaming instances are supported by this player.
    if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
        return;

    auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    if (&fpsInstance == m_cdmInstance)
        return;

    // Replace any previously attached instance before adopting the new one.
    if (m_cdmInstance)
        cdmInstanceDetached(*m_cdmInstance);

    m_cdmInstance = &fpsInstance;
    // Register the asset with the content key session so it can receive keys.
    [m_cdmInstance->contentKeySession() addContentKeyRecipient:m_avAsset.get()];
#else
    UNUSED_PARAM(instance);
#endif
}
2519
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceDetached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    // Detaching an instance other than the attached one is a programming error.
    ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);
    [m_cdmInstance->contentKeySession() removeContentKeyRecipient:m_avAsset.get()];
    m_cdmInstance = nullptr;
#else
    UNUSED_PARAM(instance);
#endif
}
2530
void MediaPlayerPrivateAVFoundationObjC::attemptToDecryptWithInstance(CDMInstance&)
{
    // Take ownership of the pending requests so the member map is emptied
    // before we start finishing them.
    auto pendingRequests = WTFMove(m_keyURIToRequestMap);
    for (auto& request : pendingRequests.values())
        [request finishLoading];
}
2537 #endif
2538
2539 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2540
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Deselect any legible media option so legacy CC rendering is not doubled.
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every current text track was removed; tracks found
    // again below are taken off this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove() does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(m_textTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2575
2576 #endif
2577
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    // Querying tracks before the "tracks" key has finished loading can block,
    // so only answer once loading has completed.
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2588
2589 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2590
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    // Media selection groups are only safe to query once the corresponding
    // asset key has finished loading.
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2601
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    // Return nil rather than risk blocking on an asset that is still loading.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2609
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    // Return nil rather than risk blocking on an asset that is still loading.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2617
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    // Return nil rather than risk blocking on an asset that is still loading.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2625
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every current text track was removed; tracks matched to a still-
    // present option below are taken off this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove() does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy CC tracks are handled by processLegacyClosedCaptionsTracks().
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2681
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    // Create the in-band metadata text track at most once.
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2691
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    // Cues are dropped unless a text track is currently selected.
    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2701
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_currentTextTrack)
        return;
    
    // Discard the accumulated cue state of the currently selected track.
    m_currentTextTrack->resetCueValues();
}
2711
2712 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2713
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");

    m_currentTextTrack = track;

    if (track) {
        // Legacy CC tracks use the deprecated closed-caption display toggle;
        // everything else is selected via the legible media selection group.
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#pragma clang diagnostic pop
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselecting: clear both selection mechanisms.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
#pragma clang diagnostic pop
    }

}
2748
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // The answer is cached; a non-null cached value is returned as-is.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
#pragma clang diagnostic pop
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single primary language exists.
        m_languageOfPrimaryAudioTrack = emptyString();
        INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

    INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");

    return m_languageOfPrimaryAudioTrack;
}
2788
2789 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    // On Mac the answer depends on the kind of target that was set explicitly.
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    // On iOS ask AVPlayer directly.
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    INFO_LOG(LOGIDENTIFIER, "- ", wirelessTarget);

    return wirelessTarget;
}
2809
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    if (!AVFoundationLibrary())
        return MediaPlayer::TargetTypeNone;

    // Map AVFoundation's external playback type onto MediaPlayer's enum.
    switch ([m_avPlayer externalPlaybackType]) {
    case AVPlayerExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case AVPlayerExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case AVPlayerExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    // On Mac, external playback targets are always treated as AirPlay.
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2835     
2836 #if PLATFORM(IOS)
// Returns a user-presentable name for the AirPlay route the player is using,
// or nil when no AirPlay route is active.
// FIXME: "exernal" is a typo for "external"; renaming requires updating the
// caller (wirelessPlaybackTargetName) in the same change.
static NSString *exernalDeviceDisplayNameForPlayer(AVPlayerType *player)
{
#if HAVE(CELESTIAL)
    NSString *displayName = nil;

    if (!AVFoundationLibrary())
        return nil;

    if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
        return nil;

    NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
    if (!pickableRoutes.count)
        return nil;

    for (NSDictionary *pickableRoute in pickableRoutes) {
        // Only the currently picked route is of interest.
        if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
            continue;

        displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];

        NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
        if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
            break;

        // The route is a speaker or HDMI out, override the name to be the localized device model.
        NSString *localizedDeviceModel = [[UIDevice currentDevice] localizedModel];

        // In cases where a route with that name already exists, prefix the name with the model.
        BOOL includeLocalizedDeviceModelName = NO;
        for (NSDictionary *otherRoute in pickableRoutes) {
            if (otherRoute == pickableRoute)
                continue;

            if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
                includeLocalizedDeviceModelName = YES;
                break;
            }
        }

        if (includeLocalizedDeviceModelName)
            displayName =  [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
        else
            displayName = localizedDeviceModel;

        break;
    }

    return displayName;
#else
    UNUSED_PARAM(player);
    return nil;
#endif
}
2891 #endif
2892
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    // No player means there is no route to name.
    if (!m_avPlayer)
        return emptyString();

#if PLATFORM(IOS)
    return exernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#else
    // On Mac the name comes from the explicitly-set playback target, if any.
    return m_playbackTarget ? m_playbackTarget->deviceName() : String();
#endif
}
2908
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    // Without a player, answer from the last cached value.
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    // Refresh the (mutable) cache from AVPlayer before answering.
    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    INFO_LOG(LOGIDENTIFIER, "- ", !m_allowsWirelessVideoPlayback);

    return !m_allowsWirelessVideoPlayback;
}
2919
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    INFO_LOG(LOGIDENTIFIER, "- ", disabled);
    // Cache the desired state even if the player does not exist yet.
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // Suppress re-entrant notifications while poking AVPlayer.
    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2931
2932 #if !PLATFORM(IOS)
2933
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    // Only AVFoundation targets carry an output context; mock targets do not.
    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    INFO_LOG(LOGIDENTIFIER);

    // A target without an active route cannot be played to.
    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2945
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", shouldPlay);

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        if (!m_avPlayer)
            return;

        // Avoid touching AVPlayer when the context would not actually change.
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        // Suppress re-entrant notifications while swapping the output context.
        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    // Mock targets have no real route; just notify asynchronously on the main
    // thread, guarding against this player being destroyed in the meantime.
    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2986
2987 #endif // !PLATFORM(IOS)
2988
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS)
    if (!m_avPlayer)
        return;

    // External playback while the external screen is active is only wanted in
    // standard fullscreen mode.
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
2998
2999 #endif
3000
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    // Cache the new item status and re-derive the player's ready/network state.
    m_cachedItemStatus = status;

    updateStates();
}
3007
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    // Balanced by the decrement in playbackLikelyToKeepUpDidChange().
    m_pendingStatusChanges++;
}
3012
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    // Only update states once all in-flight will/did change pairs resolve.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3021
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    // Balanced by the decrement in playbackBufferEmptyDidChange().
    m_pendingStatusChanges++;
}
3026
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    // Only update states once all in-flight will/did change pairs resolve.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3035
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    // Balanced by the decrement in playbackBufferFullDidChange().
    m_pendingStatusChanges++;
}
3040
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    // Only update states once all in-flight will/did change pairs resolve.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
3049
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    // Cache the new ranges, then notify and re-derive player state.
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
3057
3058 void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
3059 {
3060     m_cachedLoadedRanges = loadedRanges;
3061
3062     loadedTimeRangesChanged();
3063     updateStates();