// Remove Cocoa CFURLConnection loading code
// [WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "DeprecatedGlobalSettings.h"
40 #import "Extensions3D.h"
41 #import "FloatConversion.h"
42 #import "GraphicsContext.h"
43 #import "GraphicsContext3D.h"
44 #import "GraphicsContextCG.h"
45 #import "InbandMetadataTextTrackPrivateAVF.h"
46 #import "InbandTextTrackPrivateAVFObjC.h"
47 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
48 #import "Logging.h"
49 #import "MediaPlaybackTargetMac.h"
50 #import "MediaPlaybackTargetMock.h"
51 #import "MediaSelectionGroupAVFObjC.h"
52 #import "OutOfBandTextTrackPrivateAVF.h"
53 #import "PixelBufferConformerCV.h"
54 #import "PlatformTimeRanges.h"
55 #import "SecurityOrigin.h"
56 #import "SerializedPlatformRepresentationMac.h"
57 #import "TextEncoding.h"
58 #import "TextTrackRepresentation.h"
59 #import "TextureCacheCV.h"
60 #import "URL.h"
61 #import "VideoTextureCopierCV.h"
62 #import "VideoTrackPrivateAVFObjC.h"
63 #import "WebCoreAVFResourceLoader.h"
64 #import "WebCoreCALayerExtras.h"
65 #import "WebCoreNSURLSession.h"
66 #import <functional>
67 #import <map>
68 #import <objc/runtime.h>
69 #import <pal/avfoundation/MediaTimeAVFoundation.h>
70 #import <pal/spi/cocoa/QuartzCoreSPI.h>
71 #import <pal/spi/mac/AVFoundationSPI.h>
72 #import <runtime/DataView.h>
73 #import <runtime/JSCInlines.h>
74 #import <runtime/TypedArrayInlines.h>
75 #import <runtime/Uint16Array.h>
76 #import <runtime/Uint32Array.h>
77 #import <runtime/Uint8Array.h>
78 #import <wtf/BlockObjCExceptions.h>
79 #import <wtf/CurrentTime.h>
80 #import <wtf/ListHashSet.h>
81 #import <wtf/NeverDestroyed.h>
82 #import <wtf/OSObjectPtr.h>
83 #import <wtf/text/CString.h>
84
85 #if ENABLE(AVF_CAPTIONS)
86 #include "TextTrack.h"
87 #endif
88
89 #import <AVFoundation/AVAssetImageGenerator.h>
90 #import <AVFoundation/AVAssetTrack.h>
91 #import <AVFoundation/AVMediaSelectionGroup.h>
92 #import <AVFoundation/AVMetadataItem.h>
93 #import <AVFoundation/AVPlayer.h>
94 #import <AVFoundation/AVPlayerItem.h>
95 #import <AVFoundation/AVPlayerItemOutput.h>
96 #import <AVFoundation/AVPlayerItemTrack.h>
97 #import <AVFoundation/AVPlayerLayer.h>
98 #import <AVFoundation/AVTime.h>
99
100 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
101 #import "VideoFullscreenLayerManager.h"
102 #endif
103
104 #if PLATFORM(IOS)
105 #import "WAKAppKitStubs.h"
106 #import <CoreImage/CoreImage.h>
107 #import <UIKit/UIDevice.h>
108 #import <mach/mach_port.h>
109 #else
110 #import <Foundation/NSGeometry.h>
111 #import <QuartzCore/CoreImage.h>
112 #endif
113
114 #if USE(VIDEOTOOLBOX)
115 #import <CoreVideo/CoreVideo.h>
116 #import <VideoToolbox/VideoToolbox.h>
117 #endif
118
119 #import "CoreVideoSoftLink.h"
120 #import "MediaRemoteSoftLink.h"
121
namespace std {
// WebCore's HashSet iterator does not expose the nested typedefs the standard
// library expects; provide a minimal iterator_traits specialization (only
// value_type is required by the std algorithms used with this HashSet).
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
127
128 #if ENABLE(AVF_CAPTIONS)
129 // Note: This must be defined before our SOFT_LINK macros:
130 @class AVMediaSelectionOption;
131 @interface AVMediaSelectionOption (OutOfBandExtensions)
132 @property (nonatomic, readonly) NSString* outOfBandSource;
133 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
134 @end
135 #endif
136
137 @interface AVURLAsset (WebKitExtensions)
138 @property (nonatomic, readonly) NSURL *resolvedURL;
139 @end
140
141 typedef AVPlayer AVPlayerType;
142 typedef AVPlayerItem AVPlayerItemType;
143 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
144 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
145 typedef AVMetadataItem AVMetadataItemType;
146 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
147 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
148 typedef AVAssetCache AVAssetCacheType;
149
150 #pragma mark - Soft Linking
151
152 // Soft-linking headers must be included last since they #define functions, constants, etc.
153 #import <pal/cf/CoreMediaSoftLink.h>
154
155 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
156
157 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
158
159 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
160 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
161 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
162 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
163 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
164 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
165 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
166 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)
167
168 SOFT_LINK_CLASS(CoreImage, CIContext)
169 SOFT_LINK_CLASS(CoreImage, CIImage)
170
171 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
172 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
173 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
174 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
175 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
176 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
177 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
178 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
179 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
180 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
182 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
183 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
184 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
185 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
186
187 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
188 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetRequiresCustomURLLoadingKey, NSString *)
189 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetOutOfBandMIMETypeKey, NSString *)
190
191 #define AVPlayer initAVPlayer()
192 #define AVPlayerItem initAVPlayerItem()
193 #define AVPlayerLayer initAVPlayerLayer()
194 #define AVURLAsset initAVURLAsset()
195 #define AVAssetImageGenerator initAVAssetImageGenerator()
196 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
197 #define AVMetadataItem initAVMetadataItem()
198 #define AVAssetCache initAVAssetCache()
199
200 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
201 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
202 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
203 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
204 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
205 #define AVMediaTypeVideo getAVMediaTypeVideo()
206 #define AVMediaTypeAudio getAVMediaTypeAudio()
207 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
208 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
209 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
210 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
211 #define AVURLAssetRequiresCustomURLLoadingKey getAVURLAssetRequiresCustomURLLoadingKey()
212 #define AVURLAssetOutOfBandMIMETypeKey getAVURLAssetOutOfBandMIMETypeKey()
213 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
214 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
215 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
216 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
217 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
218
219 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
220 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
221 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
222
223 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
224 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
225 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
226
227 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
228 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
229 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
230 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
231
232 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
233 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
234 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
235 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
236 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
237 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
238 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
239 #endif
240
241 #if ENABLE(AVF_CAPTIONS)
242 SOFT_LINK_POINTER(AVFoundation, AVURLAssetCacheKey, NSString*)
243 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
244 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
245 SOFT_LINK_POINTER(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString*)
246 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
247 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
248 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
249 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
250 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
255
256 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
257 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
258 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
259 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
260 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
261 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
262 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
263 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
264 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
265 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
266 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
267 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
268 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
269 #endif
270
271 #if ENABLE(DATACUE_VALUE)
272 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
273 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
274 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
275 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
276 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
277
278 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
279 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
280 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
281 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
282 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
283 #endif
284
285 #if PLATFORM(IOS)
286 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
287 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
288 #endif
289
290 SOFT_LINK_FRAMEWORK(MediaToolbox)
291 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
292
293 #if PLATFORM(IOS)
294 SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
295 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
296 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
297 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
298 #define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
299 #define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
300 #define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
301
302 SOFT_LINK_FRAMEWORK(UIKit)
303 SOFT_LINK_CLASS(UIKit, UIDevice)
304 #define UIDevice getUIDeviceClass()
305 #endif
306
307 using namespace WebCore;
308
309 enum MediaPlayerAVFoundationObservationContext {
310     MediaPlayerAVFoundationObservationContextPlayerItem,
311     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
312     MediaPlayerAVFoundationObservationContextPlayer,
313     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
314 };
315
// Observer object that forwards AVFoundation KVO notifications, end-of-playback
// notifications, and (when legible-output support is compiled in) caption cues
// back to the owning C++ MediaPlayerPrivateAVFoundationObjC via m_callback.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Back-pointer to the player; severed via -disconnect during teardown.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
335
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Delegate installed on the AVAssetResourceLoader so WebKit can service
// resource-loading requests itself; requests are forwarded to m_callback,
// which is cleared (setCallback:0) in the player's destructor.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
345
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for AVPlayerItemVideoOutput; notifies the owning player when
// output media data changes or the output sequence is flushed.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Back-pointer; cleared via -setCallback: during teardown.
    dispatch_semaphore_t m_semaphore; // NOTE(review): signalling side not in view — confirm against the implementation below.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
357
358 namespace WebCore {
359 using namespace PAL;
360
361 static NSArray *assetMetadataKeyNames();
362 static NSArray *itemKVOProperties();
363 static NSArray *assetTrackMetadataKeyNames();
364 static NSArray *playerKVOProperties();
365 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
366
367 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Process-wide serial queue on which AVAssetResourceLoader delegate callbacks
// are delivered; created lazily exactly once.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
377 #endif
378
379 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Process-wide serial queue for AVPlayerItemVideoOutput pull-delegate
// callbacks; created lazily exactly once.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullQueue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
389 #endif
390
// Registers this engine with WebCore's media-engine registry. No-op when the
// soft-linked frameworks are unavailable (isAvailable()).
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    // Prime the MIME-type cache so supportsType() has data to consult.
    AVFoundationMIMETypeCache::singleton().loadTypes();
}
400
// Returns the AVAssetCache rooted at |path|. An empty path falls back to a
// "MediaCache" directory under the temporary directory.
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    NSURL *assetCacheURL;
    
    if (path.isEmpty())
        assetCacheURL = [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES];
    else
        assetCacheURL = [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:assetCacheURL];
}
412
// Asset-cache keys are URL strings; map each valid one to its security origin
// so callers can report or clear cached media per origin.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> origins;
    for (NSString *key in [assetCacheForPath(path) allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (keyAsURL.isValid())
            origins.add(SecurityOrigin::create(keyAsURL));
    }
    return origins;
}
423
// Converts an NSDate (seconds since the UNIX epoch, as a double) to a
// std::chrono::system_clock::time_point.
static std::chrono::system_clock::time_point toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    using std::chrono::duration;
    using std::chrono::duration_cast;
    using std::chrono::system_clock;

    auto secondsSinceEpoch = duration<double>(date.timeIntervalSince1970);
    return system_clock::time_point(duration_cast<system_clock::duration>(secondsSinceEpoch));
}
431
// Clears cached media modified after |modifiedSince|, in three phases:
// asset-cache entries, then (for a zero cutoff) the whole cache directory,
// otherwise individual "CachedMedia-" files in that directory.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, std::chrono::system_clock::time_point modifiedSince)
{
    // Phase 1: drop AVAssetCache entries newer than the cutoff.
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // Phase 2: an epoch (or earlier) cutoff means "clear everything" — remove
    // the entire cache directory and stop.
    if (modifiedSince <= std::chrono::system_clock::time_point { }) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }
    
    // Phase 3: walk the top level of the cache directory and collect the
    // "CachedMedia-" regular files modified after the cutoff…
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];
    
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    for (NSURL *fileURL : enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
    
        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;
        
        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;
        
        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;
        
        [urlsToDelete addObject:fileURL];
    }
    
    // …then delete them only after enumeration finishes, so the directory is
    // not mutated while the enumerator is walking it.
    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
473
// Removes every asset-cache entry whose key (a URL string) belongs to one of
// the given security origins.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    for (NSString *key in [assetCache allKeys]) {
        URL keyAsURL = URL(URL(), key);
        if (!keyAsURL.isValid())
            continue;
        if (origins.contains(SecurityOrigin::create(keyAsURL)))
            [assetCache removeEntryForKey:key];
    }
}
485
// Constructs the AVFoundation-backed player. Helper ObjC delegate objects are
// created eagerly with a back-pointer to |this|; cached player/item state is
// initialized to "nothing loaded" and is reset again by cancelLoad().
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    , m_videoFullscreenLayerManager(VideoFullscreenLayerManager::create())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    // ObjC helpers that forward AVFoundation callbacks back to this object.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    // Cached AVPlayer/AVPlayerItem state, kept up to date via KVO.
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
519
// Destructor: detach all ObjC delegates first so in-flight callbacks cannot
// reach a dead object, then tear down rendering and cancel loading.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Sever the resource-loader delegate's back-pointer and invalidate any
    // outstanding custom resource loads.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Likewise detach the video-output delegate and release its semaphore.
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
541
// Cancels any in-progress load and tears down the AVPlayer/AVPlayerItem pair.
// Ordering matters throughout: observers and outputs must be detached before
// the objects they observe are released.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    INFO_LOG(LOGIDENTIFIER);
    tearDownVideoRendering();

    // Stop notifications and KVO callbacks from reaching us first.
    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the caption output from the item before dropping our reference.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Balance every KVO registration before releasing the item and player.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was observed for "enabled" changes; unregister before
    // dropping the collection.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Disconnect the Web Audio provider from the item/track being torn down.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    // Teardown complete; resume reacting to load-state changes.
    setIgnoreLoadStateChanges(false);
}
607
// True once layer creation has been requested; the AVPlayerLayer itself may
// still be pending, since createVideoLayer() finishes on the main thread.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
612
// A context renderer exists when either the video output (preferred, when
// available) or the fallback image generator has been created.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
621
// Creates a renderer for painting into a graphics context: the video-output
// path when compiled in, otherwise the AVAssetImageGenerator fallback.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
630
// Lazily creates the AVAssetImageGenerator used for snapshotting frames.
// Note: the former function-local "using namespace PAL;" was redundant — the
// enclosing WebCore namespace already has a file-level using-directive.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    INFO_LOG(LOGIDENTIFIER);

    // Nothing to do without an asset; never create a second generator.
    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    // Honor the clean aperture and the track's preferred transform, and
    // generate frames at exactly the requested time (zero tolerance).
    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];
}
646
// Tears down all context renderers; destroyImageGenerator() is a no-op when
// no generator was ever created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
    destroyOpenGLVideoOutput();
#endif
    destroyImageGenerator();
}
655
// Releases the snapshot generator created by createImageGenerator().
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Use nil (not 0) for ObjC object pointers, matching the rest of the file
    // (e.g. m_videoLayer = nil in destroyVideoLayer()).
    m_imageGenerator = nil;
}
665
// Requests creation of the AVPlayerLayer. The actual work is bounced to the
// main thread; the entry conditions are re-checked inside the callback since
// the player may have been torn down (or a competing request may have run)
// before the callback fires.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = createWeakPtr()] {
        if (!weakThis)
            return;

        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        // With VideoToolbox we also keep a video output alongside the layer.
        if (!m_videoOutput)
            createVideoOutput();
#endif

        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
690
// Creates the AVPlayerLayer, attaches it to the player, and hands it to the
// fullscreen layer manager (or sizes it directly on platforms without one).
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readyForDisplay so hasAvailableVideoFrame() can report layer
    // readiness; balanced in destroyVideoLayer().
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    INFO_LOG(LOGIDENTIFIER);

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS)
    [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
718
// Tears down the AVPlayerLayer: the readyForDisplay KVO registration from
// createAVPlayerLayer() must be balanced, and the layer detached from the
// player, before the reference is dropped.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    m_videoFullscreenLayerManager->didDestroyVideoLayer();
#endif

    m_videoLayer = nil;
}
735
// Returns the stream's start date in milliseconds, or invalidTime() when the
// media carries no date. The item's currentDate tracks the playback position,
// so subtracting the current playback offset recovers the start date.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    double dateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media file has no start date, and no
    // live stream legitimately started at the 1970 epoch.
    if (!dateMilliseconds)
        return MediaTime::invalidTime();

    double positionMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round to absorb sub-millisecond error introduced by the subtraction.
    return MediaTime::createWithDouble(round(dateMilliseconds - positionMilliseconds));
}
750
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    // When rendering through the AVPlayerLayer, trust its cached readyForDisplay
    // state; otherwise a frame is available once we have drawn at least one.
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
758
759 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore text track kind to the AVFoundation media-characteristic
// array used when registering out-of-band alternate tracks.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // Under the 2015 manual-selection captioning behavior every track uses the
    // auxiliary-content characteristic regardless of kind.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Description:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];
    case PlatformTextTrack::Forced:
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    }
}
781     
// Forwards out-of-band caption mode changes to the shared trackModeChanged()
// handling in the base class / common code path.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
786     
// Pushes the modes of the client's out-of-band track sources onto the matching
// out-of-band text tracks, pairing source and track by unique identifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks have a corresponding PlatformTextTrack source.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            // Sources are matched to media selection options by stringified unique id.
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();

            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;

            // Translate the PlatformTextTrack mode into the inband track mode.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
817 #endif
818
819
// Returns the canonical form of |url| as computed by NSURLProtocol, falling
// back to the plain Cocoa URL whenever canonicalization is not possible.
static NSURL *canonicalURL(const URL& url)
{
    NSURL *originalURL = url;
    if (url.isEmpty())
        return originalURL;

    auto request = adoptNS([[NSURLRequest alloc] initWithURL:originalURL]);
    if (!request)
        return originalURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    return canonicalRequest ? [canonicalRequest URL] : originalURL;
}
836
837 #if PLATFORM(IOS)
// Converts a WebCore Cookie into the NSHTTPCookie property-dictionary form.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    // Start from the mandatory fields; cookie.expires is in milliseconds.
    auto properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);

    // The secure and session-only flags are optional; add them only when set.
    if (cookie.secure)
        [properties.get() setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties.get() setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
855 #endif
856
// Builds the AVURLAsset for |url|, assembling the creation-options dictionary
// (reference restrictions, HTTP headers, out-of-band tracks, cookies, caching)
// and wiring up the resource-loader delegate. No-op if an asset already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
{
    if (m_avAsset)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Batch the KVO callbacks that asset creation triggers.
    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    // Forbid cross-origin local<->remote references inside the asset.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Forward the page's Referer and User-Agent on media requests, when present.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

    // Route all media loads through our custom URL loading path.
    if (AVURLAssetRequiresCustomURLLoadingKey)
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
#endif

    // Pass the declared MIME type (and codecs) along unless it was merely
    // inferred from the file extension.
    auto type = player()->contentMIMEType();
    if (AVURLAssetOutOfBandMIMETypeKey && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band caption/subtitle source so AVFoundation exposes
    // it as an alternate track.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    // Pin media loads to a specific network interface when the client asks.
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the page's cookies to AVFoundation so media requests carry them.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(url, cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];

    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Let WebKit service key/resource requests via the loader delegate.
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // When supported, install a WebCoreNSURLSession so media loads go through
    // WebKit's resource loader instead of AVFoundation's own networking.
    if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    // A fresh asset has unknown playability; checkPlayability() will re-query.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
976
// Installs |item| as the AVPlayer's current item, always performing the
// replacement on the main thread.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    // Already on the main thread: replace synchronously.
    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Otherwise hop to the main queue, keeping both objects alive for the hop.
    RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> protectedItem = item;
    dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
        [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
    });
}
993
// Creates the AVPlayer, registers KVO observers, applies cached state (muted,
// playback target, sleep policy), and attaches any existing player item/layer.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Batch the KVO callbacks that player setup triggers.
    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebKit drives media selection itself; disable AVFoundation's automatics.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so doesn't return without doing anything.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted doesn't return without doing anything.
        m_muted = false;
        [m_avPlayer.get() setMuted:m_muted];
    }

    // Video elements need the AVPlayerLayer for rendering.
    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    // Attach an item created before the player existed.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1042
// Creates the AVPlayerItem for the current asset, registers end-of-playback
// and KVO observers, configures pitch handling, the legible (caption) output,
// and the Web Audio source provider, and attaches the item to the player.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // Batch the KVO callbacks that item setup triggers.
    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Prior notifications let us detect imminent changes as well as new values.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Receive caption cues (WebVTT native representation) on the main queue,
    // slightly ahead of playback, and suppress AVFoundation's own rendering.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep an existing Web Audio provider pointed at the new item and track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1088
// Asynchronously loads the asset's "playable" and "tracks" keys, then posts
// an AssetPlayabilityKnown notification on the main thread. Runs only once
// per asset (guarded by m_haveCheckedPlayability).
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    // Weak pointer: this player may be destroyed before the completion fires.
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1105
// Kicks off asynchronous loading of the asset's metadata keys and, once those
// resolve, of each track's metadata keys. A dispatch group joins all the
// nested loads; when every one completes, metadataLoaded is delivered on the
// main thread via the Objective-C observer.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    INFO_LOG(LOGIDENTIFIER);

    // One enter for the asset-level load; each track load enters/leaves too.
    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    // Weak pointer: this player may be destroyed before any completion fires.
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balance the initial enter once track loads have been scheduled.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1135
// Translates the cached AVPlayerItem status and buffering flags into the
// cross-platform ItemStatus enumeration.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    // Unknown/failed AVFoundation statuses take precedence over buffer state.
    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    // Buffering flags, most favorable first.
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1154
// Exposes the underlying AVPlayer to callers via the PlatformMedia union.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    INFO_LOG(LOGIDENTIFIER);
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationMediaPlayerType;
    platformMedia.media.avfMediaPlayer = m_avPlayer.get();
    return platformMedia;
}
1163
// Returns the layer the compositor should use for video, but only after the
// client has actually asked for layer-backed rendering.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // The fullscreen layer manager wraps the video layer on these platforms.
    return m_haveBeenAskedToCreateLayer ? m_videoFullscreenLayerManager->videoInlineLayer() : nullptr;
#else
    return m_haveBeenAskedToCreateLayer ? m_videoLayer.get() : nullptr;
#endif
}
1172
1173 #if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
// Re-parents the video (and any text-track representation layer) into the
// given fullscreen layer inside one CATransaction, then invokes the
// completion handler via the layer manager.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, WTF::Function<void()>&& completionHandler)
{
    // Nothing to do if this layer is already installed; still notify the caller.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() == videoFullscreenLayer) {
        completionHandler();
        return;
    }

    // Disable implicit animations so the re-parenting is not visible.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];

    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler));

    // Keep captions rendered on top of the fullscreen video.
    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    [CATransaction commit];

    updateDisableExternalPlayback();
}
1195
// Propagates a new fullscreen frame to the layer manager and keeps the
// text-track (caption) layer's bounds in sync with it.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
    syncTextTrackBounds();
}
1201
// Records the requested fullscreen gravity and applies the corresponding
// AVPlayerLayer gravity string when a video layer exists.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    // Map the WebCore gravity onto the matching AVFoundation constant.
    NSString *layerGravity = AVLayerVideoGravityResizeAspect;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        layerGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        layerGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        break;
    }

    // Skip redundant layer mutations and caption-bounds resyncs.
    if ([m_videoLayer videoGravity] == layerGravity)
        return;

    [m_videoLayer setVideoGravity:layerGravity];
    syncTextTrackBounds();
}
1225
// Updates platform state that depends on the fullscreen mode. Only iOS reacts:
// it toggles the layer's picture-in-picture flag and external playback policy.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS)
    [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1235
1236 #endif // PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
1237
1238 #if PLATFORM(IOS)
// Returns the most recently cached timed metadata array, or nil if none.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1245
// Serializes the player item's access log using AVFoundation's extended
// log format; empty when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    auto serializedLog = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return serializedLog.get();
}
1256
// Serializes the player item's error log using AVFoundation's extended
// log format; empty when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    auto serializedLog = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return serializedLog.get();
}
1267 #endif
1268
// Shows or hides the video layer, inside a CATransaction with implicit
// animations disabled so the change is immediate.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1277     
// Starts playback by applying the requested rate to the AVPlayer. Callbacks
// are delayed because setRate: triggers several KVO notifications.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1289
// Pauses playback by setting the AVPlayer rate to zero. Callbacks are delayed
// because setRate: triggers several KVO notifications.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1301
// Reports the media duration, preferring the player item once it is ready.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Asking the asset for duration before it has loaded would make
    // AVFoundation compute the answer synchronously — avoid that.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    // Prefer the item's duration when it is ready to play; some assets never
    // report a duration of their own.
    CMTime duration;
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        duration = [m_avPlayerItem.get() duration];
    else
        duration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(duration))
        return PAL::toMediaTime(duration);

    // An indefinite duration maps to positive infinity (e.g. live streams).
    if (CMTIME_IS_INDEFINITE(duration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();
}
1326
// Returns the current playback position, clamped to be non-negative; zero
// whenever metadata or the item is missing or the time is non-numeric.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playbackTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playbackTime))
        return MediaTime::zeroTime();

    return std::max(PAL::toMediaTime(playbackTime), MediaTime::zeroTime());
}
1338
// Seeks the player item to |time| within the given tolerances, flushing any
// partially-delivered metadata cues first and reporting completion back on
// the main thread.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Cues straddling the seek point must not survive the jump.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;

    // Weak pointer: this player may be destroyed before the seek completes.
    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1369
// Applies the requested volume to the AVPlayer. On iOS the system controls
// volume, so the request is ignored.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1382
// Caches the muted state and forwards it to the AVPlayer. The cached value is
// kept even without a player so createAVPlayer() can re-apply it later.
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    if (m_muted == muted)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", muted);

    m_muted = muted;

    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setMuted:m_muted];
}
1397
// Caption visibility is handled elsewhere in this implementation; this
// override only logs the request once metadata is available.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", closedCaptionsVisible);
}
1407
// Caches and applies a new playback rate. Callbacks are delayed because
// setRate: triggers several KVO notifications.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1415
// Returns the cached playback rate, or 0 before metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1423
// Returns when the item's seekable ranges last changed; 0 on SDKs that do not
// expose -seekableTimeRangesLastModifiedTime.
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1432
// Returns the live stream's update interval; 0 on SDKs that do not expose
// -liveUpdateInterval.
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1441
// Selects the AVAudioTimePitchAlgorithm matching the pitch-preservation flag.
// Without an item there is nothing to update; createAVPlayerItem() re-applies
// the player's preservesPitch() setting when the item is created.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1447
// Converts the cached loaded CMTimeRanges into a PlatformTimeRanges object.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto buffered = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return buffered;

    // Only valid, non-empty ranges contribute to the buffered set.
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        buffered->add(PAL::toMediaTime(range.start), PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return buffered;
}
1462
// Returns the earliest seekable position: the smallest start time among the
// cached seekable ranges, or zero if no valid range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        MediaTime startOfRange = PAL::toMediaTime(range.start);
        if (startOfRange < earliestStart)
            earliestStart = startOfRange;
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1482
// Returns the latest seekable position: the largest end time among the
// seekable ranges, refreshing the cache from the item if needed.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime endOfRange = PAL::toMediaTime(CMTimeRangeGetEnd(range));
        if (latestEnd < endOfRange)
            latestEnd = endOfRange;
    }
    return latestEnd;
}
1500
// Returns the furthest buffered position: the largest end time among the
// cached loaded ranges, or zero when nothing has been loaded.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime furthestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime endOfRange = PAL::toMediaTime(CMTimeRangeGetEnd(range));
        if (furthestEnd < endOfRange)
            furthestEnd = endOfRange;
    }

    return furthestEnd;
}
1519
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    // Without metadata there is nothing meaningful to report yet.
    if (!metaDataAvailable())
        return 0;

    // Sum each track's sample data length once, then serve the cached total
    // on subsequent calls.
    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1533
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    // Take ownership of the by-value RetainPtr; moving avoids a redundant
    // retain/release pair on the underlying asset.
    m_avAsset = WTFMove(asset);
}
1538
// Maps the AVAsset's key-loading state onto the engine's AssetStatus. Any
// metadata key that is still loading, failed, or was cancelled short-circuits
// with the corresponding status. Once every key is loaded, the (cached) check
// that all tracks meet the hardware decode requirements gates whether the
// asset is reported Playable or merely Loaded.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];

        if (error)
            ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // When the client does not require a hardware-support check, treat the
    // tracks as playable without inspecting them.
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    // Lazily evaluate (and cache) whether every track satisfies the hardware
    // decode requirements; one failing track makes the asset non-playable.
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1579
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    // No asset means no error to report.
    if (!m_avAsset)
        return 0;

    // Querying the status of the "playable" key surfaces any error recorded
    // while loading the asset. Messaging a nil NSError returns 0, so this is
    // safe when no error occurred.
    NSError *loadError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&loadError];
    return [loadError code];
}
1589
// Paints the current video frame into the given graphics context, preferring
// the pixel-buffer video output path and falling back to the (slower)
// AVAssetImageGenerator path when no output frame is available.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Delay observer callbacks for the duration of the paint so AVFoundation
    // notifications cannot reenter us mid-draw.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Remember that at least one frame reached the screen/context.
    m_videoFrameHasDrawn = true;
}
1610
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    // Painting here is best effort; bail out early for any state in which a
    // software paint is impossible or redundant.
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Already compositing through a layer, so a context paint is unnecessary.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // Only paint if an image generator or video output already exists.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1626
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    INFO_LOG(LOGIDENTIFIER);

    RetainPtr<CGImageRef> frameImage = createImageForTimeInRect(currentTime(), rect);
    if (!frameImage)
        return;

    // Flip the context vertically so the CGImage draws right side up, then
    // blit the frame into the destination rect at low interpolation quality.
    GraphicsContextStateSaver stateSaver(context);
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), frameImage.get());
}
1641
// Snapshots the media at |time| via AVAssetImageGenerator, sized to fit
// |rect|, and converts the result into the sRGB color space.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !RELEASE_LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Timescale 600 is the customary CMTime timescale for video frame times.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), sRGBColorSpaceRef()));

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating image took ", monotonicallyIncreasingTime() - start);
#endif

    return image;
}
1662
// Reports the MIME types AVFoundation can play, as cached by the singleton.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
}
1667
1668 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Returns true for the key systems this engine can service: FairPlay
// Streaming (and its legacy "1_0" alias) and W3C Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1675 #endif
1676
// Engine capability check. MSE and MediaStream content is handled by other
// engines. The container type is vetted against the static and
// AVFoundation-provided MIME type lists before AVURLAsset is asked about the
// full "type; codecs=..." string.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    if (!staticMIMETypeList().contains(containerType) && !AVFoundationMIMETypeCache::singleton().types().contains(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1706
// Returns whether this engine supports the given key system / MIME type pair.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS.
        // (This previously returned MediaPlayer::IsNotSupported — an enum
        // value, not a bool — which only behaved correctly because that
        // enumerator happens to be 0.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().types().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1732
1733 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1734 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Satisfies an AVAssetResourceLoadingRequest for key data: advertises the
// key's total length, answers the request's byte range with the matching
// slice of |keyData|, and marks the request finished. A range that starts
// beyond the key data finishes the request with a nil error.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the size of the key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1760 #endif
1761
// Resource-loader delegate hook. "skd" (FairPlay) and "clearkey" URLs are
// turned into key requests for the CDM machinery; everything else is handed
// to a WebCoreAVFResourceLoader so the media data is fetched through WebKit's
// loader. Returns true when AVFoundation should wait for the request to be
// fulfilled asynchronously.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        // NOTE(review): the divisor sizeof(unsigned char) is 1, so this copies
        // keyURI.length() UTF-16 units; presumably the UChar math above was
        // intended here — confirm before changing.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        RefPtr<Uint8Array> initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        if (!player()->keyNeeded(initData.get()))
            return false;

        // Remember the request so the session can fulfill it once a key arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }

    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange(reinterpret_cast<const JSC::Uint8Adaptor::Type*>(utf8EncodedKeyId.data()), utf8EncodedKeyId.length(), 0);

        // A cached key can be served immediately without waiting.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif

    // Route ordinary media loads through WebKit's own resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1815
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* challenge)
{
    // Defer entirely to the MediaPlayer client, which owns the policy for
    // answering authentication challenges.
    bool clientWillRespond = player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
    return clientWillRespond;
}
1820
// AVFoundation cancelled a resource loading request: stop the loader that was
// servicing it, if any. (The previous implementation also computed the
// request URL's scheme into a local that was never used.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1830
// The request is finished; drop our bookkeeping entry for its loader.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1835 #endif
1836
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    // Both AVFoundation and CoreMedia must be loadable for this engine to be
    // registered; the framework check short-circuits first, as before.
    if (!AVFoundationLibrary())
        return false;
    return isCoreMediaFrameworkAvailable();
}
1841
// Intended to snap a caller-supplied time to the nearest exact media sample
// time; currently an identity mapping (see FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1850
// How long (in seconds) a cached current-time value may be served before
// re-querying AVFoundation. On iOS and macOS 10.10+ no caching is used;
// presumably older macOS needed it to avoid expensive queries — confirm
// before changing the thresholds.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1859
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;
#endif

    // Apply the new gravity inside a transaction with implicit animations
    // disabled so the change takes effect immediately.
    NSString *newGravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:newGravity];
    [CATransaction commit];
}
1878
// Returns the first track in |tracks| whose isEnabled flag is set, or nil if
// none is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack *track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1888
// Called whenever the track collection changes. Re-derives and caches the
// hasVideo/hasAudio/hasCaptions characteristics, reconciles the audio/video
// track wrapper lists, and fires characteristicsChanged() if the primary
// audio language changed. Characteristic notifications are batched for the
// duration of the update.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so a change can be
    // detected at the end of the update.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        // A player item exists: derive the characteristics from its enabled
        // tracks.
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Selected media-selection options count as audio/video too.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the current audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1999
2000 #if ENABLE(VIDEO_TRACK)
2001
// Diffs the AVPlayerItemTracks of media type |trackType| in |tracks| against
// the wrapper objects in |oldItems|. |oldItems| is rewritten in place to the
// surviving + newly created wrappers; |removedFunction| is invoked on the
// player for each wrapper whose track disappeared and |addedFunction| for
// each new wrapper.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Collect the current tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition the old wrappers into removed vs. surviving.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly appeared track.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the MediaPlayer only after |oldItems| is consistent.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2045
2046 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2047
// Media-selection-group overload of the diff above: refreshes the group's
// options for |characteristics|, then reconciles the wrappers in |oldItems|
// against the group's current AVMediaSelectionOptions, notifying the player
// of removals and additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Collect the group's current options, skipping entries without a backing
    // AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Wrappers with no option, or whose option disappeared, are removed;
    // the rest survive.
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly appeared option.
    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the MediaPlayer only after |oldItems| is consistent.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2107
2108 #endif
2109
// Reconciles the AudioTrackPrivateAVFObjC wrappers with the current audio
// tracks, preferring the media-selection-group path when an audible group is
// available, then refreshes each wrapper's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the audible selection group wrapper on first use.
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh cached properties on the reconciled track list.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
#endif
}
2136
// Reconciles the VideoTrackPrivateAVFObjC wrappers with the current video
// tracks (and, when available, the visual media-selection group), then
// refreshes each wrapper's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection group wrapper on first use.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh cached properties on the reconciled VIDEO track list.
    // (This loop previously iterated m_audioTracks — a copy/paste slip from
    // updateAudioTracks() — so video tracks never had their properties reset.)
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
#endif
}
2162
// A platform text track representation (captions rendered into a layer) is
// required only while a fullscreen video layer is active.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return true;
#endif
    return false;
}
2171
// Keeps the captions layer's frame in sync with the video's on-screen rect
// while in fullscreen.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    // Nothing to sync unless both a fullscreen layer and a captions layer exist.
    if (!m_videoFullscreenLayerManager->videoFullscreenLayer() || !m_textTrackRepresentationLayer)
        return;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];

    // Match the captions layer to the video rect, falling back to the whole
    // fullscreen frame when no video layer is present.
    FloatRect videoFullscreenFrame = m_videoFullscreenLayerManager->videoFullscreenFrame();
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, videoFullscreenFrame.width(), videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];

    [CATransaction commit];
#endif
}
2188
// Installs (or removes) the platform layer that renders text tracks,
// swapping it into the fullscreen layer tree when one is active.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS) || (PLATFORM(MAC) && ENABLE(VIDEO_PRESENTATION_MODE))
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just make sure its bounds are current.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    [CATransaction begin];
    [CATransaction setDisableActions:YES];

    // Swap the old captions layer out of the layer tree for the new one.
    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    if (m_videoFullscreenLayerManager->videoFullscreenLayer() && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayerManager->videoFullscreenLayer() addSublayer:m_textTrackRepresentationLayer.get()];
    }

    [CATransaction commit];

#else
    UNUSED_PARAM(representation);
#endif
}
2217
2218 #endif // ENABLE(VIDEO_TRACK)
2219
2220 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2221
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    // Lazily build the Web Audio source provider on first request and point
    // it at the first enabled audible track.
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    return m_provider.get();
}
2230
2231 #endif
2232
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    // Only propagate the cached presentation size once an asset exists.
    if (!m_avAsset)
        return;
    setNaturalSize(m_cachedPresentationSize);
}
2240
void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
{
    // Mirror the asset's resolved URL, or reset to an empty URL when there is
    // no asset.
    if (m_avAsset)
        setResolvedURL(URL([m_avAsset resolvedURL]));
    else
        setResolvedURL(URL());
}
2245
// Reports whether every media load passed its CORS check. The answer is only
// known when loading goes through WebKit's own NSURLSession, which requires
// the setting to be enabled and the resource loader to expose the (SPI)
// URLSession accessor.
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    // Only trust the flag when the session really is our WebCoreNSURLSession.
    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session isKindOfClass:[WebCoreNSURLSession class]])
        return session.didPassCORSAccessChecks;
#endif
    return false;
}
2260
2261 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2262
// Creates the AVPlayerItemVideoOutput used for pixel-buffer painting and
// attaches it to the player item. No-op without a player item or when an
// output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox available no pixel format is forced here; conversion
    // happens later (see the pixel buffer conformer).
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Frame-availability callbacks arrive on the shared pull-delegate queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
}
2282
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    // Tear down the AVPlayerItemVideoOutput created by createVideoOutput().
    if (!m_videoOutput)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // The player item may already be gone; removal is only needed while it exists.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    // Use nullptr (not 0) to clear the RetainPtr, matching modern WebKit style and the
    // log-before-work ordering of destroyOpenGLVideoOutput().
    m_videoOutput = nullptr;
}
2295
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    // Lazily create the video output if a caller asks for a pixel buffer first.
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    // hasNewPixelBufferForItemTime: returns NO when the buffer for this time was
    // already vended; callers fall back to the last image in that case.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return nullptr; // nullptr, not 0, for the null RetainPtr (WebKit style).

    // copyPixelBufferForItemTime: returns a +1 reference; adoptCF takes ownership.
    return adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
}
2309
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    // No player item means nothing can possibly be rendered yet.
    if (!m_avPlayerItem)
        return false;

    // A previously decoded image counts as an available frame; otherwise ask the
    // (lazily created) video output whether a new pixel buffer exists for "now".
    if (!m_lastImage) {
        if (!m_videoOutput)
            createVideoOutput();
        return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
    }

    return true;
}
2323
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!pixelBuffer)
        return;

    // Lazily create the conformer that turns CVPixelBuffers into CGImages.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        // With VideoToolbox the output vends buffers in their native format
        // (see createVideoOutput()), so ask the conformer to convert to 32BGRA here.
        NSDictionary *attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        // Otherwise the video output was already configured for 32BGRA; no conversion needed.
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)attributes);
    }

#if !RELEASE_LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(pixelBuffer.get());

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating buffer took ", monotonicallyIncreasingTime() - start);
#endif
}
2353
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    // If the output exists but has never produced a frame, block (up to 1 second,
    // see waitForVideoOutputMediaDataWillChange()) so the first paint is not empty.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    INFO_LOG(LOGIDENTIFIER);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    // Draw in the track's preferred-transform space: concatenate the transform onto the
    // context and map the destination rect through its inverse, so the net on-screen
    // placement is unchanged while the image itself is rotated/flipped correctly.
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    FloatRect transformedOutputRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2384
void MediaPlayerPrivateAVFoundationObjC::createOpenGLVideoOutput()
{
    INFO_LOG(LOGIDENTIFIER);

    // Nothing to attach to yet, or the output already exists.
    if (!m_avPlayerItem || m_openGLVideoOutput)
        return;

    // Request IOSurface-backed buffers that are FBO-compatible for the GL flavor
    // in use on this platform (OpenGL ES on iOS, desktop OpenGL elsewhere).
#if PLATFORM(IOS)
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLESFBOCompatibilityKey: @YES};
#else
    NSDictionary* attributes = @{(NSString *)kCVPixelBufferIOSurfaceOpenGLFBOCompatibilityKey: @YES};
#endif
    m_openGLVideoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_openGLVideoOutput);

    [m_avPlayerItem.get() addOutput:m_openGLVideoOutput.get()];
}
2402
void MediaPlayerPrivateAVFoundationObjC::destroyOpenGLVideoOutput()
{
    // Tear down the GL-oriented output created by createOpenGLVideoOutput().
    if (!m_openGLVideoOutput)
        return;

    INFO_LOG(LOGIDENTIFIER);

    // The player item may already be gone; removal is only needed while it exists.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_openGLVideoOutput.get()];

    // Use nullptr (not 0) to clear the RetainPtr, per modern WebKit style.
    m_openGLVideoOutput = nullptr;
}
2415
void MediaPlayerPrivateAVFoundationObjC::updateLastOpenGLImage()
{
    // Pull the newest pixel buffer for "now" from the GL output, if one is available;
    // otherwise m_lastOpenGLImage keeps its previous value.
    if (!m_openGLVideoOutput)
        return;

    CMTime itemTime = [m_openGLVideoOutput itemTimeForHostTime:CACurrentMediaTime()];
    if ([m_openGLVideoOutput hasNewPixelBufferForItemTime:itemTime])
        m_lastOpenGLImage = adoptCF([m_openGLVideoOutput copyPixelBufferForItemTime:itemTime itemTimeForDisplay:nil]);
}
2427
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    ASSERT(context);

    // Lazily create the GL-oriented video output, then grab its newest frame.
    if (!m_openGLVideoOutput)
        createOpenGLVideoOutput();

    updateLastOpenGLImage();

    // No frame has ever been produced; nothing to copy.
    if (!m_lastOpenGLImage)
        return false;

    size_t width = CVPixelBufferGetWidth(m_lastOpenGLImage.get());
    size_t height = CVPixelBufferGetHeight(m_lastOpenGLImage.get());

    // The texture cache converts CVPixelBuffers into GL textures; creation can fail.
    if (!m_textureCache) {
        m_textureCache = TextureCacheCV::create(*context);
        if (!m_textureCache)
            return false;
    }

    RetainPtr<CVOpenGLTextureRef> videoTexture = m_textureCache->textureFromImage(m_lastOpenGLImage.get(), outputTarget, level, internalFormat, format, type);

    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    // Copy (honoring the caller's premultiply/flip options) into the caller's texture.
    return m_videoTextureCopier->copyVideoTextureToPlatformTexture(videoTexture.get(), width, height, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2456
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    // Refresh m_lastImage from the video output; updateLastImage() keeps the previous
    // image when no new pixel buffer is available for the current time.
    updateLastImage();
    return m_lastImage;
}
2462
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Lazily create the semaphore used to rendezvous with the video output delegate.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    // Ask for a notification as soon as new media data arrives;
    // outputMediaDataWillChange() signals the semaphore from the delegate queue.
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // dispatch_semaphore_wait returns non-zero when the wait timed out.
    if (result)
        ERROR_LOG(LOGIDENTIFIER, "timed out");
}
2476
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    // Called on the pull delegate queue; wakes waitForVideoOutputMediaDataWillChange(),
    // which is blocked on this semaphore.
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2481
2482 #endif
2483
2484 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2485
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    // Remove and return the pending loading request for this key URI (null if none).
    return m_keyURIToRequestMap.take(keyURI);
}
2490
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    // Satisfy any pending resource-loading requests whose key data has since
    // become available from the player.
    Vector<String> satisfiedKeyIds;

    for (auto& pair : m_keyURIToRequestMap) {
        if (auto keyData = player()->cachedKeyForKeyId(pair.key)) {
            fulfillRequestWithKeyData(pair.value.get(), keyData.get());
            satisfiedKeyIds.append(pair.key);
        }
    }

    // Remove fulfilled entries after iterating, to avoid mutating the map mid-walk.
    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2510
void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
{
    // Only the session handed out by createSession() may be removed; m_session is a
    // weak pointer, so clearing it does not destroy the session.
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2516
std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    // Keep only a weak reference; ownership transfers to the caller.
    m_session = session->createWeakPtr();
    return WTFMove(session);
}
2525
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
    // Surface an HDCP failure to the CDM session when output becomes obscured.
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
}
2531
2532 #endif
2533
2534 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2535
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Deselect any legible media option; legacy CC tracks are managed manually below.
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every known text track went away; each track re-discovered
    // below is taken back off this list, and the remainder is reported as removed.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Walk backwards so remove(i - 1) cannot skip unvisited entries.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once entries have been
            // removed from removedTextTracks (or appended to m_textTracks) the two
            // vectors' indices no longer line up, so indexing m_textTracks here could
            // compare against the wrong track. This mirrors how
            // processMediaSelectionOptions() resolves its candidate track.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2570
2571 #endif
2572
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    // Only query tracks once the asynchronous "tracks" key has finished loading;
    // touching it earlier could block or return incomplete data.
    if (!m_avAsset)
        return nil;

    bool tracksAreLoaded = [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] == AVKeyValueStatusLoaded;
    if (!tracksAreLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2583
2584 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2585
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    // The media-selection metadata is loaded asynchronously; report readiness only
    // once the corresponding asset key has finished loading.
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2596
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    // nil until the asset's media-selection metadata has loaded ("safe" accessor).
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2604
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    // nil until the asset's media-selection metadata has loaded ("safe" accessor).
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2612
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    // nil until the asset's media-selection metadata has loaded ("safe" accessor).
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2620
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Start by assuming every known text track went away; each option matched below is
    // taken back off this list, and whatever remains is reported as removed.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Walk backwards so remove(i - 1) cannot skip unvisited entries.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy CC tracks are handled elsewhere (processLegacyClosedCaptionsTracks).
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            // Resolve the candidate track's media selection option; the accessor lives on
            // a different concrete subclass for out-of-band vs. in-band tracks.
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2676
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    // Create the in-band metadata text track lazily, exactly once.
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2686
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    // Forward the cue payload (toll-free bridged to CF arrays) to the selected text
    // track, if any; cues arriving with no track selected are dropped.
    if (m_currentTextTrack)
        m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2696
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    INFO_LOG(LOGIDENTIFIER);

    // Discard all cue state on the selected text track, if one exists.
    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2706
2707 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2708
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");

    m_currentTextTrack = track;

    if (track) {
        // Legacy CC tracks are toggled via the (deprecated) player-level switch; every
        // other category is selected through the legible media selection group.
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#pragma clang diagnostic pop
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselecting: clear both the media selection and the legacy CC switch.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
#pragma clang diagnostic pop
    }

}
2743
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Return the cached answer when one exists (m_languageOfPrimaryAudioTrack is mutable).
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
#pragma clang diagnostic pop
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single "primary" language can be determined.
        m_languageOfPrimaryAudioTrack = emptyString();
        INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

    INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");

    return m_languageOfPrimaryAudioTrack;
}
2783
2784 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    // On macOS the answer depends on the kind of playback target that was set:
    // AVFoundation targets defer to AVPlayer's external-playback state, other
    // (mock) targets to our own should-play flag plus the target's route state.
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    // On iOS, AVPlayer is the sole source of truth.
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    INFO_LOG(LOGIDENTIFIER, "- ", wirelessTarget);

    return wirelessTarget;
}
2804
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    // The soft-linked framework may be unavailable; without it no target type exists.
    if (!AVFoundationLibrary())
        return MediaPlayer::TargetTypeNone;

    // Map AVPlayer's external playback type onto the engine-neutral enum.
    switch ([m_avPlayer externalPlaybackType]) {
    case AVPlayerExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case AVPlayerExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case AVPlayerExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    // On macOS, wireless playback targets are always treated as AirPlay.
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2830     
2831 #if PLATFORM(IOS)
// Returns a user-presentable name for the AirPlay route the player is using, or nil
// when no AirPlay playback is active or the route list is unavailable.
// NOTE(review): the name is misspelled ("exernal" for "external"); renaming requires
// updating its caller in wirelessPlaybackTargetName() as well.
static NSString *exernalDeviceDisplayNameForPlayer(AVPlayerType *player)
{
    NSString *displayName = nil;

    if (!AVFoundationLibrary())
        return nil;

    if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
        return nil;

    // MRMediaRemoteCopyPickableRoutes returns a +1 CF array; CFBridgingRelease
    // transfers ownership to the ObjC side.
    NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
    if (!pickableRoutes.count)
        return nil;

    for (NSDictionary *pickableRoute in pickableRoutes) {
        // Only the currently picked route matters.
        if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
            continue;

        displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];

        NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
        if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
            break;

        // The route is a speaker or HDMI out, override the name to be the localized device model.
        NSString *localizedDeviceModel = [[UIDevice currentDevice] localizedModel];

        // In cases where a route with that name already exists, prefix the name with the model.
        BOOL includeLocalizedDeviceModelName = NO;
        for (NSDictionary *otherRoute in pickableRoutes) {
            if (otherRoute == pickableRoute)
                continue;

            if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
                includeLocalizedDeviceModelName = YES;
                break;
            }
        }

        if (includeLocalizedDeviceModelName)
            displayName =  [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
        else
            displayName = localizedDeviceModel;

        break;
    }

    return displayName;
}
2881 #endif
2882
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    // On macOS the explicitly-set playback target knows its own device name.
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    // On iOS, derive the name from the currently picked AirPlay route.
    wirelessTargetName = exernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif

    return wirelessTargetName;
}
2898
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    // Without a player, answer from the cached value (m_allowsWirelessVideoPlayback
    // is mutable so this const method can refresh it below).
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    // Refresh the cache from AVPlayer before answering.
    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    INFO_LOG(LOGIDENTIFIER, "- ", !m_allowsWirelessVideoPlayback);

    return !m_allowsWirelessVideoPlayback;
}
2909
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    INFO_LOG(LOGIDENTIFIER, "- ", disabled);

    bool allowsExternalPlayback = !disabled;
    m_allowsWirelessVideoPlayback = allowsExternalPlayback;

    if (m_avPlayer) {
        // Suppress re-entrant observer callbacks while updating the AVPlayer.
        setDelayCallbacks(true);
        [m_avPlayer.get() setAllowsExternalPlayback:allowsExternalPlayback];
        setDelayCallbacks(false);
    }
}
2921
2922 #if !PLATFORM(IOS)
2923
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    // Only AVFoundation targets carry an AVOutputContext; other target kinds leave it null.
    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    INFO_LOG(LOGIDENTIFIER);

    // A target without an active route cannot be played to.
    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2935
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", shouldPlay);

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        // Route via AVOutputContext: attach the stored context when enabling, detach
        // (nil) when disabling.
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        if (!m_avPlayer)
            return;

        // Skip the update when the player already has the desired context
        // (both nil, or isEqual:).
        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        // Suppress re-entrant observer callbacks while updating the AVPlayer.
        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    // The only other supported target kind is the mock target used for testing.
    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    // Notify asynchronously on the main thread; the weak pointer guards against this
    // player being destroyed before the notification runs.
    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2976
2977 #endif // !PLATFORM(IOS)
2978
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS)
    if (!m_avPlayer)
        return;

    // Only use external playback while an external screen is active if the element is
    // in standard fullscreen mode.
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
2988
2989 #endif
2990
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    // Cache the new AVPlayerItem status, then re-derive the player's ready/network state.
    m_cachedItemStatus = status;
    updateStates();
}
2997
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    // A KVO "will change" is in flight; defer state updates until the matching
    // "did change" balances the counter (see playbackLikelyToKeepUpDidChange()).
    m_pendingStatusChanges++;
}
3002
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    // Recompute state only once every outstanding will/did pair has balanced out.
    ASSERT(m_pendingStatusChanges);
    m_pendingStatusChanges--;
    if (!m_pendingStatusChanges)
        updateStates();
}
3011
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    // A KVO "will change" is in flight; defer state updates until the matching
    // "did change" balances the counter (see playbackBufferEmptyDidChange()).
    m_pendingStatusChanges++;
}
3016
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    // Recompute state only once every outstanding will/did pair has balanced out.
    ASSERT(m_pendingStatusChanges);
    m_pendingStatusChanges--;
    if (!m_pendingStatusChanges)
        updateStates();
}
3025
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    // A KVO "will change" is in flight; defer state updates until the matching
    // "did change" balances the counter (see playbackBufferFullDidChange()).
    m_pendingStatusChanges++;
}
3030
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    // Recompute state only once every outstanding will/did pair has balanced out.
    ASSERT(m_pendingStatusChanges);
    m_pendingStatusChanges--;
    if (!m_pendingStatusChanges)
        updateStates();
}
3039
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    // Cache the new ranges, then notify and refresh derived state.
    m_cachedSeekableRanges = WTFMove(seekableRanges);
    seekableTimeRangesChanged();
    updateStates();
}
3047
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    // Cache the new ranges, then notify and refresh derived state.
    m_cachedLoadedRanges = WTFMove(loadedRanges);
    loadedTimeRangesChanged();
    updateStates();
}
3055
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;

    // Becoming ready for display can reveal a video track we have not reported yet.
    if (isReady && !hasVideo())
        tracksChanged();
    updateStates();
}
3063
3064 void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)