6a12f28d57cf65941b282f07e68837a0d0d971f8
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2017 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27 #import "MediaPlayerPrivateAVFoundationObjC.h"
28
29 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
30
31 #import "AVAssetTrackUtilities.h"
32 #import "AVFoundationMIMETypeCache.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "AudioSourceProviderAVFObjC.h"
35 #import "AudioTrackPrivateAVFObjC.h"
36 #import "AuthenticationChallenge.h"
37 #import "CDMInstanceFairPlayStreamingAVFObjC.h"
38 #import "CDMSessionAVFoundationObjC.h"
39 #import "Cookie.h"
40 #import "DeprecatedGlobalSettings.h"
41 #import "Extensions3D.h"
42 #import "FloatConversion.h"
43 #import "GraphicsContext.h"
44 #import "GraphicsContext3D.h"
45 #import "GraphicsContextCG.h"
46 #import "InbandMetadataTextTrackPrivateAVF.h"
47 #import "InbandTextTrackPrivateAVFObjC.h"
48 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaPlaybackTargetMock.h"
52 #import "MediaSelectionGroupAVFObjC.h"
53 #import "OutOfBandTextTrackPrivateAVF.h"
54 #import "PixelBufferConformerCV.h"
55 #import "PlatformTimeRanges.h"
56 #import "SecurityOrigin.h"
57 #import "SerializedPlatformRepresentationMac.h"
58 #import "SharedBuffer.h"
59 #import "TextEncoding.h"
60 #import "TextTrackRepresentation.h"
61 #import "TextureCacheCV.h"
62 #import "URL.h"
63 #import "VideoFullscreenLayerManagerObjC.h"
64 #import "VideoTextureCopierCV.h"
65 #import "VideoTrackPrivateAVFObjC.h"
66 #import "WebCoreAVFResourceLoader.h"
67 #import "WebCoreCALayerExtras.h"
68 #import "WebCoreNSURLSession.h"
69 #import <JavaScriptCore/DataView.h>
70 #import <JavaScriptCore/JSCInlines.h>
71 #import <JavaScriptCore/TypedArrayInlines.h>
72 #import <JavaScriptCore/Uint16Array.h>
73 #import <JavaScriptCore/Uint32Array.h>
74 #import <JavaScriptCore/Uint8Array.h>
75 #import <functional>
76 #import <objc/runtime.h>
77 #import <pal/avfoundation/MediaTimeAVFoundation.h>
78 #import <pal/spi/cocoa/QuartzCoreSPI.h>
79 #import <pal/spi/mac/AVFoundationSPI.h>
80 #import <wtf/BlockObjCExceptions.h>
81 #import <wtf/ListHashSet.h>
82 #import <wtf/NeverDestroyed.h>
83 #import <wtf/OSObjectPtr.h>
84 #import <wtf/text/CString.h>
85
86 #if ENABLE(AVF_CAPTIONS)
87 #include "TextTrack.h"
88 #endif
89
90 #import <AVFoundation/AVAssetImageGenerator.h>
91 #import <AVFoundation/AVAssetTrack.h>
92 #import <AVFoundation/AVMediaSelectionGroup.h>
93 #import <AVFoundation/AVMetadataItem.h>
94 #import <AVFoundation/AVPlayer.h>
95 #import <AVFoundation/AVPlayerItem.h>
96 #import <AVFoundation/AVPlayerItemOutput.h>
97 #import <AVFoundation/AVPlayerItemTrack.h>
98 #import <AVFoundation/AVPlayerLayer.h>
99 #import <AVFoundation/AVTime.h>
100
101 #if PLATFORM(IOS)
102 #import "WAKAppKitStubs.h"
103 #import <CoreImage/CoreImage.h>
104 #import <UIKit/UIDevice.h>
105 #import <mach/mach_port.h>
106 #else
107 #import <Foundation/NSGeometry.h>
108 #import <QuartzCore/CoreImage.h>
109 #endif
110
111 #if USE(VIDEOTOOLBOX)
112 #import <CoreVideo/CoreVideo.h>
113 #import <VideoToolbox/VideoToolbox.h>
114 #endif
115
116 #import "CoreVideoSoftLink.h"
117 #import "MediaRemoteSoftLink.h"
118
119 namespace std {
120 template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
121     typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
122 };
123 }
124
125 #if ENABLE(AVF_CAPTIONS)
126 // Note: This must be defined before our SOFT_LINK macros:
127 @class AVMediaSelectionOption;
128 @interface AVMediaSelectionOption (OutOfBandExtensions)
129 @property (nonatomic, readonly) NSString* outOfBandSource;
130 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
131 @end
132 #endif
133
134 @interface AVURLAsset (WebKitExtensions)
135 @property (nonatomic, readonly) NSURL *resolvedURL;
136 @end
137
138 typedef AVPlayer AVPlayerType;
139 typedef AVPlayerItem AVPlayerItemType;
140 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
141 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
142 typedef AVMetadataItem AVMetadataItemType;
143 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
144 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
145 typedef AVAssetCache AVAssetCacheType;
146
147 #pragma mark - Soft Linking
148
149 // Soft-linking headers must be included last since they #define functions, constants, etc.
150 #import <pal/cf/CoreMediaSoftLink.h>
151
152 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
153
154 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
155
156 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayer)
157 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItem)
158 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerItemVideoOutput)
159 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVPlayerLayer)
160 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVURLAsset)
161 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetImageGenerator)
162 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVMetadataItem)
163 SOFT_LINK_CLASS_FOR_SOURCE(WebCore, AVFoundation, AVAssetCache)
164
165 SOFT_LINK_CLASS(CoreImage, CIContext)
166 SOFT_LINK_CLASS(CoreImage, CIImage)
167
168 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
169 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
170 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
171 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
172 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
173 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
174 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
175 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
176 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
177 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
178 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
179 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
180 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
182 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
183
184 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
185 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetRequiresCustomURLLoadingKey, NSString *)
186 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetOutOfBandMIMETypeKey, NSString *)
187 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetUseClientURLLoadingExclusively, NSString *)
188
189 #define AVPlayer initAVPlayer()
190 #define AVPlayerItem initAVPlayerItem()
191 #define AVPlayerLayer initAVPlayerLayer()
192 #define AVURLAsset initAVURLAsset()
193 #define AVAssetImageGenerator initAVAssetImageGenerator()
194 #define AVPlayerItemVideoOutput initAVPlayerItemVideoOutput()
195 #define AVMetadataItem initAVMetadataItem()
196 #define AVAssetCache initAVAssetCache()
197
198 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
199 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
200 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
201 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
202 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
203 #define AVMediaTypeVideo getAVMediaTypeVideo()
204 #define AVMediaTypeAudio getAVMediaTypeAudio()
205 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
206 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
207 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
208 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
209 #define AVURLAssetRequiresCustomURLLoadingKey getAVURLAssetRequiresCustomURLLoadingKey()
210 #define AVURLAssetOutOfBandMIMETypeKey getAVURLAssetOutOfBandMIMETypeKey()
211 #define AVURLAssetUseClientURLLoadingExclusively getAVURLAssetUseClientURLLoadingExclusively()
212 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
213 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
214 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
215 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
216 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
217
218 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
219 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
220 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
221
222 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
223 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
224 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
225
226 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
227 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
228 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
229 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
230
231 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
232 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
233 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
234 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
235 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
236 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
237 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
238 #endif
239
240 #if ENABLE(AVF_CAPTIONS)
241 SOFT_LINK_POINTER(AVFoundation, AVURLAssetCacheKey, NSString*)
242 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
243 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
244 SOFT_LINK_POINTER(AVFoundation, AVURLAssetUsesNoPersistentCacheKey, NSString*)
245 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
246 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
247 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
248 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
249 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
250 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicIsAuxiliaryContent, NSString*)
254
255 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
256 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
257 #define AVURLAssetCacheKey getAVURLAssetCacheKey()
258 #define AVURLAssetUsesNoPersistentCacheKey getAVURLAssetUsesNoPersistentCacheKey()
259 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
260 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
261 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
262 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
263 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
264 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
265 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
266 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
267 #define AVMediaCharacteristicIsAuxiliaryContent getAVMediaCharacteristicIsAuxiliaryContent()
268 #endif
269
270 #if ENABLE(DATACUE_VALUE)
271 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
272 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
273 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
274 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
275 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
276
277 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
278 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
279 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
280 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
281 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
282 #endif
283
284 #if PLATFORM(IOS)
285 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
286 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
287 #endif
288
289 SOFT_LINK_FRAMEWORK(MediaToolbox)
290 SOFT_LINK_OPTIONAL(MediaToolbox, MTEnableCaption2015Behavior, Boolean, (), ())
291
292 #if PLATFORM(IOS)
293 #if HAVE(CELESTIAL)
294 SOFT_LINK_PRIVATE_FRAMEWORK(Celestial)
295 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_RouteCurrentlyPicked, NSString *)
296 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_RouteName, NSString *)
297 SOFT_LINK_POINTER(Celestial, AVController_RouteDescriptionKey_AVAudioRouteName, NSString *)
298 #define AVController_RouteDescriptionKey_RouteCurrentlyPicked getAVController_RouteDescriptionKey_RouteCurrentlyPicked()
299 #define AVController_RouteDescriptionKey_RouteName getAVController_RouteDescriptionKey_RouteName()
300 #define AVController_RouteDescriptionKey_AVAudioRouteName getAVController_RouteDescriptionKey_AVAudioRouteName()
301 #endif // HAVE(CELESTIAL)
302
303 SOFT_LINK_FRAMEWORK(UIKit)
304 SOFT_LINK_CLASS(UIKit, UIDevice)
305 #define UIDevice getUIDeviceClass()
306 #endif // PLATFORM(IOS)
307
308 using namespace WebCore;
309
310 enum MediaPlayerAVFoundationObservationContext {
311     MediaPlayerAVFoundationObservationContextPlayerItem,
312     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
313     MediaPlayerAVFoundationObservationContextPlayer,
314     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
315 };
316
317 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
318 @interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
319 #else
320 @interface WebCoreAVFMovieObserver : NSObject
321 #endif
322 {
323     MediaPlayerPrivateAVFoundationObjC* m_callback;
324     int m_delayCallbacks;
325 }
326 -(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
327 -(void)disconnect;
328 -(void)metadataLoaded;
329 -(void)didEnd:(NSNotification *)notification;
330 -(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
331 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
332 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
333 - (void)outputSequenceWasFlushed:(id)output;
334 #endif
335 @end
336
337 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
338 @interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
339     MediaPlayerPrivateAVFoundationObjC* m_callback;
340 }
341 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
342 - (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
343 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
344 @end
345 #endif
346
347 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
348 @interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
349     MediaPlayerPrivateAVFoundationObjC *m_callback;
350     dispatch_semaphore_t m_semaphore;
351 }
352 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
353 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
354 - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
355 - (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
356 @end
357 #endif
358
359 namespace WebCore {
360 using namespace PAL;
361
362 static NSArray *assetMetadataKeyNames();
363 static NSArray *itemKVOProperties();
364 static NSArray *assetTrackMetadataKeyNames();
365 static NSArray *playerKVOProperties();
366 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
367
368 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily-created serial queue shared by all WebCoreAVFLoaderDelegate
// instances; dispatch_once guarantees a single creation even under races.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
378 #endif
379
380 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily-created serial queue on which WebCoreAVFPullDelegate callbacks are
// delivered; created exactly once via dispatch_once.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullQueue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
390 #endif
391
// Registers this media engine with the MediaPlayer factory. No-op when the
// soft-linked AVFoundation framework is unavailable at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    // Hand the registrar a factory plus the static capability/cache callbacks
    // defined on this class.
    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, originsInMediaCache, clearMediaCache, clearMediaCacheForOrigins, supportsKeySystem);
    // Kick off (pre-)loading of the supported MIME type list.
    AVFoundationMIMETypeCache::singleton().loadTypes();
}
401
// Returns the AVAssetCache rooted at the given directory path. An empty path
// falls back to a "MediaCache" directory inside the temporary directory.
static AVAssetCacheType *assetCacheForPath(const String& path)
{
    NSURL *cacheDirectoryURL = path.isEmpty()
        ? [[NSURL fileURLWithPath:NSTemporaryDirectory()] URLByAppendingPathComponent:@"MediaCache" isDirectory:YES]
        : [NSURL fileURLWithPath:path isDirectory:YES];

    return [initAVAssetCache() assetCacheWithURL:cacheDirectoryURL];
}
413
// Collects the security origins of every entry in the media cache at `path`.
// Cache keys are the URLs the entries were cached for; invalid keys are skipped.
HashSet<RefPtr<SecurityOrigin>> MediaPlayerPrivateAVFoundationObjC::originsInMediaCache(const String& path)
{
    HashSet<RefPtr<SecurityOrigin>> cachedOrigins;
    for (NSString *cacheKey in [assetCacheForPath(path) allKeys]) {
        URL entryURL = URL(URL(), cacheKey);
        if (entryURL.isValid())
            cachedOrigins.add(SecurityOrigin::create(entryURL));
    }
    return cachedOrigins;
}
424
// Converts an NSDate to a WallTime (seconds since the Unix epoch).
static WallTime toSystemClockTime(NSDate *date)
{
    ASSERT(date);
    return WallTime::fromRawSeconds([date timeIntervalSince1970]);
}
430
// Removes media-cache content at `path` that was modified after
// `modifiedSince`. A non-positive cutoff clears the entire cache directory.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCache(const String& path, WallTime modifiedSince)
{
    AVAssetCacheType* assetCache = assetCacheForPath(path);

    // First purge AVAssetCache entries newer than the cutoff.
    for (NSString *key in [assetCache allKeys]) {
        if (toSystemClockTime([assetCache lastModifiedDateOfEntryForKey:key]) > modifiedSince)
            [assetCache removeEntryForKey:key];
    }

    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSURL *baseURL = [assetCache URL];

    // "Clear everything" case: delete the whole cache directory and stop.
    if (modifiedSince <= WallTime::fromRawSeconds(0)) {
        [fileManager removeItemAtURL:baseURL error:nil];
        return;
    }
    
    NSArray *propertyKeys = @[NSURLNameKey, NSURLContentModificationDateKey, NSURLIsRegularFileKey];
    NSDirectoryEnumerator *enumerator = [fileManager enumeratorAtURL:baseURL includingPropertiesForKeys:
        propertyKeys options:NSDirectoryEnumerationSkipsSubdirectoryDescendants
        errorHandler:nil];
    
    RetainPtr<NSMutableArray> urlsToDelete = adoptNS([[NSMutableArray alloc] init]);
    // FIX: use Objective-C fast enumeration ("in"), not a C++ range-based for
    // (":") — NSDirectoryEnumerator has no begin()/end(), so the range-for
    // form does not compile; the deletion loop below already uses "in".
    for (NSURL *fileURL in enumerator) {
        NSDictionary *fileAttributes = [fileURL resourceValuesForKeys:propertyKeys error:nil];
    
        // Only regular files with the media-cache naming prefix that are
        // newer than the cutoff are eligible for deletion.
        if (![fileAttributes[NSURLNameKey] hasPrefix:@"CachedMedia-"])
            continue;
        
        if (![fileAttributes[NSURLIsRegularFileKey] boolValue])
            continue;
        
        if (toSystemClockTime(fileAttributes[NSURLContentModificationDateKey]) <= modifiedSince)
            continue;
        
        [urlsToDelete addObject:fileURL];
    }
    
    // Deletion is deferred so the directory isn't mutated while enumerating it.
    for (NSURL *fileURL in urlsToDelete.get())
        [fileManager removeItemAtURL:fileURL error:nil];
}
472
// Removes every cache entry at `path` whose key URL belongs to one of the
// given security origins.
void MediaPlayerPrivateAVFoundationObjC::clearMediaCacheForOrigins(const String& path, const HashSet<RefPtr<SecurityOrigin>>& origins)
{
    AVAssetCacheType* assetCache = assetCacheForPath(path);
    for (NSString *cacheKey in [assetCache allKeys]) {
        URL entryURL = URL(URL(), cacheKey);
        if (entryURL.isValid() && origins.contains(SecurityOrigin::create(entryURL)))
            [assetCache removeEntryForKey:cacheKey];
    }
}
484
// Constructs the AVFoundation-backed player private. All heavy AVFoundation
// objects (AVPlayer, AVPlayerItem, layers, outputs) are created lazily later;
// this only sets up helper/observer objects and default cached state.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_videoFullscreenLayerManager(std::make_unique<VideoFullscreenLayerManagerObjC>())
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
    // Objective-C observer that forwards KVO and notification callbacks to us.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Delegate for pull-mode video output; the semaphore is created on demand.
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Delegate that routes AVAssetResourceLoader requests through WebCore.
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
}
516
// Tears down delegates first so no asynchronous callback can re-enter this
// (now dying) object, then destroys rendering state and cancels loading.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the resource-loader delegate and invalidate outstanding loaders.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Detach the pull delegate; release the semaphore if one was ever created.
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    // destroyVideoLayer() also removes the layer's KVO registration.
    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
538
// Cancels any in-flight loading and releases all AVFoundation objects,
// unregistering every notification/KVO observation that was added for them.
// The teardown order matters: observers must be removed before the observed
// objects are released.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    INFO_LOG(LOGIDENTIFIER);
    tearDownVideoRendering();

    // Stop all notification and KVO delivery through our Objective-C observer.
    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before releasing it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Balance the KVO registrations made for every key in itemKVOProperties().
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    // Likewise for the player: periodic time observer and playerKVOProperties().
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        [m_avPlayer replaceCurrentItemWithPlayerItem:nil];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was observed for "enabled"; balance those registrations.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Disconnect the Web Audio source provider from the released item/track.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
604
// True once layer-backed rendering has been requested via createVideoLayer()
// (the flag is set on the main thread even before the layer itself exists).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
609
// True when a context-based renderer exists: either an AVPlayerItemVideoOutput
// (when supported) or the AVAssetImageGenerator fallback.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
618
// Creates the renderer used for painting into a graphics context: the video
// output path when available, otherwise the image-generator fallback.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
627
// Lazily creates the AVAssetImageGenerator used as a fallback renderer when
// no video output is available. No-op without an asset or if one exists.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    using namespace PAL;
    INFO_LOG(LOGIDENTIFIER);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    // Generate clean-aperture frames in track orientation, with exact timing.
    auto generator = m_imageGenerator.get();
    [generator setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [generator setAppliesPreferredTrackTransform:YES];
    [generator setRequestedTimeToleranceBefore:kCMTimeZero];
    [generator setRequestedTimeToleranceAfter:kCMTimeZero];
}
643
// Destroys whichever context renderers exist; both calls are no-ops when the
// corresponding renderer was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
651
// Releases the image generator, if any; safe to call repeatedly.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    INFO_LOG(LOGIDENTIFIER);

    m_imageGenerator = nil;
}
661
// Requests creation of the AVPlayerLayer used for layer-backed rendering.
// The actual work is bounced to the main thread; a weak pointer protects
// against this object being destroyed before the task runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    callOnMainThread([this, weakThis = createWeakPtr()] {
        if (!weakThis)
            return;

        // Re-check: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX) && HAVE(AVFOUNDATION_VIDEO_OUTPUT)
        // Also keep a video output alive so frames can be painted to a context.
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Tell the client the rendering mode changed so it can re-layout.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
686
// Creates the AVPlayerLayer, attaches it to the player, registers KVO on its
// readyForDisplay key, and hands the layer to the fullscreen layer manager.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];

#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observed so hasAvailableVideoFrame() can track layer readiness; removed
    // again in destroyVideoLayer().
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = snappedIntRect(player()->client().mediaPlayerContentBoxRect()).size();
    INFO_LOG(LOGIDENTIFIER);

    m_videoFullscreenLayerManager->setVideoLayer(m_videoLayer.get(), defaultSize);

#if PLATFORM(IOS) && !ENABLE(EXTRA_ZOOM_MODE)
    // NOTE(review): setPIPModeEnabled: appears to be SPI that may be absent on
    // some OS versions — hence the respondsToSelector: guard.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModePictureInPicture)];
#endif
}
711
// Tears down the AVPlayerLayer: the KVO registration added in
// createAVPlayerLayer() is removed before the layer is detached and released.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    [m_videoLayer removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer setPlayer:nil];
    m_videoFullscreenLayerManager->didDestroyVideoLayer();

    m_videoLayer = nil;
}
725
// Returns the media's start date in milliseconds since the epoch, derived by
// subtracting the current playback offset from the item's current date, or
// an invalid MediaTime when the media carries no start date.
MediaTime MediaPlayerPrivateAVFoundationObjC::getStartDate() const
{
    // The item's currentDate advances with the playback position; both values
    // below are converted to milliseconds before subtracting.
    double dateMilliseconds = [[m_avPlayerItem currentDate] timeIntervalSince1970] * 1000;

    // AVFoundation reports 0 when the media file has no start date, and no
    // live stream predates the 1970 epoch, so 0 means "not available".
    if (!dateMilliseconds)
        return MediaTime::invalidTime();

    double currentTimeMilliseconds = CMTimeGetSeconds([m_avPlayerItem currentTime]) * 1000;

    // Round to absorb the sub-second error the subtraction can introduce.
    return MediaTime::createWithDouble(round(dateMilliseconds - currentTimeMilliseconds));
}
740
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    // When rendering through the AVPlayerLayer, the KVO-cached readyForDisplay
    // state is authoritative.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return m_cachedIsReadyForDisplay;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Otherwise a frame is available if the video output already handed us a
    // pixel buffer, or has a new one ready for the current item time.
    if (m_videoOutput && (m_lastPixelBuffer || [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]]))
        return true;
#endif

    return m_videoFrameHasDrawn;
}
753
754 #if ENABLE(AVF_CAPTIONS)
// Maps a platform text track kind onto the AVFoundation media characteristic
// array used when registering out-of-band tracks.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // With the 2015 caption behavior active, every out-of-band track is tagged
    // as auxiliary content and selection is driven manually.
    static bool manualSelectionMode = MTEnableCaption2015BehaviorPtr() && MTEnableCaption2015BehaviorPtr()();
    if (manualSelectionMode)
        return @[ AVMediaCharacteristicIsAuxiliaryContent ];

    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Caption:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    case PlatformTextTrack::Subtitle:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    case PlatformTextTrack::Description:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];
    case PlatformTextTrack::Forced:
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];
    default:
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
    }
}
776     
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    // Forward out-of-band text track mode changes to the shared handler.
    trackModeChanged();
}
781     
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    // Push the modes of the client's out-of-band track sources onto the
    // matching OutOfBandTextTrackPrivateAVF instances. Tracks are matched by
    // comparing each media selection option's outOfBandIdentifier against the
    // source's unique id.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Translate the platform mode onto the inband track mode; any
            // unrecognized mode falls back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
812 #endif
813
814
// Runs the URL through NSURLProtocol canonicalization so it matches the form
// the URL loading system would actually request; falls back to the original
// URL whenever canonicalization is not possible.
static NSURL *canonicalURL(const URL& url)
{
    NSURL *originalURL = url;
    if (url.isEmpty())
        return originalURL;

    auto request = adoptNS([[NSURLRequest alloc] initWithURL:originalURL]);
    if (!request)
        return originalURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()];
    return canonicalRequest ? [canonicalRequest URL] : originalURL;
}
831
832 #if PLATFORM(IOS)
// Translates a WebCore Cookie into an NSHTTPCookie via the properties
// dictionary NSHTTPCookie expects. Cookie::expires is in milliseconds since
// the epoch; NSDate wants seconds.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    auto properties = adoptNS([[NSMutableDictionary alloc] init]);
    [properties setDictionary:@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    }];
    // The secure and session-only flags are optional; only add them when set.
    if (cookie.secure)
        properties.get()[NSHTTPCookieSecure] = @YES;
    if (cookie.session)
        properties.get()[NSHTTPCookieDiscard] = @YES;

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
850 #endif
851
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const URL& url)
{
    // Build the AVURLAsset for `url`, assembling its options dictionary:
    // reference restrictions, HTTP headers, custom URL loading, out-of-band
    // MIME type, out-of-band text tracks, cookies, and caching policy.
    if (m_avAsset)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Disallow asset-internal references that cross the local/remote boundary.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Propagate the page's Referer and User-Agent headers to AVFoundation's
    // own network requests.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    // Prefer routing media loads through the client's loader when the relevant
    // option keys are available at runtime (soft-linked; may be null).
    if (AVURLAssetUseClientURLLoadingExclusively)
        [options setObject:@YES forKey:AVURLAssetUseClientURLLoadingExclusively];
#if PLATFORM(IOS)
    else if (AVURLAssetRequiresCustomURLLoadingKey)
        [options setObject:@YES forKey:AVURLAssetRequiresCustomURLLoadingKey];
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

    // Pass along the declared MIME type (with codecs when present) only when
    // it was explicit rather than inferred from the file extension.
    auto type = player()->contentMIMEType();
    if (AVURLAssetOutOfBandMIMETypeKey && !type.isEmpty() && !player()->contentMIMETypeWasInferredFromExtension()) {
        auto codecs = player()->contentTypeCodecs();
        if (!codecs.isEmpty()) {
            NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
            [options setObject:typeString forKey:AVURLAssetOutOfBandMIMETypeKey];
        } else
            [options setObject:(NSString *)type forKey:AVURLAssetOutOfBandMIMETypeKey];
    }

#if ENABLE(AVF_CAPTIONS)
    // Describe the client's out-of-band text tracks so AVFoundation exposes
    // them as media selection options alongside in-band tracks.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the document's cookies for this URL to AVFoundation, which performs
    // its own networking.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(url, cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    bool usePersistentCache = player()->client().mediaPlayerShouldUsePersistentCache();
    [options setObject:@(!usePersistentCache) forKey:AVURLAssetUsesNoPersistentCacheKey];
    
    if (usePersistentCache)
        [options setObject:assetCacheForPath(player()->client().mediaPlayerMediaCacheDirectory()) forKey:AVURLAssetCacheKey];

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route resource-loading callbacks to our loader delegate on its shared queue.
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    [resourceLoader setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    // When the resource loader supports it, give it an NSURLSession backed by
    // WebKit's own media resource loader so media traffic uses WebKit networking.
    if (DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        && [resourceLoader respondsToSelector:@selector(setURLSession:)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegate)]
        && [resourceLoader respondsToSelector:@selector(URLSessionDataDelegateQueue)]) {
        RefPtr<PlatformMediaResourceLoader> mediaResourceLoader = player()->createResourceLoader();
        if (mediaResourceLoader)
            resourceLoader.URLSession = (NSURLSession *)[[[WebCoreNSURLSession alloc] initWithResourceLoader:*mediaResourceLoader delegate:resourceLoader.URLSessionDataDelegate delegateQueue:resourceLoader.URLSessionDataDelegateQueue] autorelease];
    }
#endif

#endif

    // A fresh asset has not had its playability checked yet.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
972
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    // Install `item` as the player's current item. With no player yet there is
    // nothing to update; createAVPlayer() installs the item once it exists.
    if (!m_avPlayer)
        return;

    // The replacement is always performed on the main thread; when called from
    // another thread, bounce there, retaining both objects across the hop.
    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
989
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    // Lazily create the AVPlayer, register the KVO observers, and push the
    // cached player state (external playback, sleep, mute, layer, item) onto
    // the freshly created instance.
    if (m_avPlayer)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore drives media selection itself rather than letting AVFoundation
    // pick tracks automatically.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    if (m_shouldPlayToPlaybackTarget) {
        // Clear m_shouldPlayToPlaybackTarget so setShouldPlayToPlaybackTarget()
        // doesn't return without doing anything, then re-apply it.
        m_shouldPlayToPlaybackTarget = false;
        setShouldPlayToPlaybackTarget(true);
    }
#endif

#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR)
    setShouldDisableSleep(player()->shouldDisableSleep());
#endif

    if (m_muted) {
        // Clear m_muted so setMuted() doesn't early-return, then re-apply the
        // muted state so the new player is actually muted. (The previous code
        // pushed the cleared value to the player, silently losing the mute.)
        m_muted = false;
        setMuted(true);
    }

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
1038
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    // Lazily create the AVPlayerItem for the current asset, register for its
    // end-of-playback notification and KVO, and wire up the legible output,
    // the Web Audio provider, and (in extra-zoom mode) the video output.
    if (m_avPlayerItem)
        return;

    INFO_LOG(LOGIDENTIFIER);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // NSKeyValueObservingOptionPrior delivers will-change callbacks as well as
    // did-change ones to the observer.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Receive WebVTT cues through an AVPlayerItemLegibleOutput delegate rather
    // than letting the player render captions itself.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the audio source provider pointed at the new item and its first
    // enabled audible track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

#if ENABLE(EXTRA_ZOOM_MODE)
    createVideoOutput();
#endif

    setDelayCallbacks(false);
}
1088
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Kick off a one-time asynchronous load of the asset's "playable" and
    // "tracks" keys; completion is reported on the main thread as an
    // AssetPlayabilityKnown notification.
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    INFO_LOG(LOGIDENTIFIER);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"playable", @"tracks", nil] completionHandler:^{
        callOnMainThread([weakThis] {
            // The player may have been destroyed while loading; the weak
            // pointer guards against a stale notification.
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1105
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    // Load the asset-level metadata keys, then fan out to the per-track keys,
    // and invoke metadataLoaded on the main thread once everything completes.
    // A dispatch group joins the asset load with all the per-track loads.
    INFO_LOG(LOGIDENTIFIER);

    OSObjectPtr<dispatch_group_t> metadataLoadingGroup = adoptOSObject(dispatch_group_create());
    dispatch_group_enter(metadataLoadingGroup.get());
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only fan out to tracks if we still exist and "tracks" loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup.get());
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup.get());
                    }];
                }
            }
            // Balance the enter taken before the asset-level load started.
            dispatch_group_leave(metadataLoadingGroup.get());
        });
    }];

    dispatch_group_notify(metadataLoadingGroup.get(), dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });
    });
}
1135
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    // Map the cached AVPlayerItem state onto the cross-platform ItemStatus,
    // checking the most significant conditions first.
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    // The item is ready; refine the status with the cached buffering flags.
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1154
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    // Expose the underlying AVPlayer to clients that need the native object.
    INFO_LOG(LOGIDENTIFIER);
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationMediaPlayerType;
    platformMedia.media.avfMediaPlayer = m_avPlayer.get();
    return platformMedia;
}
1163
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    // The layer WebCore composites inline, as managed by the fullscreen layer manager.
    return m_videoFullscreenLayerManager->videoInlineLayer();
}
1168
void MediaPlayerPrivateAVFoundationObjC::updateVideoFullscreenInlineImage()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Capture the current frame synchronously so the inline placeholder shows
    // the latest image while the video is presented fullscreen.
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->updateVideoFullscreenInlineImage(m_lastImage);
#endif
}
1176
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer, Function<void()>&& completionHandler)
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Capture the current frame first so the layer swap can display it
    // immediately instead of flashing empty.
    updateLastImage(UpdateType::UpdateSynchronously);
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), m_lastImage);
#else
    m_videoFullscreenLayerManager->setVideoFullscreenLayer(videoFullscreenLayer, WTFMove(completionHandler), nil);
#endif
    // External-playback state depends on whether a fullscreen layer is present.
    updateDisableExternalPlayback();
}
1187
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    // Forward the fullscreen frame to the layer manager, which positions the video layer.
    m_videoFullscreenLayerManager->setVideoFullscreenFrame(frame);
}
1192
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    // Remember the requested gravity and, when a video layer exists, translate
    // it to the matching AVLayerVideoGravity constant.
    m_videoFullscreenGravity = gravity;

    if (!m_videoLayer)
        return;

    NSString *videoGravity = AVLayerVideoGravityResizeAspect;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
    }

    // Skip the redundant set; gravity constants are shared NSString globals,
    // so pointer comparison matches the original behavior.
    if ([m_videoLayer videoGravity] == videoGravity)
        return;

    [m_videoLayer setVideoGravity:videoGravity];
    syncTextTrackBounds();
}
1216
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
#if PLATFORM(IOS) && !ENABLE(EXTRA_ZOOM_MODE)
    // Toggle the layer's picture-in-picture flag (the selector is not always
    // available, hence the runtime check) and refresh the external-playback
    // state, which depends on the fullscreen mode.
    if ([m_videoLayer respondsToSelector:@selector(setPIPModeEnabled:)])
        [m_videoLayer setPIPModeEnabled:(mode & MediaPlayer::VideoFullscreenModePictureInPicture)];
    updateDisableExternalPlayback();
#else
    UNUSED_PARAM(mode);
#endif
}
1227
1228 #if PLATFORM(IOS)
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // Return the most recent timed-metadata payload; the RetainPtr yields nil
    // when none has been received, matching the explicit nil return.
    return m_currentMetaData.get();
}
1235
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    // Serialize the player item's access (QoS) log into a string; empty when
    // no player item exists yet.
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    auto logText = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return logText.get();
}
1246
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    // Serialize the player item's error log into a string; empty when no
    // player item exists yet.
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    auto logText = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return logText.get();
}
1257 #endif
1258
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Hide or show the video layer without triggering implicit CA animations.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1267     
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    // Start playback by applying the requested rate. Callbacks are delayed so
    // the resulting observer notifications are delivered as a batch afterwards.
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1279
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    // Pause by dropping the rate to zero, caching it so rate() reflects the
    // change before the KVO notification arrives.
    INFO_LOG(LOGIDENTIFIER);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1291
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return PAL::toMediaTime(cmDuration);

    // An indefinite duration (e.g. a live stream) maps to positive infinity.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    INFO_LOG(LOGIDENTIFIER, "returning invalid time");
    return MediaTime::invalidTime();
}
1316
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    // Report the item's playback position clamped to zero; zero until metadata
    // and a player item are available.
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime position = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(position))
        return MediaTime::zeroTime();

    return std::max(PAL::toMediaTime(position), MediaTime::zeroTime());
}
1328
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partially-delivered metadata cues would straddle the seek; flush them first.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = PAL::toCMTime(time);
    CMTime cmBefore = PAL::toCMTime(negativeTolerance);
    CMTime cmAfter = PAL::toCMTime(positiveTolerance);

    // [AVPlayerItem seekToTime] will throw an exception if toleranceBefore is negative.
    if (CMTimeCompare(cmBefore, kCMTimeZero) < 0)
        cmBefore = kCMTimeZero;
    
    auto weakThis = createWeakPtr();

    // The completion handler may fire on an arbitrary queue; hop to the main
    // thread and only notify when this object is still alive.
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1359
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    // Volume requests are intentionally ignored on iOS.
    UNUSED_PARAM(volume);
    return;
#else
    if (!m_avPlayer)
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1372
void MediaPlayerPrivateAVFoundationObjC::setMuted(bool muted)
{
    // Record the new muted state and push it to the AVPlayer when one exists.
    if (m_muted == muted)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", muted);

    m_muted = muted;

    if (m_avPlayer)
        [m_avPlayer.get() setMuted:m_muted];
}
1387
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // Intentionally a no-op apart from logging; this override does not change
    // any player state.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", closedCaptionsVisible);
}
1397
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    // Cache the rate so rate() reflects the change immediately, then apply it
    // to the player with the resulting callbacks batched.
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1405
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    // Return the cached playback rate; zero until metadata is available.
    return metaDataAvailable() ? m_cachedRate : 0;
}
1413
double MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesLastModifiedTime() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    // Only available when targeting macOS 10.13 / iOS 11 or newer.
    return [m_avPlayerItem seekableTimeRangesLastModifiedTime];
#else
    return 0;
#endif
}
1422
double MediaPlayerPrivateAVFoundationObjC::liveUpdateInterval() const
{
#if (PLATFORM(MAC) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101300) || (PLATFORM(IOS) && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110000)
    // Only available when targeting macOS 10.13 / iOS 11 or newer.
    return [m_avPlayerItem liveUpdateInterval];
#else
    return 0;
#endif
}
1431
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    // Spectral keeps pitch constant across rate changes; Varispeed lets it shift.
    if (!m_avPlayerItem)
        return;

    [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1437
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    // Convert the cached loaded-ranges array into PlatformTimeRanges, skipping
    // invalid and empty CMTimeRanges.
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        bufferedRanges->add(PAL::toMediaTime(range.start), PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1452
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    // Return the earliest start among the cached seekable ranges, or zero when
    // there are no valid ranges.
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, PAL::toMediaTime(range.start));
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1472
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Lazily refresh the cached seekable ranges from the item, then return the
    // latest end time among the valid ones (zero when there are none).
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1490
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    // Return the latest end time among the cached loaded ranges, skipping
    // invalid and empty CMTimeRanges; zero when nothing has loaded yet.
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, PAL::toMediaTime(CMTimeRangeGetEnd(range)));
    }

    return latestEnd;
}
1509
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    // No metadata means no tracks to sum yet.
    if (!metaDataAvailable())
        return 0;

    // Return the memoized total when it has already been computed.
    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    // Accumulate the sample data length of every cached track.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];

    return m_cachedTotalBytes;
}
1523
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    // The parameter is taken by value, so move it into the member to avoid an
    // unnecessary retain/release pair on the underlying object.
    m_avAsset = WTFMove(asset);
}
1528
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    // Inspect the loading status of each metadata key we asked the asset to load;
    // the first non-loaded/failed/cancelled key determines the overall status.
    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];

        if (error)
            ERROR_LOG(LOGIDENTIFIER, "failed for ", [keyName UTF8String], ", error = ", [[error localizedDescription] UTF8String]);

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.

        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // When the player does not require a hardware-support check, treat all
    // tracks as playable without inspecting them.
    if (!player()->shouldCheckHardwareSupport())
        m_tracksArePlayable = true;

    // Otherwise compute (once) whether every track meets the hardware decode
    // requirements; any failing track marks the whole asset as not playable.
    if (!m_tracksArePlayable) {
        m_tracksArePlayable = true;
        for (AVAssetTrack *track in [m_avAsset tracks]) {
            if (!assetTrackMeetsHardwareDecodeRequirements(track, player()->mediaContentTypesRequiringHardwareSupport())) {
                m_tracksArePlayable = false;
                break;
            }
        }
    }

    // Playable only if AVFoundation reports the asset playable AND the tracks
    // passed the hardware check above.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue] && m_tracksArePlayable.value())
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1569
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    // No asset, no error to report.
    if (!m_avAsset)
        return 0;

    // Query the status of the "playable" key; the call reports any load error
    // through the out-parameter.
    NSError *loadError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&loadError];
    // Messaging nil returns 0, so a missing error yields code 0.
    return [loadError code];
}
1579
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext& context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // Defer observer callbacks while painting; the BEGIN/END_BLOCK_OBJC_EXCEPTIONS
    // macros keep Objective-C exceptions from unwinding through C++ frames.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Prefer the video-output path when it has a frame; otherwise fall back to
    // the image generator.
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1600
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext& context, const FloatRect& rect)
{
    // Nothing to paint without metadata, or when painting is disabled.
    if (!metaDataAvailable() || context.paintingDisabled())
        return;

    // paint() is best effort: skip it when we are already rendering to a layer,
    // and when no image generator or video output exists yet.
    if (currentRenderingMode() == MediaRenderingToLayer || !hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1616
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext& context, const FloatRect& rect)
{
    INFO_LOG(LOGIDENTIFIER);

    RetainPtr<CGImageRef> frameImage = createImageForTimeInRect(currentTime(), rect);
    if (!frameImage)
        return;

    // Flip the context vertically so the CGImage draws right side up.
    GraphicsContextStateSaver stateSaver(context);
    context.translate(rect.x(), rect.y() + rect.height());
    context.scale(FloatSize(1.0f, -1.0f));
    context.setImageInterpolationQuality(InterpolationLow);
    IntRect destinationRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context.platformContext(), CGRectMake(0, 0, destinationRect.width(), destinationRect.height()), frameImage.get());
}
1631
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    // Lazily create the image generator on first use.
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !RELEASE_LOG_DISABLED
    MonotonicTime creationStart = MonotonicTime::now();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // Snapshot the frame at the requested time, then convert the copy to sRGB.
    RetainPtr<CGImageRef> generatedImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> convertedImage = adoptCF(CGImageCreateCopyWithColorSpace(generatedImage.get(), sRGBColorSpaceRef()));

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating image took ", (MonotonicTime::now() - creationStart).seconds());
#endif

    return convertedImage;
}
1652
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String, ASCIICaseInsensitiveHash>& supportedTypes)
{
    // Report the MIME types AVFoundation can play, as cached by the singleton.
    supportedTypes = AVFoundationMIMETypeCache::singleton().types();
}
1656
1657
1658 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// FairPlay Streaming (both identifiers) and Clear Key are the supported key systems.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringASCIICase(keySystem, "com.apple.fps")
        || equalIgnoringASCIICase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringASCIICase(keySystem, "org.w3c.clearkey");
}
1665 #endif
1666
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    // This engine handles neither MSE nor MediaStream sources.
#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif
#if ENABLE(MEDIA_STREAM)
    if (parameters.isMediaStream)
        return MediaPlayer::IsNotSupported;
#endif

    auto containerType = parameters.type.containerType();
    if (isUnsupportedMIMEType(containerType))
        return MediaPlayer::IsNotSupported;

    // The container must appear in the static list or in AVFoundation's MIME type cache.
    if (!staticMIMETypeList().contains(containerType) && !AVFoundationMIMETypeCache::singleton().types().contains(containerType))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.type.codecs().isEmpty())
        return MediaPlayer::MayBeSupported;

    if (!contentTypeMeetsHardwareDecodeRequirements(parameters.type, parameters.contentTypesRequiringHardwareSupport))
        return MediaPlayer::IsNotSupported;

    // Ask AVFoundation about the full 'container; codecs="..."' string.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)containerType, (NSString *)parameters.type.parameter(ContentType::codecsParameter())];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1696
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (This function returns bool; the previous code returned the enum value
        // MediaPlayer::IsNotSupported here, which only worked because it is 0.)
        if (equalIgnoringASCIICase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringASCIICase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        // An empty MIME type is acceptable; a non-empty one must be supported.
        if (!mimeType.isEmpty() && isUnsupportedMIMEType(mimeType))
            return false;

        if (!mimeType.isEmpty() && !staticMIMETypeList().contains(mimeType) && !AVFoundationMIMETypeCache::singleton().types().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1722
1723 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1724 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Answers an AVAssetResourceLoadingRequest directly from in-memory key data,
// serving the byte range the loader asked for.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    // Describe the key payload if AVFoundation asked for content information.
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        // Clamp the requested range's end to the key's total length.
        long long start = [dataRequest currentOffset];
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // Reject ranges that fall outside the key data.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        // slice() takes ints; the asserts document that key data stays small.
        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1750 #endif
1751
// Returns true when WebKit will service this resource-loading request itself
// (so AVFoundation must wait), false to let loading proceed normally.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
    // FairPlay Streaming key requests arrive with the "skd" URL scheme.
    if (scheme == "skd") {
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        unsigned byteLength = initDataBuffer->byteLength();
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer.copyRef(), 0, byteLength);
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer.copyRef(), 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        RefPtr<Uint8Array> initData = Uint8Array::create(WTFMove(initDataBuffer), 0, byteLength);
        // If the page does not handle the key request, let AVFoundation proceed.
        if (!player()->keyNeeded(initData.get()))
            return false;
#endif
        // Park the request so it can be fulfilled once a key arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
        // With a CDM instance attached, the key session handles the request itself.
        if (m_cdmInstance)
            return false;

        RetainPtr<NSData> keyURIData = [keyURI dataUsingEncoding:NSUTF8StringEncoding allowLossyConversion:YES];
        auto keyURIBuffer = SharedBuffer::create(keyURIData.get());
        player()->initializationDataEncountered(ASCIILiteral("skd"), keyURIBuffer->tryCreateArrayBuffer());
#endif
        return true;
    }

#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    // Clear Key requests carry the key ID in the URL's resource specifier.
    if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        auto encodedKeyId = UTF8Encoding().encode(keyID, UnencodableHandling::URLEncodedEntities);

        auto initData = Uint8Array::create(encodedKeyId.size());
        initData->setRange(encodedKeyId.data(), encodedKeyId.size(), 0);

        // Answer immediately from the key cache when possible.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
    }
#endif
#endif

    // All other requests are serviced asynchronously by a dedicated resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1815
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // Stop the resource loader servicing this request, if one exists. (The URL
    // scheme previously computed here was never used and has been removed.)
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1825
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    // Forget the resource loader associated with this request.
    m_resourceLoaderMap.remove(avRequest);
}
1830 #endif
1831
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    // This engine requires both the AVFoundation and CoreMedia frameworks.
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1836
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    // Currently an identity mapping in both branches; see the FIXME below.
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1845
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
    // No time caching on iOS or on macOS 10.10+; older macOS caches up to 5 seconds.
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1854
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayerManager->videoFullscreenLayer())
        return;

    // Apply the new gravity inside a transaction with implicit animations disabled.
    NSString *newGravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:newGravity];
    [CATransaction commit];
}
1871
// Returns the first track in the array whose isEnabled flag is set, or nil.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger enabledIndex = [tracks indexOfObjectPassingTest:^(id track, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(track) isEnabled];
    }];
    return enabledIndex == NSNotFound ? nil : [tracks objectAtIndex:enabledIndex];
}
1881
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so a change can be detected below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristics-changed notifications until the end of this update.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
        auto size = firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize();
        // For videos with rotation tag set, the transformation above might return a CGSize instance with negative width or height.
        // See https://bugs.webkit.org/show_bug.cgi?id=172648.
        if (size.width() < 0)
            size.setWidth(-size.width());
        if (size.height() < 0)
            size.setHeight(-size.height());
        presentationSizeDidChange(size);
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify every enabled cached track by its media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option also counts as an audio/video track.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media selection group for caption detection when available.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    INFO_LOG(LOGIDENTIFIER, "has video = ", hasVideo(), ", has audio = ", hasAudio(), ", has captions = ", hasClosedCaptions());

    sizeChanged();

    // A change in the primary audio language is a characteristics change.
    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1992
1993 #if ENABLE(VIDEO_TRACK)
1994
// Generic diff of AVPlayerItemTracks: compares the tracks of the given media
// type in |tracks| against the previously known wrapper items in |oldItems|,
// creates wrappers for newly appeared tracks, rebuilds |oldItems|, and
// notifies the player of each removal and addition.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // New set: the tracks in the array whose media type matches trackType.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Old set: the AVPlayerItemTracks backing the existing wrapper items.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the old items into surviving and removed, then create wrapper
    // items for each added track.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player only after oldItems is in its final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2038
2039 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2040
// Media-selection-group flavor of the track diff: refreshes the group's
// options, computes which options appeared or disappeared relative to the
// existing wrapper items, rebuilds |oldItems|, and notifies the player.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Collect the group's current options that are backed by an AVMediaSelectionOption.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        newSelectionOptions.add(option);
    }

    // Collect the options backing the existing wrapper items.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition old items into surviving and removed; items with no backing
    // option are treated as removed.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (!oldItem->mediaSelectionOption())
            removedItems.append(oldItem);
        else if (removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player only after oldItems is in its final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(*removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(*addedItem);
}
2100
2101 #endif
2102
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer diffing by media selection group when one is available, creating
    // the audible group lazily on first use.
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
    else
#endif
        determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

    // Refresh the properties of every surviving audio track wrapper.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_audioTracks.size());
#endif
}
2129
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !RELEASE_LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Also diff against the visual media selection group, created lazily.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh the properties of every surviving video track wrapper. (This
    // previously iterated m_audioTracks — a copy/paste error.)
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !RELEASE_LOG_DISABLED
    INFO_LOG(LOGIDENTIFIER, "track count was ", count, ", is ", m_videoTracks.size());
#endif
}
2155
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
    // Delegated to the fullscreen layer manager.
    return m_videoFullscreenLayerManager->requiresTextTrackRepresentation();
}
2160
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
    // Delegated to the fullscreen layer manager.
    m_videoFullscreenLayerManager->syncTextTrackBounds();
}
2165
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
    // Delegated to the fullscreen layer manager.
    m_videoFullscreenLayerManager->setTextTrackRepresentation(representation);
}
2170
2171 #endif // ENABLE(VIDEO_TRACK)
2172
2173 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
2174
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    // Lazily create the provider and point it at the first enabled audio track.
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
    return m_provider.get();
}
2183
2184 #endif
2185
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    // Publish the cached presentation size as the new natural size.
    setNaturalSize(m_cachedPresentationSize);
}
2193
void MediaPlayerPrivateAVFoundationObjC::resolvedURLChanged()
{
    // Mirror the asset's resolved URL; use an empty URL when there is no asset.
    setResolvedURL(m_avAsset ? URL([m_avAsset resolvedURL]) : URL());
}
2198
bool MediaPlayerPrivateAVFoundationObjC::didPassCORSAccessCheck() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED > 101100
    AVAssetResourceLoader *resourceLoader = m_avAsset.get().resourceLoader;
    // Requires the NSURLSession loading path and a loader exposing -URLSession.
    if (!DeprecatedGlobalSettings::isAVFoundationNSURLSessionEnabled()
        || ![resourceLoader respondsToSelector:@selector(URLSession)])
        return false;

    // Only WebKit's own WebCoreNSURLSession can vouch for CORS checks.
    WebCoreNSURLSession *session = (WebCoreNSURLSession *)resourceLoader.URLSession;
    if ([session isKindOfClass:[WebCoreNSURLSession class]])
        return session.didPassCORSAccessChecks;
#endif
    return false;
}
2213
2214 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
2215
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    INFO_LOG(LOGIDENTIFIER);

    // Nothing to do without a player item, or if an output already exists.
    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // VideoToolbox path: nil attributes let the output pick the pixel format.
    NSDictionary* attributes = nil;
#else
    // Otherwise request 32BGRA pixel buffers explicitly.
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Deliver output callbacks on the shared pull-delegate queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];
}
2235
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    // Detach the output from the player item before dropping our reference.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    INFO_LOG(LOGIDENTIFIER);

    // Use nil rather than the integer literal 0 to clear an Objective-C object
    // reference held in a RetainPtr.
    m_videoOutput = nil;
}
2248
// Pulls the newest pixel buffer from the video output into m_lastPixelBuffer.
// Returns true only when a new buffer was copied.
bool MediaPlayerPrivateAVFoundationObjC::updateLastPixelBuffer()
{
    if (!m_avPlayerItem)
        return false;

    // Make sure a video output exists to pull pixel buffers from.
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    // Bail if no new frame has been produced for the current item time.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return false;

    m_lastPixelBuffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    // Invalidate the cached image; it no longer matches the new pixel buffer.
    m_lastImage = nullptr;
    return true;
}
2267
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously converted image counts as an available frame.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2281
void MediaPlayerPrivateAVFoundationObjC::updateLastImage(UpdateType type)
{
#if HAVE(CORE_VIDEO)
    if (!m_avPlayerItem)
        return;

    // For synchronous updates with no image and no pending frame, block until
    // the video output reports media data.
    if (type == UpdateType::UpdateSynchronously && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (!updateLastPixelBuffer() && (m_lastImage || !m_lastPixelBuffer))
        return;

    // Lazily create the conformer that converts pixel buffers to CGImages.
    if (!m_pixelBufferConformer) {
#if USE(VIDEOTOOLBOX)
        NSDictionary *attributes = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#else
        NSDictionary *attributes = nil;
#endif
        m_pixelBufferConformer = std::make_unique<PixelBufferConformerCV>((CFDictionaryRef)attributes);
    }

#if !RELEASE_LOG_DISABLED
    MonotonicTime start = MonotonicTime::now();
#endif

    m_lastImage = m_pixelBufferConformer->createImageFromPixelBuffer(m_lastPixelBuffer.get());

#if !RELEASE_LOG_DISABLED
    DEBUG_LOG(LOGIDENTIFIER, "creating buffer took ", (MonotonicTime::now() - start).seconds());
#endif
#endif // HAVE(CORE_VIDEO)
}
2317
// Paints the current frame into the given graphics context, honoring the
// video track's preferredTransform (e.g. rotation baked in by the recorder).
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext& context, const FloatRect& outputRect)
{
    updateLastImage(UpdateType::UpdateSynchronously);
    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    INFO_LOG(LOGIDENTIFIER);

    GraphicsContextStateSaver stateSaver(context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect through the inverse so that, after the CTM is
    // concatenated with videoTransform below, the image lands in outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().value_or(AffineTransform()).mapRect(outputRect);

    context.concatCTM(videoTransform);
    context.drawNativeImage(m_lastImage.get(), imageRect.size(), transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2344
// Copies the most recent video frame into the caller-supplied GL texture.
// Returns false when no pixel buffer is available for the current time.
bool MediaPlayerPrivateAVFoundationObjC::copyVideoTextureToPlatformTexture(GraphicsContext3D* context, Platform3DObject outputTexture, GC3Denum outputTarget, GC3Dint level, GC3Denum internalFormat, GC3Denum format, GC3Denum type, bool premultiplyAlpha, bool flipY)
{
    ASSERT(context);

    updateLastPixelBuffer();
    if (!m_lastPixelBuffer)
        return false;

    // The copier is created once and reused for subsequent frames.
    if (!m_videoTextureCopier)
        m_videoTextureCopier = std::make_unique<VideoTextureCopierCV>(*context);

    size_t bufferWidth = CVPixelBufferGetWidth(m_lastPixelBuffer.get());
    size_t bufferHeight = CVPixelBufferGetHeight(m_lastPixelBuffer.get());

    return m_videoTextureCopier->copyImageToPlatformTexture(m_lastPixelBuffer.get(), bufferWidth, bufferHeight, outputTexture, outputTarget, level, internalFormat, format, type, premultiplyAlpha, flipY);
}
2361
// Refreshes and returns the cached native image for the current playback time.
// May be null if no frame has been produced yet.
NativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage;
}
2367
// Blocks the calling thread until the video output reports that new media data
// is available (via outputMediaDataWillChange signaling the semaphore), or
// until a one-second timeout elapses. Used for synchronous paints when no
// frame has been produced yet.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // The semaphore is created lazily; it is signaled from the pull-delegate queue.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for up to 1 second; dispatch_semaphore_wait returns non-zero on timeout.
    // Use DISPATCH_TIME_NOW rather than a bare 0 for the time base.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC));

    if (result)
        ERROR_LOG(LOGIDENTIFIER, "timed out");
}
2381
// AVPlayerItemVideoOutput delegate callback (runs on the pull-delegate queue):
// wakes any thread blocked in waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutputType *)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2386
2387 #endif
2388
2389 #if ENABLE(LEGACY_ENCRYPTED_MEDIA)
2390
// Removes and returns the pending resource-loading request registered for the
// given key URI, or a null RetainPtr if none is outstanding.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2395
// Re-examines every outstanding key request now that a new key is available,
// fulfilling (and then dropping) each request whose key data is cached.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> satisfiedKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        satisfiedKeyIds.append(entry.key);
    }

    // Remove fulfilled entries after the walk so the map is not mutated
    // while being iterated.
    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2415
// Clears the weak reference to the legacy CDM session; the session being
// removed must be the one currently associated with this player.
void MediaPlayerPrivateAVFoundationObjC::removeSession(LegacyCDMSession& session)
{
    ASSERT_UNUSED(session, &session == m_session);
    m_session = nullptr;
}
2421
// Creates a legacy-EME CDM session for a supported key system. The player
// keeps only a weak pointer; ownership passes to the caller.
std::unique_ptr<LegacyCDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem, LegacyCDMSessionClient* client)
{
    if (!keySystemIsSupported(keySystem))
        return nullptr;
    auto session = std::make_unique<CDMSessionAVFoundationObjC>(this, client);
    m_session = session->createWeakPtr();
    return WTFMove(session);
}
2430 #endif
2431
2432 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(LEGACY_ENCRYPTED_MEDIA)
// Invoked when output protection (e.g. HDCP) status changes. Legacy EME
// sessions receive an NSError carrying the four-char code 'HDCP'; modern EME
// forwards the state to the attached CDM instance.
void MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged(bool newValue)
{
#if ENABLE(LEGACY_ENCRYPTED_MEDIA)
    if (m_session && newValue)
        m_session->playerDidReceiveError([NSError errorWithDomain:@"com.apple.WebKit" code:'HDCP' userInfo:nil]);
#endif

#if ENABLE(ENCRYPTED_MEDIA) && HAVE(AVCONTENTKEYSESSION)
    if (m_cdmInstance)
        m_cdmInstance->outputObscuredDueToInsufficientExternalProtectionChanged(newValue);
#elif !ENABLE(LEGACY_ENCRYPTED_MEDIA)
    UNUSED_PARAM(newValue);
#endif
}
2447 #endif
2448
2449 #if ENABLE(ENCRYPTED_MEDIA)
// Attaches a modern-EME CDM instance. Only FairPlay Streaming instances are
// accepted; any previously attached instance is detached first, then the
// asset is registered as a content key recipient with the new key session.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceAttached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    if (!is<CDMInstanceFairPlayStreamingAVFObjC>(instance))
        return;

    auto& fpsInstance = downcast<CDMInstanceFairPlayStreamingAVFObjC>(instance);
    if (&fpsInstance == m_cdmInstance)
        return;

    if (m_cdmInstance)
        cdmInstanceDetached(*m_cdmInstance);

    m_cdmInstance = &fpsInstance;
    [m_cdmInstance->contentKeySession() addContentKeyRecipient:m_avAsset.get()];
#else
    UNUSED_PARAM(instance);
#endif
}
2469
// Detaches the currently attached CDM instance, unregistering the asset from
// its content key session. The argument must be the attached instance.
void MediaPlayerPrivateAVFoundationObjC::cdmInstanceDetached(CDMInstance& instance)
{
#if HAVE(AVCONTENTKEYSESSION)
    ASSERT_UNUSED(instance, m_cdmInstance && m_cdmInstance == &instance);
    [m_cdmInstance->contentKeySession() removeContentKeyRecipient:m_avAsset.get()];
    m_cdmInstance = nullptr;
#else
    UNUSED_PARAM(instance);
#endif
}
2480
// With a CDM instance attached, pending key requests can simply be completed;
// key delivery is now handled by the content key session.
void MediaPlayerPrivateAVFoundationObjC::attemptToDecryptWithInstance(CDMInstance&)
{
    auto pendingRequests = WTFMove(m_keyURIToRequestMap);
    for (auto& request : pendingRequests.values())
        [request finishLoading];
}
2487 #endif
2488
2489 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
2490
// Rebuilds the list of legacy closed-caption tracks from the cached
// AVPlayerItemTracks (used only when AVPlayerItemLegibleOutput is
// unavailable). Tracks that disappeared are reported through
// processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every existing track was removed; matches found below
    // are taken back out of this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks (not m_textTracks): once entries have
            // been removed from this list the two vectors no longer line up, and
            // indexing the original list could compare — and static_cast — the
            // wrong track. This mirrors processMediaSelectionOptions().
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2525
2526 #endif
2527
// Returns the asset's audible tracks, or nil when there is no asset or its
// "tracks" key has not finished loading (querying earlier could block).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset)
        return nil;

    if ([m_avAsset statusOfValueForKey:@"tracks" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2538
2539 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2540
// True once the asset's media-selection metadata has loaded, making the
// safeMediaSelectionGroupFor*Media() accessors safe to call without blocking.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    if (!m_avAsset)
        return false;

    return [m_avAsset statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2551
// Returns the legible (caption/subtitle) selection group, or nil when the
// selection metadata has not loaded yet.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    if (!hasLoadedMediaSelectionGroups())
        return nil;
    return [m_avAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
}
2559
// Returns the audible selection group, or nil when the selection metadata has
// not loaded yet.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    if (!hasLoadedMediaSelectionGroups())
        return nil;
    return [m_avAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
}
2567
// Returns the visual selection group, or nil when the selection metadata has
// not loaded yet.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    if (!hasLoadedMediaSelectionGroups())
        return nil;
    return [m_avAsset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual];
}
2575
// Synchronizes WebKit's text track list with the asset's legible media
// selection options (in-band subtitles/captions plus out-of-band tracks
// previously handed to AVFoundation). Options not seen before become new
// tracks; tracks whose options disappeared are reported through
// processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        INFO_LOG(LOGIDENTIFIER, "no mediaSelectionGroup");
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every existing track was removed; matches below are taken back out.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are matched elsewhere (by player item track).
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2631
// Lazily creates the in-band metadata text track used for timed-metadata data
// cues and registers it with the MediaPlayer. Idempotent.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(*m_metadataTrack);
}
2641
// Forwards cue data from the legible output to the currently selected text
// track. Dropped silently when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    ASSERT(time >= MediaTime::zeroTime());

    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2651
// Discards any partially built cue state on the current text track, e.g.
// after a seek invalidates in-flight cues.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    INFO_LOG(LOGIDENTIFIER);

    if (!m_currentTextTrack)
        return;
    
    m_currentTextTrack->resetCueValues();
}
2661
2662 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2663
// Selects (or deselects, when track is null) the active text track. Legacy
// closed captions use the deprecated AVPlayer closed-caption switch; all other
// categories are selected through the legible media selection group.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    INFO_LOG(LOGIDENTIFIER, "selecting track with language ", track ? track->language() : "");

    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#pragma clang diagnostic pop
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
#pragma clang diagnostic pop
    }

}
2698
// Returns the locale identifier of the primary audio track, caching the result
// in m_languageOfPrimaryAudioTrack. Prefers the currently selected audible
// media option; falls back to the sole audio track's language when exactly one
// exists, and to the empty string otherwise.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
#pragma clang diagnostic pop
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        INFO_LOG(LOGIDENTIFIER, "language of selected audible option ", m_languageOfPrimaryAudioTrack);

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        INFO_LOG(LOGIDENTIFIER, tracks ? [tracks count] : 0, " audio tracks, returning empty");
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

    INFO_LOG(LOGIDENTIFIER, "single audio track has language \"", m_languageOfPrimaryAudioTrack, "\"");

    return m_languageOfPrimaryAudioTrack;
}
2738
2739 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Whether playback is currently routed to a wireless target. On iOS this is
// the AVPlayer's live externalPlaybackActive state; on macOS it depends on the
// kind of playback target configured via setWirelessPlaybackTarget().
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    bool wirelessTarget = false;

#if !PLATFORM(IOS)
    if (m_playbackTarget) {
        if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation)
            wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
        else
            wirelessTarget = m_shouldPlayToPlaybackTarget && m_playbackTarget->hasActiveRoute();
    }
#else
    wirelessTarget = m_avPlayer && m_avPlayer.get().externalPlaybackActive;
#endif

    INFO_LOG(LOGIDENTIFIER, "- ", wirelessTarget);

    return wirelessTarget;
}
2759
// Maps the AVPlayer's external playback state to WebKit's wireless target
// type. On iOS this reflects the live AVPlayer state; on other platforms an
// existing player is assumed to target AirPlay.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    if (!AVFoundationLibrary())
        return MediaPlayer::TargetTypeNone;

    switch ([m_avPlayer externalPlaybackType]) {
    case AVPlayerExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case AVPlayerExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case AVPlayerExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2785     
2786 #if PLATFORM(IOS)
// Returns a user-visible name for the AirPlay route the player is using, or
// nil when the player is not in AirPlay external playback. Speaker/HDMI
// routes get renamed to the localized device model (optionally prefixed with
// it) since their raw route names are not meaningful to users.
// NOTE(review): "exernal" is a typo for "external"; renaming would also
// require updating the call in wirelessPlaybackTargetName().
static NSString *exernalDeviceDisplayNameForPlayer(AVPlayerType *player)
{
#if HAVE(CELESTIAL)
    NSString *displayName = nil;

    if (!AVFoundationLibrary())
        return nil;

    if (player.externalPlaybackType != AVPlayerExternalPlaybackTypeAirPlay)
        return nil;

    NSArray *pickableRoutes = CFBridgingRelease(MRMediaRemoteCopyPickableRoutes());
    if (!pickableRoutes.count)
        return nil;

    for (NSDictionary *pickableRoute in pickableRoutes) {
        // Only the currently picked route is of interest.
        if (![pickableRoute[AVController_RouteDescriptionKey_RouteCurrentlyPicked] boolValue])
            continue;

        displayName = pickableRoute[AVController_RouteDescriptionKey_RouteName];

        NSString *routeName = pickableRoute[AVController_RouteDescriptionKey_AVAudioRouteName];
        if (![routeName isEqualToString:@"Speaker"] && ![routeName isEqualToString:@"HDMIOutput"])
            break;

        // The route is a speaker or HDMI out, override the name to be the localized device model.
        NSString *localizedDeviceModel = [[UIDevice currentDevice] localizedModel];

        // In cases where a route with that name already exists, prefix the name with the model.
        BOOL includeLocalizedDeviceModelName = NO;
        for (NSDictionary *otherRoute in pickableRoutes) {
            if (otherRoute == pickableRoute)
                continue;

            if ([otherRoute[AVController_RouteDescriptionKey_RouteName] rangeOfString:displayName].location != NSNotFound) {
                includeLocalizedDeviceModelName = YES;
                break;
            }
        }

        if (includeLocalizedDeviceModelName)
            displayName =  [NSString stringWithFormat:@"%@ %@", localizedDeviceModel, displayName];
        else
            displayName = localizedDeviceModel;

        break;
    }

    return displayName;
#else
    UNUSED_PARAM(player);
    return nil;
#endif
}
2841 #endif
2842
// Returns the user-visible name of the current wireless playback target, or
// the empty/null string when none is known.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_playbackTarget)
        wirelessTargetName = m_playbackTarget->deviceName();
#else
    wirelessTargetName = exernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif

    return wirelessTargetName;
}
2858
// Whether wireless (external) video playback is disallowed. Refreshes the
// cached m_allowsWirelessVideoPlayback flag from the AVPlayer when one exists;
// otherwise answers from the cache.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    INFO_LOG(LOGIDENTIFIER, "- ", !m_allowsWirelessVideoPlayback);

    return !m_allowsWirelessVideoPlayback;
}
2869
// Mirrors the "wireless video playback disabled" flag into the AVPlayer's
// allowsExternalPlayback property, caching it for when no player exists yet.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    INFO_LOG(LOGIDENTIFIER, "- ", disabled);

    bool allowsExternalPlayback = !disabled;
    m_allowsWirelessVideoPlayback = allowsExternalPlayback;
    if (!m_avPlayer)
        return;

    // Suppress notification callbacks while poking AVFoundation.
    setDelayCallbacks(true);
    [m_avPlayer setAllowsExternalPlayback:allowsExternalPlayback];
    setDelayCallbacks(false);
}
2881
2882 #if !PLATFORM(IOS)
2883
// Stores the new playback target. For AVFoundation targets the AVOutputContext
// is extracted for later use by setShouldPlayToPlaybackTarget(); a target with
// no active route also cancels any outstanding play-to-target request.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    m_playbackTarget = WTFMove(target);

    m_outputContext = m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation ? toMediaPlaybackTargetMac(m_playbackTarget.get())->outputContext() : nullptr;

    INFO_LOG(LOGIDENTIFIER);

    if (!m_playbackTarget->hasActiveRoute())
        setShouldPlayToPlaybackTarget(false);
}
2895
// Starts or stops routing playback to the configured target. AVFoundation
// targets are driven by setting the AVPlayer's outputContext; Mock targets
// (used by tests) just asynchronously notify that the wireless state changed.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    if (m_shouldPlayToPlaybackTarget == shouldPlay)
        return;

    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_playbackTarget)
        return;

    INFO_LOG(LOGIDENTIFIER, "- ", shouldPlay);

    if (m_playbackTarget->targetType() == MediaPlaybackTarget::AVFoundation) {
        AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;

        if (!m_avPlayer)
            return;

        RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;
        // Avoid touching AVFoundation when the context would not actually change.
        if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
            return;

        setDelayCallbacks(true);
        m_avPlayer.get().outputContext = newContext;
        setDelayCallbacks(false);

        return;
    }

    ASSERT(m_playbackTarget->targetType() == MediaPlaybackTarget::Mock);

    setDelayCallbacks(true);
    auto weakThis = createWeakPtr();
    // Deliver the change on the main thread; the weak pointer guards against
    // this player being destroyed before the notification runs.
    scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis] {
        if (!weakThis)
            return;
        weakThis->playbackTargetIsWirelessDidChange();
    }));
    setDelayCallbacks(false);
}
2936
2937 #endif // !PLATFORM(IOS)
2938
// iOS only: external playback while an external screen is active is enabled
// only when the element is in standard fullscreen mode.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
#if PLATFORM(IOS)
    if (!m_avPlayer)
        return;

    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeStandard];
#endif
}
2948
2949 #endif
2950
// KVO handler: caches the AVPlayerItem status and re-evaluates the player's
// aggregate ready/network state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;
    updateStates();
}
2957
// KVO will-change hook: counts an in-flight status change so updateStates()
// is deferred until the matching DidChange callback arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2962
// KVO did-change hook: caches the new value and, once all outstanding
// will/did pairs have balanced, re-evaluates the player state.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2971
// KVO will-change hook for playbackBufferEmpty; see
// playbackLikelyToKeepUpWillChange() for the counting scheme.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
2976
// KVO did-change hook: caches the buffer-empty flag and updates state once the
// will/did counter has balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2985
// KVO will-change hook for playbackBufferFull; see
// playbackLikelyToKeepUpWillChange() for the counting scheme.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
2990
// KVO did-change hook: caches the buffer-full flag and updates state once the
// will/did counter has balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2999
// KVO handler: caches the new seekable ranges and propagates the change.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = WTFMove(seekableRanges);

    seekableTimeRangesChanged();
    updateStates();
}
3007
// KVO handler: caches the new loaded (buffered) ranges and propagates the change.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = WTFMove(loadedRanges);

    loadedTimeRangesChanged();
    updateStates();
}
3015
// KVO handler for isReadyForDisplay. If the first frame arrives before any
// video track was reported, treat it as a track change so hasVideo() is
// re-evaluated.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
3023
// KVO handler for a player item track's "enabled" flag; re-runs track
// processing and state evaluation (the new value itself is not needed).
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
3029
// Toggles buffering by attaching or detaching the AVPlayerItem from the
// AVPlayer; a player with no current item stops fetching media data.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    INFO_LOG(LOGIDENTIFIER, "- ", shouldBuffer);

    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;

    if (!m_avPlayer)
        return;

    if (shouldBuffer)
        setAVPlayerItem(m_avPlayerItem.get());
    else
        setAVPlayerItem(nil);
}
3044
3045 #if ENABLE(DATACUE_VALUE)
3046
// Maps an AVFoundation metadata key space to the metadata type identifier
// WebKit exposes on data cues. Unknown key spaces yield the empty atom.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserDataType("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserDataType("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadataType("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadataType("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3MetadataType("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserDataType;
    // AVMetadataKeySpaceISOUserData may be null; check before comparing.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserDataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3MetadataType;

    return emptyAtom();
}
3068
3069 #endif
3070
// Handles a batch of timed metadata delivered for mediaTime. Caches the raw
// items, then (when DATACUE_VALUE is enabled) converts each AVMetadataItem
// into a data cue on the metadata text track.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    DEBUG_LOG(LOGIDENTIFIER, "adding ", m_currentMetaData ? [m_currentMetaData.get() count] : 0, " at time ", mediaTime);

#if ENABLE(DATACUE_VALUE)
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        // An empty batch only closes out cues that are still open.
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(PAL::toMediaTime(item.time), MediaTime::zeroTime());
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = std::max(PAL::toMediaTime(item.time), MediaTime::zeroTime());
        // Items without a valid duration become open-ended cues, closed by a
        // later batch via updatePendingCueEndTimes().
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + PAL::toMediaTime(item.duration);

        AtomicString type = nullAtom();
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3112
// Invoked when the player item's "tracks" property changes. Rebuilds m_cachedTracks
// and keeps the "enabled" KVO registrations on the cached tracks in sync.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Stop observing the tracks we are about to drop from the cache.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    // Keep a track if it belongs to the asset itself, or if it is a streaming track
    // with no corresponding media selection group.
    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Begin observing the new set of cached tracks.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The track list changed, so the cached total-bytes value is stale.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3150
// KVO handler for the player item's "hasEnabledAudio" property.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3158
// KVO handler for the player item's "presentationSize" property.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3166
// KVO handler for the player item's "duration" property. Caches the new value and
// invalidates any previously cached duration.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3173
// KVO handler for the AVPlayer's "rate" property.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3181
3182 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
3183
// KVO handler for the AVPlayer's "externalPlaybackActive" / "allowsExternalPlayback"
// properties (wireless playback target state).
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3188
3189 #endif
3190
// KVO handler for the player item's "canPlayFastForward" property.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3195
// KVO handler for the player item's "canPlayFastReverse" property.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3200
// Asks the AVPlayer to prevent (or re-allow) device sleep during video playback.
// The underlying SPI is only compiled in on iOS device builds; elsewhere this is a no-op.
void MediaPlayerPrivateAVFoundationObjC::setShouldDisableSleep(bool flag)
{
#if PLATFORM(IOS) && !PLATFORM(IOS_SIMULATOR)
    [m_avPlayer _setPreventsSleepDuringVideoPlayback:flag];
#else
    UNUSED_PARAM(flag);
#endif
}
3209
// AVAsset property keys whose values are loaded asynchronously before the asset is used.
// The array is created once and intentionally never released (lives for the process).
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"duration",
        @"naturalSize",
        @"preferredTransform",
        @"preferredVolume",
        @"preferredRate",
        @"playable",
        @"resolvedURL",
        @"tracks",
        @"availableMediaCharacteristicsWithMediaSelectionOptions",
    nil];
    return keys;
}
3225
// Key paths observed via KVO on the AVPlayerItem; these match the key-path strings
// dispatched in -[WebCoreAVFMovieObserver observeValueForKeyPath:...].
// The array is created once and intentionally never released.
NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        @"timedMetadata",
        @"canPlayFastForward",
        @"canPlayFastReverse",
    nil];
    return keys;
}
3246
// AVAssetTrack property keys requested for each track of the asset.
// The array is created once and intentionally never released.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
3252
// Key paths observed via KVO on the AVPlayer. The wireless-playback and
// content-protection keys are only observed when the corresponding features are
// compiled in. The array is created once and intentionally never released.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        @"externalPlaybackActive",
        @"allowsExternalPlayback",
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
        @"outputObscuredDueToInsufficientExternalProtection",
#endif
    nil];
    return keys;
}
3267 } // namespace WebCore
3268
3269 @implementation WebCoreAVFMovieObserver
3270
// Initializes the observer with its owning player. The callback pointer is stored
// unretained; -disconnect clears it before the owner goes away.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
3279
// Severs the link back to the owning MediaPlayerPrivateAVFoundationObjC and cancels
// any pending performSelector requests targeting this observer.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = nil;
}
3285
// Forwards asset-metadata-loaded completion to the owner (if still connected) as a
// main-thread notification.
- (void)metadataLoaded
{
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}
3292
// Notification handler fired when the player item plays to its end; forwards
// ItemDidPlayToEndTime to the owner (if still connected) on the main thread.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}
3300
// Central KVO dispatch for the movie observer. The observation context identifies which
// kind of object changed (player layer, player item track, player item, or player);
// the key path selects the member function to run. The bound function is scheduled on
// the main thread, guarded by a WeakPtr in case the player is destroyed first.
//
// Fix: the keyPath parameter was previously declared without a type (implicitly `id`);
// it is now typed NSString* to match NSObject's canonical KVO override, restoring
// compile-time checking of the many NSString sends below. The selector is unchanged.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // When NSKeyValueObservingOptionPrior is in effect, a "prior" callback fires before
    // the change; those are routed to the *WillChange handlers below.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];
    bool shouldLogValue = !willChange;
    WTF::Function<void ()> function;

    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        // A value is about to change for an AVPlayerItem.
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"]) {
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<id>(newValue));
            shouldLogValue = false;
        } else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"]) {
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
            shouldLogValue = false;
        } else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, PAL::toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time (clamped to zero) so cue timing is
            // computed relative to when the metadata was observed.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime());
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
            shouldLogValue = false;
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
#if ENABLE(LEGACY_ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA)
        else if ([keyPath isEqualToString:@"outputObscuredDueToInsufficientExternalProtection"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::outputObscuredDueToInsufficientExternalProtectionChanged, m_callback, [newValue boolValue]);
#endif
    }

#if !RELEASE_LOG_DISABLED
    // The time-range keys change constantly; skip them to keep the debug log readable.
    if (m_callback->logger().willLog(m_callback->logChannel(), WTFLogLevelDebug) && !([keyPath isEqualToString:@"loadedTimeRanges"] || [keyPath isEqualToString:@"seekableTimeRanges"])) {
        auto identifier = Logger::LogSiteIdentifier("MediaPlayerPrivateAVFoundation", "observeValueForKeyPath", m_callback->logIdentifier());

        if (shouldLogValue) {
            if ([keyPath isEqualToString:@"duration"])
                m_callback->logger().debug(m_callback->logChannel(), identifier, "did change '", [keyPath UTF8String], "' to ", PAL::toMediaTime([newValue CMTimeValue]));
            else {
                RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
                m_callback->logger().debug(m_callback->logChannel(), identifier, "did change '", [keyPath UTF8String], "' to ", [valueString.get() UTF8String]);
            }
        } else
            m_callback->logger().debug(m_callback->logChannel(), identifier, willChange ? "will" : "did", " change '", [keyPath UTF8String], "'");
    }
#endif

    if (!function)
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function = WTFMove(function)]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}
3413
3414 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
3415
// AVPlayerItemLegibleOutput delegate callback delivering in-band caption strings.
// Retains self and the arrays in the lambda captures so they survive the hop to the
// main thread, where the cue is processed (item times clamped to zero).
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    callOnMainThread([protectedSelf = RetainPtr<WebCoreAVFMovieObserver>(self), protectedStrings = RetainPtr<NSArray>(strings), protectedNativeSamples = RetainPtr<NSArray>(nativeSamples), itemTime] {
        MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
        if (!callback)
            return;
        callback->processCue(protectedStrings.get(), protectedNativeSamples.get(), std::max(PAL::toMediaTime(itemTime), MediaTime::zeroTime()));
    });
}
3435
// AVPlayerItemOutput delegate callback: the output's sample sequence was flushed, so
// tell the owner (on the main thread) to discard its queued cues.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;

    callOnMainThread([protectedSelf = RetainPtr<WebCoreAVFMovieObserver>(self)] {
        MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
        if (!callback)
            return;
        callback->flushCues();
    });
}
3448
3449 #endif
3450
3451 @end
3452
3453 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
3454
3455 @implementation WebCoreAVFLoaderDelegate
3456
// Initializes the loader delegate with its owning player. The callback pointer is
// stored unretained and is cleared by the owner before destruction.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
3465
// AVAssetResourceLoader delegate: asked whether WebKit will service this resource
// request. Answers YES immediately and resolves the request asynchronously on the main
// thread; the request is finished with an error if the owner is gone or declines it.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    callOnMainThread([protectedSelf = RetainPtr<WebCoreAVFLoaderDelegate>(self), protectedLoadingRequest = RetainPtr<AVAssetResourceLoadingRequest>(loadingRequest)] {
        MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
        if (!callback || !callback->shouldWaitForLoadingOfResource(protectedLoadingRequest.get()))
            [protectedLoadingRequest finishLoadingWithError:nil];
    });

    return YES;
}
3487
// Authentication challenges are not expected to be routed through this delegate;
// reaching this method indicates a programming error (see the assertion).
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    UNUSED_PARAM(challenge);
    ASSERT_NOT_REACHED();
    return NO;
}
3495
3496 - (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
3497 {
3498     UNUSED_PARAM(resourceLoader);
3499     if (!m_callback)
3500         return;
3501
3502     RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
3503     RetainPtr<AVAssetResourceLoadingRequest> protectedLoadingRequest = loadingRequest;
3504     callOnMainThread([protectedSelf = WTFMove(protectedSelf), protectedLoadingRequest = WTFMove(protectedLoadingRequest)] {
3505         MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
3506         if (callback)
3507             callback->didCancelLoadingRequest(protectedLoadingRequest.get());