[Mac] short-circuit MIME type lookup when possible
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationSPI.h"
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioSourceProviderAVFObjC.h"
34 #import "AudioTrackPrivateAVFObjC.h"
35 #import "AuthenticationChallenge.h"
36 #import "BlockExceptions.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "FloatConversion.h"
41 #import "FloatConversion.h"
42 #import "GraphicsContext.h"
43 #import "GraphicsContextCG.h"
44 #import "InbandMetadataTextTrackPrivateAVF.h"
45 #import "InbandTextTrackPrivateAVFObjC.h"
46 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
47 #import "OutOfBandTextTrackPrivateAVF.h"
48 #import "URL.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTargetMac.h"
51 #import "MediaSelectionGroupAVFObjC.h"
52 #import "MediaTimeAVFoundation.h"
53 #import "PlatformTimeRanges.h"
54 #import "QuartzCoreSPI.h"
55 #import "SecurityOrigin.h"
56 #import "SerializedPlatformRepresentationMac.h"
57 #import "TextEncoding.h"
58 #import "TextTrackRepresentation.h"
59 #import "UUID.h"
60 #import "VideoTrackPrivateAVFObjC.h"
61 #import "WebCoreAVFResourceLoader.h"
62 #import "WebCoreCALayerExtras.h"
63 #import "WebCoreSystemInterface.h"
64 #import <functional>
65 #import <objc/runtime.h>
66 #import <runtime/DataView.h>
67 #import <runtime/JSCInlines.h>
68 #import <runtime/TypedArrayInlines.h>
69 #import <runtime/Uint16Array.h>
70 #import <runtime/Uint32Array.h>
71 #import <runtime/Uint8Array.h>
72 #import <wtf/CurrentTime.h>
73 #import <wtf/ListHashSet.h>
74 #import <wtf/NeverDestroyed.h>
75 #import <wtf/text/CString.h>
76 #import <wtf/text/StringBuilder.h>
77
78 #if ENABLE(AVF_CAPTIONS)
79 #include "TextTrack.h"
80 #endif
81
82 #import <AVFoundation/AVFoundation.h>
83 #if PLATFORM(IOS)
84 #import "WAKAppKitStubs.h"
85 #import <CoreImage/CoreImage.h>
86 #import <mach/mach_port.h>
87 #else
88 #import <Foundation/NSGeometry.h>
89 #import <QuartzCore/CoreImage.h>
90 #endif
91
92 #if USE(VIDEOTOOLBOX)
93 #import <CoreVideo/CoreVideo.h>
94 #import <VideoToolbox/VideoToolbox.h>
95 #endif
96
97 #if USE(CFNETWORK)
98 #include "CFNSURLConnectionSPI.h"
99 #endif
100
namespace std {
// WTF::HashSet's iterator does not expose the nested typedefs that <iterator>-based
// std algorithms expect, so provide an iterator_traits specialization for the
// media-selection-option set iterator used in this file.
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
106
// Container layer that keeps its sublayers sized to its own bounds and compensates for a
// transform applied by the WebProcess when positioned.
@interface WebVideoContainerLayer : CALayer
@end

@implementation WebVideoContainerLayer

- (void)setBounds:(CGRect)bounds
{
    [super setBounds:bounds];
    // Keep every sublayer's frame in lockstep with this container's bounds.
    for (CALayer* sublayer in self.sublayers)
        sublayer.frame = bounds;
}

- (void)setPosition:(CGPoint)position
{
    // Pre-apply the transform added in the WebProcess to fix <rdar://problem/18316542> to the position.
    if (!CATransform3DIsIdentity(self.transform))
        position = CGPointApplyAffineTransform(position, CATransform3DGetAffineTransform(self.transform));
    [super setPosition:position];
}
@end
128
129 #if ENABLE(AVF_CAPTIONS)
130 // Note: This must be defined before our SOFT_LINK macros:
131 @class AVMediaSelectionOption;
132 @interface AVMediaSelectionOption (OutOfBandExtensions)
133 @property (nonatomic, readonly) NSString* outOfBandSource;
134 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
135 @end
136 #endif
137
138 @interface AVURLAsset (WebKitExtensions)
139 @property (nonatomic, readonly) NSURL *resolvedURL;
140 @end
141
142 typedef AVPlayer AVPlayerType;
143 typedef AVPlayerItem AVPlayerItemType;
144 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
145 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
146 typedef AVMetadataItem AVMetadataItemType;
147 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
148 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
149
150 #pragma mark - Soft Linking
151
152 // Soft-linking headers must be included last since they #define functions, constants, etc.
153 #import "CoreMediaSoftLink.h"
154
155 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
156 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
157 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
158
159 #if USE(VIDEOTOOLBOX)
160 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
161 #endif
162
163 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
164 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
165 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
166 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
167 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
168 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
169 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
170 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
171
172 #if USE(VIDEOTOOLBOX)
173 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
174 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
175 #endif
176
177 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
178 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
179 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
180 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
181 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
182 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
183 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
184
185 SOFT_LINK_CLASS(CoreImage, CIContext)
186 SOFT_LINK_CLASS(CoreImage, CIImage)
187
188 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
189 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
190 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
191 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
192 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
193 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
194 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
195 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
196 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
197 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
198 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
199 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
200 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
201 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
202 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
203 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
204
205 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
206
207 #define AVPlayer getAVPlayerClass()
208 #define AVPlayerItem getAVPlayerItemClass()
209 #define AVPlayerLayer getAVPlayerLayerClass()
210 #define AVURLAsset getAVURLAssetClass()
211 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
212 #define AVMetadataItem getAVMetadataItemClass()
213
214 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
215 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
216 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
217 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
218 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
219 #define AVMediaTypeVideo getAVMediaTypeVideo()
220 #define AVMediaTypeAudio getAVMediaTypeAudio()
221 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
222 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
223 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
224 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
225 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
226 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
227 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
228 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
229 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
230 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
231
232 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
233 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
234 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
235
236 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
237 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
238 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
239
240 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
241 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
242 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
243 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
244
245 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
246 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
247 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
248 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
249 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
250 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
251 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
252 #endif
253
254 #if ENABLE(AVF_CAPTIONS)
255 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
256 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
257 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
258 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
259 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
260 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
261 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
262 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
263 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
264 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
265
266 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
267 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
268 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
269 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
270 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
271 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
272 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
273 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
274 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
275 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
276 #endif
277
278 #if ENABLE(DATACUE_VALUE)
279 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
280 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
281 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
282 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
283 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
284
285 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
286 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
287 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
288 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
289 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
290 #endif
291
292 #if PLATFORM(IOS)
293 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
294 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
295 #endif
296
297 using namespace WebCore;
298
// Context values passed to KVO registration so that
// -[WebCoreAVFMovieObserver observeValueForKeyPath:ofObject:change:context:] can tell
// which kind of observed object a change notification came from.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
305
// Observer object that bridges AVFoundation notifications, KVO change callbacks and
// (when supported) legible-output caption delivery back into the owning
// MediaPlayerPrivateAVFoundationObjC.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Owner; cleared via -disconnect before the owner is destroyed.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
325
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate that forwards resource-loading requests (e.g. key or
// media data for custom schemes) to the player private.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Owner; reset with -setCallback:0 during teardown.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
335
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Delegate for AVPlayerItemVideoOutput pull notifications; lets the player private know
// when new video frames become available for painting.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Owner; reset with -setCallback:0 during teardown.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
347
348 namespace WebCore {
349
350 static NSArray *assetMetadataKeyNames();
351 static NSArray *itemKVOProperties();
352 static NSArray *assetTrackMetadataKeyNames();
353 static NSArray *playerKVOProperties();
354 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
355
#if !LOG_DISABLED
// Human-readable form of a bool for log messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
362
#if ENABLE(ENCRYPTED_MEDIA_V2)
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
// Maps a MediaPlayer to its private implementation so CDM session code can look the
// private object up. Entries are added in the constructor and removed in the destructor.
static PlayerToPrivateMapType& playerToPrivateMap()
{
    // Use NeverDestroyed rather than the deprecated DEPRECATED_DEFINE_STATIC_LOCAL macro
    // (NeverDestroyed.h is already imported by this file). Also drops the stray ';' that
    // followed the original function body.
    static NeverDestroyed<PlayerToPrivateMapType> map;
    return map;
}
#endif
371
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Shared serial queue on which AVAssetResourceLoader delegate callbacks are delivered.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
#endif
383
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Shared serial queue on which AVPlayerItemVideoOutput pull-delegate callbacks are delivered.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullQueue;
    static dispatch_once_t createQueueOnce;
    dispatch_once(&createQueueOnce, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
#endif
395
#if USE(CFNETWORK)
// Adapts an NSURLAuthenticationChallenge to WebCore's AuthenticationClient interface so
// AVFoundation-issued challenges can be routed through WebCore's authentication
// machinery. Each client response simply forwards to the challenge's sender.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    virtual void refAuthenticationClient() override { ref(); }
    virtual void derefAuthenticationClient() override { deref(); }

    // The AuthenticationClient callbacks below were missing the override annotation that
    // the ref/deref overrides already carried; annotate them all consistently so the
    // compiler verifies they match the base-class signatures.
    virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        // These sender methods are optional protocol additions; probe before calling.
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
};
#endif
447
// Registers this engine with the MediaPlayer factory, but only when AVFoundation can be
// soft-linked at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
        getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
454
// Constructs the private player: creates the ObjC observer/delegate helper objects up
// front and initializes all cached AVFoundation state to "nothing loaded yet".
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Let CDM session code find this private by its MediaPlayer; removed in the destructor.
    playerToPrivateMap().set(player, this);
#endif
}
491
// Destructor: detach all delegates/observers before tearing down rendering and loading so
// no callback can arrive on a half-destroyed object.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Silence the resource-loader delegate, then invalidate any in-flight resource loads.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Silence the video-output pull delegate; the semaphore is a manually-managed
    // dispatch object (pre-ARC-style GCD), so release it explicitly.
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
516
// Cancels any in-progress load and releases the asset, player item and player, carefully
// unregistering every notification/KVO observer first. Also used from the destructor.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping our reference.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Remove every KVO observation registered on the item and the player before releasing
    // them; deallocating an object that still has observers would raise.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was given an "enabled" observer when cached; remove them too.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Detach the Web Audio source provider from the item/track being destroyed.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
580
// A layer renderer exists once createVideoLayer() has been asked to create one (even if
// the actual AVPlayerLayer creation is still pending on the main thread).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
585
// True when some paint-to-context path exists: an AVPlayerItemVideoOutput (when the SDK
// supports it) or an AVAssetImageGenerator.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return !!m_imageGenerator;
}
594
// Create whichever paint-to-context renderer this configuration supports: a video output
// when available, otherwise an image generator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
603
// Lazily creates the AVAssetImageGenerator used to paint frames into a GraphicsContext.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    // Nothing to do without an asset, and never create the generator twice.
    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    // Ask for exact frames (zero time tolerance), cropped to the clean aperture, with the
    // track's preferred transform applied.
    [m_imageGenerator setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
620
// Tear down whichever context renderers exist (video output and/or image generator).
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
628
// Releases the image generator, if one was ever created.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    m_imageGenerator = nullptr;
}
638
// Schedules creation of the AVPlayerLayer (and, with VideoToolbox, the video output) on
// the main thread. Layer creation must happen there; a weak pointer guards against this
// object being destroyed before the task runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Tell the client we now render to a layer instead of a context.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
664
// Creates and configures the AVPlayerLayer. On iOS the layer is hosted inside a
// WebVideoContainerLayer (or the fullscreen layer, if active) so it can be reparented for
// fullscreen without disturbing the inline layer tree.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readyForDisplay so hasAvailableVideoFrame() can report when the first frame is up.
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = player()->client().mediaPlayerContentBoxRect().pixelSnappedSize();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    [m_videoLayer web_disableAllActions];
    m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
#ifndef NDEBUG
    [m_videoInlineLayer setName:@"WebVideoContainerLayer"];
#endif
    [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    if (m_videoFullscreenLayer) {
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else {
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
    }
    // Optimized (picture-in-picture style) fullscreen is only available on some OS
    // versions; probe for the selector before using it.
    if ([m_videoLayer respondsToSelector:@selector(setEnterOptimizedFullscreenModeEnabled:)])
        [m_videoLayer setEnterOptimizedFullscreenModeEnabled:(player()->fullscreenMode() & MediaPlayer::VideoFullscreenModeOptimized)];
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
702
// Tears down the AVPlayerLayer: unregister the readyForDisplay observer and detach the
// player before releasing the layer (and, on iOS, its container layers).
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
    m_videoInlineLayer = nil;
#endif

    m_videoLayer = nil;
}
721
// When rendering to a layer, AVFoundation reports readiness via the readyForDisplay KVO;
// otherwise we track whether a frame was ever painted to a context.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
729
730 #if ENABLE(AVF_CAPTIONS)
// Maps a platform text-track kind onto the AVFoundation media characteristics used to
// describe an out-of-band track.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Caption)
        return @[AVMediaCharacteristicTranscribesSpokenDialogForAccessibility];

    if (kind == PlatformTextTrack::Subtitle)
        return @[AVMediaCharacteristicTranscribesSpokenDialogForAccessibility];

    if (kind == PlatformTextTrack::Description)
        return @[AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility];

    if (kind == PlatformTextTrack::Forced)
        return @[AVMediaCharacteristicContainsOnlyForcedSubtitles];

    // Any other kind falls back to plain spoken-dialog transcription.
    return @[AVMediaCharacteristicTranscribesSpokenDialogForAccessibility];
}
748     
// Forwards out-of-band text track mode changes to the shared handler.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
753     
// Pushes the modes requested by the page's out-of-band track sources onto the
// matching platform text track objects.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            // Tracks are correlated by the unique identifier stored in the
            // selection option's outOfBandIdentifier.
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Translate the source's requested mode; unknown modes fall back
            // to Hidden (the initializer).
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
784 #endif
785
786
// Returns the canonical form of |url| as produced by the registered
// NSURLProtocol handlers, falling back to the as-parsed URL at any step that
// fails (empty input, request creation failure, no canonical request).
static NSURL *canonicalURL(const String& url)
{
    NSURL *cocoaURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return cocoaURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    NSURLRequest *canonicalRequest = request ? [NSURLProtocol canonicalRequestForRequest:request.get()] : nil;
    return canonicalRequest ? [canonicalRequest URL] : cocoaURL;
}
803
804 #if PLATFORM(IOS)
// Converts a WebCore Cookie into the NSHTTPCookie AVFoundation expects.
// Note: cookie.expires is in milliseconds since the epoch.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    NSDictionary *requiredProperties = @{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    };
    RetainPtr<NSMutableDictionary> properties = adoptNS([requiredProperties mutableCopy]);
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
822 #endif
823
// Creates the AVURLAsset for |url|, assembling the full options dictionary
// (reference restrictions, HTTP headers, out-of-band tracks, cookies, etc.)
// before handing it to AVFoundation. No-op if an asset already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Never allow the asset to mix local and remote sub-resource references.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Forward the page's Referer and User-Agent headers with media requests.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

#if ENABLE(AVF_CAPTIONS)
    // Describe any out-of-band text tracks so AVFoundation exposes them as
    // alternate media selection options.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    // Pin media loads to a specific network interface when one is configured.
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the document's cookies for this URL to AVFoundation.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route the asset's resource-loading requests through WebKit's delegate.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    // A fresh asset needs a fresh playability check.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
911
// Attaches |item| (may be nil) to the AVPlayer, hopping to the main thread
// if called from elsewhere. No-op when no player exists yet.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (!pthread_main_np()) {
        // Keep the player and item alive until the replacement has happened
        // on the main queue.
        RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
        RetainPtr<AVPlayerItemType> strongItem = item;
        dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
            [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
        });
        return;
    }

    [m_avPlayer replaceCurrentItemWithPlayerItem:item];
}
928
// Creates the AVPlayer, registers KVO observers, applies the wireless/external
// playback configuration, and attaches any already-created layer and item.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    // Observe every player-level KVO property so state changes are mirrored
    // into this object's cached values.
    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebKit applies media selection (e.g. captions) itself.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    // Re-apply a playback target selected before the player existed.
    if (m_shouldPlayToPlaybackTarget)
        setShouldPlayToPlaybackTarget(true);
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    // If the item was created before the player, attach it now.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
964
// Creates the AVPlayerItem for the current asset, wires up notifications,
// KVO, the legible (caption) output, and the Web Audio provider.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    // Observe end-of-playback plus every cached KVO property of the item.
    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    // If the player already exists, attach the new item to it.
    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Route WebVTT caption samples through a legible output, with player-side
    // rendering suppressed so WebKit can present them itself.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio provider pointed at the new item and its first
    // enabled audio track.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    }
#endif

    setDelayCallbacks(false);
}
1010
// Asynchronously loads the asset's "playable" key, once per asset, and
// schedules an AssetPlayabilityKnown notification when it resolves.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    // The completion handler may run on an arbitrary queue; bounce to the
    // main thread and bail if this object has been destroyed in the interim.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1027
// Kicks off asynchronous loading of the asset-level metadata keys and, once
// those arrive, the per-track metadata keys; notifies the observer when all
// of it has finished.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    // A dispatch group tracks the outstanding loads; the initial enter here
    // is balanced by the leave at the end of the asset completion lambda.
    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
    dispatch_group_enter(metadataLoadingGroup);
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                // Each track load enters the group and leaves when its keys
                // have finished loading.
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup);
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup);
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup);
        });
    }];

    // When everything has loaded, notify on the main thread (if this player
    // still exists) and release the group.
    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });

        dispatch_release(metadataLoadingGroup);
    });
}
1059
// Maps the cached AVPlayerItem state onto the cross-platform item status,
// checking hard failures first and then the buffering flags in priority
// order (likely-to-keep-up, buffer-full, buffer-empty).
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1078
// Exposes the underlying AVPlayer wrapped in a PlatformMedia struct.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia pm;
    pm.type = PlatformMedia::AVFoundationMediaPlayerType;
    pm.media.avfMediaPlayer = m_avPlayer.get();
    return pm;
}
1087
// Returns the layer to composite for this player, or null until a layer has
// been requested through the layer-creation path.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
#if PLATFORM(IOS)
    // On iOS the inline container layer is exposed, not the video layer itself.
    return m_haveBeenAskedToCreateLayer ? m_videoInlineLayer.get() : nullptr;
#else
    return m_haveBeenAskedToCreateLayer ? m_videoLayer.get() : nullptr;
#endif
}
1096
1097 #if PLATFORM(IOS)
// Reparents the video layer into (or out of) the given fullscreen layer,
// synchronizing the commits of the affected layer trees via a fence port.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    // Reparent without implicit animations.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    
    // Find the root of the layer tree the fullscreen layer lives in.
    CALayer *oldRootLayer = videoFullscreenLayer;
    while (oldRootLayer.superlayer)
        oldRootLayer = oldRootLayer.superlayer;

    CALayer *newRootLayer = nil;
    
    if (m_videoFullscreenLayer && m_videoLayer) {
        // Move the video layer into the fullscreen layer.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoLayer removeFromSuperlayer];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoFullscreenLayer.get();
    } else if (m_videoInlineLayer && m_videoLayer) {
        // Return the video layer to the inline container.
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoInlineLayer.get();
    } else if (m_videoLayer)
        [m_videoLayer removeFromSuperlayer];

    while (newRootLayer.superlayer)
        newRootLayer = newRootLayer.superlayer;

    // If the layer moved between two different layer trees, share one fence
    // port across the CAContexts hosting those trees so their commits can be
    // coordinated.
    if (oldRootLayer && newRootLayer && oldRootLayer != newRootLayer) {
        mach_port_t fencePort = 0;
        for (CAContext *context in [CAContext allContexts]) {
            if (context.layer == oldRootLayer || context.layer == newRootLayer) {
                if (!fencePort)
                    fencePort = [context createFencePort];
                else
                    [context setFencePort:fencePort];
            }
        }
        mach_port_deallocate(mach_task_self(), fencePort);
    }
    [CATransaction commit];

    // Keep the text track representation layered over the fullscreen video.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1151
// Updates the cached fullscreen frame and resizes the video layer, letting
// the frame change animate implicitly before disabling actions again.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer) {
        [m_videoLayer setStyle:nil]; // This enables actions, i.e. implicit animations.
        [CATransaction begin];
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];
        [CATransaction commit];
        // Re-disable implicit animations once the frame change is committed.
        [m_videoLayer web_disableAllActions];
    }
    syncTextTrackBounds();
}
1167
// Records the requested gravity and applies the corresponding
// AVLayerVideoGravity value to the video layer, if one exists.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:videoGravity];
}
1186
// Enables/disables optimized (picture-in-picture style) fullscreen entry on
// the video layer; the selector may not exist, so probe before calling.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenMode(MediaPlayer::VideoFullscreenMode mode)
{
    if (m_videoLayer && [m_videoLayer respondsToSelector:@selector(setEnterOptimizedFullscreenModeEnabled:)])
        [m_videoLayer setEnterOptimizedFullscreenModeEnabled:(mode & MediaPlayer::VideoFullscreenModeOptimized)];
}
1192
// Returns the most recently cached timed metadata, or nil if none exists.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // RetainPtr::get() already yields nil when nothing has been cached.
    return m_currentMetaData.get();
}
1199
// Serializes the player item's access log using the log's own preferred
// string encoding; returns the empty string when no item exists.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();
    
    AVPlayerItemAccessLog *log = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1210
// Serializes the player item's error log using the log's own preferred
// string encoding; returns the empty string when no item exists.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *log = [m_avPlayerItem.get() errorLog];
    NSData *logData = [log extendedLogData];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:logData encoding:[log extendedLogDataStringEncoding]]);
    return logString.get();
}
1221 #endif
1222
// Shows or hides the video layer without triggering implicit animations.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1231     
// Starts playback by applying the requested rate to the AVPlayer, delaying
// the resulting callbacks. No-op until metadata is available.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    // Fetch the requested rate once so the cached value and the rate handed
    // to AVPlayer can never disagree.
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1243
// Pauses playback; pausing is expressed as a playback rate of zero.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1255
// Returns the media duration, preferring the player item's value over the
// asset's, or an invalid/infinite MediaTime when no numeric duration exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    // Indefinite durations map to positive infinity.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1280
// Returns the current playback position, clamped to be non-negative; reports
// zero until metadata and a player item exist or if the item time is
// non-numeric.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(itemTime), MediaTime::zeroTime());
}
1292
// Seeks the player item to |time| within the given tolerances and reports
// completion back through seekCompleted().
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Flush partially delivered metadata cues before moving the playhead.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    // The completion handler can run on any queue; hop back to the main
    // thread and ignore the callback if this player has been destroyed.
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1319
// Applies the requested volume to the AVPlayer. The parameter is
// intentionally ignored on iOS.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1332
// Note: no state change happens here; the request is only logged.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1342
// Caches the requested rate and hands it to the AVPlayer, delaying callbacks
// while the change is applied.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1350
// Reports the cached playback rate, or 0 before metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1358
// Maps preservesPitch onto the corresponding AVAudioTimePitchAlgorithm on
// the player item, if one exists.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (m_avPlayerItem)
        [m_avPlayerItem setAudioTimePitchAlgorithm:(preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];
}
1364
// Converts the cached loaded ranges into a PlatformTimeRanges object; empty
// when no player item exists.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto timeRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return timeRanges;

    // Only valid, non-empty ranges are added.
    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange))
            timeRanges->add(toMediaTime(timeRange.start), toMediaTime(CMTimeRangeGetEnd(timeRange)));
    }
    return timeRanges;
}
1379
// Returns the smallest start time among the valid, non-empty cached seekable
// ranges, or zero when no such range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime minTimeSeekable = MediaTime::positiveInfiniteTime();
    bool hasValidRange = false;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;

        hasValidRange = true;
        MediaTime startOfRange = toMediaTime(timeRange.start);
        if (minTimeSeekable > startOfRange)
            minTimeSeekable = startOfRange;
    }
    // Without any valid range, report zero rather than positive infinity.
    return hasValidRange ? minTimeSeekable : MediaTime::zeroTime();
}
1399
// Returns the largest end time among the valid, non-empty seekable ranges,
// lazily (re)populating the cache from the player item when needed.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime maxTimeSeekable;
    for (NSValue *thisRangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        MediaTime endOfRange = toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeSeekable < endOfRange)
            maxTimeSeekable = endOfRange;
    }
    return maxTimeSeekable;
}
1417
// Returns the farthest loaded position: the largest end time over all valid,
// non-empty cached loaded ranges, or zero when nothing is cached.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime maxTimeLoaded;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(range) && !CMTIMERANGE_IS_EMPTY(range))
            maxTimeLoaded = std::max(maxTimeLoaded, toMediaTime(CMTimeRangeGetEnd(range)));
    }

    return maxTimeLoaded;
}
1436
// Returns the total sample-data size across all cached tracks, computed once
// and then served from m_cachedTotalBytes.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    // Sum the sample data length of every track's underlying asset track.
    for (AVPlayerItemTrack *thisTrack in m_cachedTracks.get())
        m_cachedTotalBytes += [[thisTrack assetTrack] totalSampleDataLength];

    return m_cachedTotalBytes;
}
1450
// Replaces the current asset reference.
void MediaPlayerPrivateAVFoundationObjC::setAsset(RetainPtr<id> asset)
{
    m_avAsset = asset;
}
1455
// Computes the aggregate asset status: the most severe per-key load status
// across all metadata keys, refined by the "playable" value once loaded.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // All keys loaded; distinguish playable assets from merely loaded ones.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1484
// Returns the error code associated with loading the asset's "playable" key,
// or 0 when there is no asset or no error (messaging a nil error yields 0).
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return [error code];
}
1494
// Paints the current video frame into |context|, preferring the video output
// path when a frame is available there and falling back to the image
// generator otherwise.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that software painting has produced at least one frame.
    m_videoFrameHasDrawn = true;
}
1515
// Best-effort software paint entry point; bails early whenever painting is
// unnecessary or cannot be serviced without creating new renderers.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const FloatRect& rect)
{
    if (!metaDataAvailable())
        return;

    if (context->paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if we already have an image generator or video output available.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1531
// Software-paints the frame at the current media time into |context| using an
// AVAssetImageGenerator snapshot. Used when no video-output frame is available.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    // Flip the coordinate system so the CGImage draws upright within |rect|.
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    // Note: the old explicit "image = 0;" before scope exit was a dead store;
    // RetainPtr releases the image automatically when it goes out of scope.
}
1547
// AVFoundation will return non-video MIME types which it claims to support, but which we
// do not support in the <video> element. Reject all non video/, audio/, and application/
// types, plus a fixed list of formats AVFoundation is known not to play that
// sites commonly ask about.
static bool unsupportedMIMEType(const String& type)
{
    bool hasMediaPrefix = type.startsWith("video/", false)
        || type.startsWith("audio/", false)
        || type.startsWith("application/", false);
    if (!hasMediaPrefix)
        return true;

    static const char* const knownUnsupportedTypes[] = {
        "video/webm", "audio/webm", "video/x-webm",
        "video/x-flv",
        "audio/ogg", "video/ogg", "application/ogg",
        "video/h264",
    };
    for (auto* unsupportedType : knownUnsupportedTypes) {
        if (equalIgnoringCase(type, unsupportedType))
            return true;
    }

    return false;
}
1570
// A fixed set of MIME types known a priori to be playable by AVFoundation,
// used to short-circuit the more expensive avfMimeTypeCache() lookup.
// Populated exactly once; NeverDestroyed avoids exit-time destruction.
static HashSet<String>& staticMimeTypeCache()
{
    static NeverDestroyed<HashSet<String>> cache;
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    static const char* const typeNames[] = {
        "application/vnd.apple.mpegurl",
        "application/x-mpegurl",
        "audio/3gpp",
        "audio/aac",
        "audio/aacp",
        "audio/aiff",
        "audio/basic",
        "audio/mp3",
        "audio/mp4",
        "audio/mpeg",
        "audio/mpeg3",
        "audio/mpegurl",
        "audio/mpg",
        "audio/wav",
        "audio/wave",
        "audio/x-aac",
        "audio/x-aiff",
        "audio/x-m4a",
        "audio/x-mpegurl",
        "audio/x-wav",
        "video/3gpp",
        "video/3gpp2",
        "video/mp4",
        "video/mpeg",
        "video/mpeg2",
        "video/mpg",
        "video/quicktime",
        "video/x-m4v",
        "video/x-mpeg",
        "video/x-mpg",
    };
    for (auto* typeName : typeNames)
        cache.get().add(typeName);

    return cache;
} // NOTE(review): the original text was missing this closing brace.
// Caches the MIME types AVFoundation itself claims to play, queried once via
// +[AVURLAsset audiovisualMIMETypes]. Entries are lower-cased so callers can
// match with canonicalized strings.
static HashSet<String>& avfMimeTypeCache()
{
    static NeverDestroyed<HashSet<String>> cache;
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    for (NSString *mimeType in [AVURLAsset audiovisualMIMETypes])
        cache.get().add([mimeType lowercaseString]);

    return cache;
} // NOTE(review): the original text was missing this closing brace.
// Creates a CGImage snapshot of the media at |time|, constrained to the size
// of |rect|, using a lazily-created AVAssetImageGenerator. May return null
// wrapped in the RetainPtr if generation fails (errors are ignored).
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // Timescale 600 is the conventional QuickTime timescale, evenly divisible
    // by common frame rates.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    // Re-tag the image with the device RGB color space before painting.
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1651
// MediaPlayer engine entry point: reports every MIME type AVFoundation can play.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = avfMimeTypeCache();
} // NOTE(review): the original text was missing this closing brace.
1657 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// FairPlay Streaming (both spellings) and Clear Key are the only EME key
// systems this engine recognizes.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps")
        || equalIgnoringCase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringCase(keySystem, "org.w3c.clearkey");
}
1664 #endif
1665
// Implements canPlayType()-style support queries: validates the key system
// (when EME is enabled), rejects known-unsupported MIME types, consults the
// static and AVFoundation type caches, and only falls through to the
// expensive isPlayableExtendedMIMEType: check when codecs are specified.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringCase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringCase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // MSE is handled by a different engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (unsupportedMIMEType(parameters.type))
        return MediaPlayer::IsNotSupported;

    // Short-circuit: only types present in one of the caches can be supported.
    if (!staticMimeTypeCache().contains(parameters.type) && !avfMimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1709
// Whether this engine can service the given EME key system, optionally
// constrained to |mimeType|. An empty keySystem is never supported.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Fixed: this previously returned MediaPlayer::IsNotSupported — an
        // enumerator — from a bool-returning function.)
        if (equalIgnoringCase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringCase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && unsupportedMIMEType(mimeType))
            return false;

        // Short-circuit: the type must appear in the static or AVFoundation cache.
        if (!mimeType.isEmpty() && !staticMimeTypeCache().contains(mimeType) && !avfMimeTypeCache().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1735
1736 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1737 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Completes an AVAssetResourceLoadingRequest for a decryption key by serving
// the requested byte range of |keyData| and marking the request finished.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    // Advertise the full key length and byte-range support up front.
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the key's actual length.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // Reject ranges that fall entirely outside the key data.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        // slice() takes ints; the range must fit (guaranteed by the asserts).
        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1763 #endif
1764
// AVAssetResourceLoader delegate hook. Returns true when WebCore will service
// |avRequest| asynchronously (key requests routed through the EME keyNeeded()
// machinery, or ordinary media loads handed to WebCoreAVFResourceLoader);
// returns false when AVFoundation should fail the request itself.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // "skd" is the FairPlay Streaming key-request scheme.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Little-endian 32-bit length prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Park the request until the key arrives; keyed by the full key URI.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // If the key is already cached, answer synchronously and let
        // AVFoundation proceed without waiting.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // Not a key request: hand the load off to a WebCore resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1821
// Wraps the platform authentication challenge in a WebCore
// AuthenticationChallenge (via CFNetwork when that stack is in use) and asks
// the MediaPlayer client whether AVFoundation should wait for a response.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1834
// AVFoundation cancelled an in-flight resource request; stop the WebCore
// loader servicing it, if any. (The map entry itself is removed by
// didStopLoadingRequest().) The unused 'scheme' local was removed.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1844
// AVFoundation is finished with this request; drop the loader servicing it.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1849 #endif
1850
// The engine is usable only when both the AVFoundation and CoreMedia
// frameworks can be soft-linked at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    if (!AVFoundationLibrary())
        return false;
    return isCoreMediaFrameworkAvailable();
}
1855
// Intended to snap |timeValue| to an exact media sample time.
// FIXME - impossible to implement until rdar://8721510 is fixed, so every
// path currently returns the input unchanged.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    return timeValue;
}
1864
// Number of seconds for which a cached currentTime() value may be reused.
// On iOS and OS X 10.10+ no caching is done; older OS X caches for 5 seconds.
// (Rationale for the platform split is not visible here — presumably an older
// AVFoundation time-query cost; confirm before changing.)
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1873
// Applies the current aspect-ratio policy to the AVPlayerLayer's video
// gravity, suppressing implicit CoreAnimation transitions.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    NSString* videoGravity;
    if (shouldMaintainAspectRatio())
        videoGravity = AVLayerVideoGravityResizeAspect;
    else
        videoGravity = AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:videoGravity];
    [CATransaction commit];
}
1892
// Returns the first track in |tracks| whose isEnabled flag is set, or nil
// when no track is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack* track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1902
// Called whenever the asset/item track collection changes. Re-derives and
// caches hasVideo/hasAudio/hasClosedCaptions, refreshes the WebCore track
// lists, and fires characteristicsChanged() if the primary audio language
// changed. Characteristic notifications are batched for the duration.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Snapshot the old primary audio language so we can detect a change below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly fequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Report the natural size with the track's preferred transform applied
        // (e.g. rotation), or an empty size when there is no video track.
        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // With a player item available, classify the enabled cached tracks by media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Selected selection-group options also count as audio/video presence.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for diaplay to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media selection group for caption detection when available.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Fall back to legacy CC tracks only when the selection group had none.
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the current audible track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
2008
2009 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of type |trackType| in |tracks| against the
// existing WebCore track wrappers in |oldItems|: builds wrappers for newly
// appeared platform tracks via |itemFactory|, rewrites |oldItems| to the new
// set, then notifies |player| of removals and additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current platform tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Platform tracks currently wrapped by WebCore items.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the old items into kept (replacement) and removed, then append
    // freshly created items for the added platform tracks.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2053
2054 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media-selection-group variant of the diffing helper above: refreshes the
// group's options for |characteristics|, diffs them against the existing
// WebCore track wrappers in |oldItems|, rewrites |oldItems|, and notifies
// |player| of removals and additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, const Vector<String>& characteristics, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions(characteristics);

    // Only add selection options which do not have an associated persistant track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        // -track is probed via respondsToSelector: because it is not declared
        // on all OS versions this code runs against.
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    // Selection options currently wrapped by WebCore items.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition old items into kept and removed; create items for added options.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2114 #endif
2115
// Synchronizes m_audioTracks with the platform's audio tracks (and, when
// available, the audible media selection group), then refreshes each
// wrapper's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the audible selection group, honoring the player's
    // preferred audio characteristics.
    Vector<String> characteristics = player()->preferredAudioCharacteristics();
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, characteristics);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, characteristics, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2142
// Synchronizes m_videoTracks with the platform's video tracks (and, when
// available, the visual media selection group), then refreshes each
// wrapper's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group, Vector<String>());
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, Vector<String>(), &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Fixed: this loop previously iterated m_audioTracks — a copy/paste error
    // from updateAudioTracks() that left video track properties stale.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2168
// Whether WebCore must render text tracks into a platform layer itself.
// Required on iOS while a fullscreen layer is active (the representation layer
// is parented into it — see setTextTrackRepresentation()).
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        return true;
#endif
    return false;
}
2177
// Keeps the text track representation layer aligned with the displayed video
// region inside the fullscreen layer. iOS only; no-op elsewhere.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;

    // Prefer the actual video rect; fall back to the full fullscreen frame.
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2188
// Installs (or removes, when |representation| is null) the platform layer used
// to render text tracks, reparenting it into the fullscreen layer. iOS only.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just make sure its frame is up to date.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2212 #endif // ENABLE(VIDEO_TRACK)
2213
2214 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider for the current player item,
// pointing it at the first enabled audible track.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    m_provider->setAudioTrack(firstEnabledTrack(safeAVAssetTracksForAudibleMedia()));
    return m_provider.get();
}
2224 #endif
2225
// Pushes the cached presentation size through as the natural size; without an
// asset there is nothing meaningful to report.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2233     
// True only when the asset's resolved URL is known (its "resolvedURL" key has
// loaded) and shares scheme/host/port with the originally requested URL —
// i.e. no cross-origin redirect occurred during loading.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;

    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2243
2244 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates the AVPlayerItemVideoOutput used for pixel-buffer based
// painting and attaches it to the current player item. No-op when there is no
// player item or an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox, nil attributes let AVFoundation choose the pixel format.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Frame-availability callbacks are delivered on the shared pull-delegate queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2267
// Detaches and releases the AVPlayerItemVideoOutput created by
// createVideoOutput(). Safe to call when no output exists.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    // The player item may already have been torn down; only detach if it is
    // still alive.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    m_videoOutput = nullptr;
}
2279
// Pulls the pixel buffer for the item's current time from the video output,
// converting it to 32BGRA via VideoToolbox when that path is enabled.
// Returns null when no new buffer is available for the current time.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    // Avoid the copy when the output has nothing newer than what was last
    // retrieved for this timestamp.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // NOTE(review): the results of CVPixelBufferCreate and
    // VTPixelTransferSessionTransferImage are not checked here; on failure
    // outputBuffer could be uninitialized or empty — confirm upstream
    // guarantees before relying on this path.
    CVPixelBufferRef outputBuffer;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2320
// Returns true when a frame can be painted right now: either an image was
// already captured, or the (lazily created) video output has a new pixel
// buffer for the current time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously captured image counts as an available frame.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2334
2335 static const void* CVPixelBufferGetBytePointerCallback(void* info)
2336 {
2337     CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
2338     CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
2339     return CVPixelBufferGetBaseAddress(pixelBuffer);
2340 }
2341
2342 static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
2343 {
2344     CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
2345     CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
2346 }
2347
2348 static void CVPixelBufferReleaseInfoCallback(void* info)
2349 {
2350     CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
2351     CFRelease(pixelBuffer);
2352 }
2353
// Wraps a 32BGRA CVPixelBuffer in a CGImage without copying the pixels: a
// direct-access CGDataProvider reads straight from the (locked) buffer memory.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // Little-endian 32-bit with alpha first corresponds to BGRA byte order.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2371
// Refreshes m_lastImage from the video output when a new pixel buffer is
// available for the current time.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2382
// Paints the current video frame (pulled via the AVPlayerItemVideoOutput
// path) into the given graphics context, honoring the track's preferred
// transform.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const FloatRect& outputRect)
{
    // If the output exists but has not produced a frame yet, block briefly
    // (up to 1s) for one to arrive.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(*context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect back through the inverse transform so that
    // drawing under the concatenated CTM lands in outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().mapRect(outputRect);

    context->concatCTM(videoTransform);
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2413
// Returns the CGImage for the current playback time, refreshing the cached
// image first. May return null if no frame has ever been captured.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2419
// Blocks the calling thread (for at most one second) until the video output
// reports that new media data is available; outputMediaDataWillChange()
// signals the semaphore from the pull-delegate queue.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Lazily create the semaphore used to hand the notification across threads.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    if (dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC)))
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2433
// AVPlayerItemVideoOutput delegate callback; wakes any thread blocked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2438 #endif
2439
2440 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1: parses the initData, asks AVFoundation for a streaming content key
// request for the matching pending resource-loading request, and delivers the
// resulting key message (or key error) to the MediaPlayer client. On success
// the loading request moves from the key-URI map to the new session's entry.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // The key URI must correspond to a resource-loading request we deferred
    // earlier; otherwise there is nothing to answer.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Report the underlying error code to the client; the key exchange
        // itself failed, but the call sequence was valid, hence NoError.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2482
// EME v1: delivers key data to the resource-loading request associated with
// sessionID, completes that request, and notifies the client. initDataPtr and
// initDataLength are unused in this implementation.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);

    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Hand the key bytes to the pending loading request, finish it, and drop
    // the session-to-request mapping.
    RetainPtr<AVAssetResourceLoadingRequest> request = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[request.get() dataRequest] respondWithData:keyData.get()];
    [request.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    return MediaPlayer::NoError;
}
2503
// EME v1: cancels a pending key request by dropping the session's
// resource-loading request from the map.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    m_sessionIDToRequestMap.remove(sessionID);
    return MediaPlayer::NoError;
}
2515 #endif
2516
2517 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending resource-loading request for the given key
// URI; returns a null RetainPtr when none is pending.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2522
// Called when a key becomes available: satisfies every pending
// resource-loading request for which the player now has cached key data, then
// removes those entries from the key-URI map.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> resolvedKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        resolvedKeyIds.append(entry.key);
    }

    // Remove after iteration; mutating the map while iterating is unsafe.
    for (auto& keyId : resolvedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2542
// EME v2: vends a CDM session bound to this player, or null for key systems
// this implementation does not support.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    if (keySystemIsSupported(keySystem))
        return std::make_unique<CDMSessionAVFoundationObjC>(this);

    return nullptr;
}
2550 #endif
2551
2552 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Fallback (no legible-output support): reconciles the cached player item
// tracks against m_textTracks, creating a legacy closed-caption track object
// for each new CC track and reporting tracks that disappeared.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Deselect any legible option so captions are not auto-configured.
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every known track was removed; matches found below
    // are taken back out of this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited entries.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(m_textTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2587 #endif
2588
// Returns the asset's audible tracks, or nil when the asset is absent or its
// "tracks" key has not finished loading (querying earlier could block).
NSArray* MediaPlayerPrivateAVFoundationObjC::safeAVAssetTracksForAudibleMedia()
{
    if (!m_avAsset)
        return nil;

    bool tracksAreLoaded = [m_avAsset.get() statusOfValueForKey:@"tracks" error:NULL] == AVKeyValueStatusLoaded;
    if (!tracksAreLoaded)
        return nil;

    return [m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible];
}
2599
2600 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// True when the asset exists and its media-selection metadata has finished
// loading; accessing selection groups before then is unsafe.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2611
// Legible (caption/subtitle) selection group; nil until selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2619
// Audible selection group; nil until selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2627
// Visual selection group; nil until selection data loads.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2635
// Reconciles the legible media-selection options reported by AVFoundation
// against m_textTracks: creates a track object for each new option (in-band
// or, with AVF_CAPTIONS, out-of-band) and reports options that disappeared.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Start by assuming every known track was removed; matches found below
    // are taken back out of this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited entries.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy CC tracks are matched by processLegacyClosedCaptionsTracks(), not here.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2691
// Lazily creates the single metadata text track (HLS timed metadata) and
// registers it with the media player. Subsequent calls are no-ops.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2701
// Forwards incoming caption data (attributed strings and native samples) to
// the currently selected text track; dropped when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2709
// Clears any partially delivered cue state on the current text track (e.g.
// after a seek makes pending cues stale).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTextTrack)
        return;
    
    m_currentTextTrack->resetCueValues();
}
2719 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2720
// Switches the active text track, enabling the matching AVFoundation
// mechanism for its category: legacy closed captions via the player's CC
// display flag, out-of-band or in-band tracks via the legible media-selection
// group. Passing null deselects everything.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // No track: clear both the media-selection choice and the legacy CC flag.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2749
// Returns the language of the primary audio track, caching the answer in
// m_languageOfPrimaryAudioTrack (a mutable member; this method is const).
// Prefers the currently selected audible media-selection option; otherwise
// falls back to the language of a sole audio asset track.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single "primary" language exists.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2791
2792 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// True when the AVPlayer reports that external (wireless) playback is
// currently active.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool isWireless = m_avPlayer.get().externalPlaybackActive;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isWireless));

    return isWireless;
}
2803
// Maps the platform's external-device type to the MediaPlayer target type.
// On iOS this queries the player's external device; elsewhere AirPlay is the
// only supported target.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // All enum values are handled above; reaching here means a new type was
    // added without updating this switch.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2826
// Returns the display name of the current wireless playback target: the
// output context's device name on Mac, the external device name on iOS.
// Empty when no player exists or no name is available.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_outputContext)
        wirelessTargetName = m_outputContext.get().deviceName;
#else
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2843
// Reports whether wireless (external) video playback is disabled, refreshing
// the cached m_allowsWirelessVideoPlayback flag from the player when one
// exists; without a player the last cached value is returned.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2854
// Records and (when a player exists) applies the wireless-video-playback
// setting; callbacks are delayed around the AVPlayer mutation.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    // Cache first so the value survives player re-creation.
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2866
2867 #if !PLATFORM(IOS)
// Mac only: adopts the AVOutputContext from the given playback target. An
// invalid target (no context or no device name) cancels playing to a target.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(Ref<MediaPlaybackTarget>&& target)
{
    MediaPlaybackTargetMac* macTarget = toMediaPlaybackTargetMac(&target.get());

    m_outputContext = macTarget->outputContext();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p, device name = %s", this, m_outputContext.get(), [m_outputContext.get().deviceName UTF8String]);

    if (!m_outputContext || !m_outputContext.get().deviceName)
        setShouldPlayToPlaybackTarget(false);
}
2878
// Mac only: attaches or detaches the stored AVOutputContext on the AVPlayer
// according to shouldPlay, skipping the mutation when the player already has
// the desired context.
void MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(bool shouldPlay)
{
    // Remember the intent even without a player; applied once one exists.
    m_shouldPlayToPlaybackTarget = shouldPlay;

    if (!m_avPlayer)
        return;

    AVOutputContext *newContext = shouldPlay ? m_outputContext.get() : nil;
    RetainPtr<AVOutputContext> currentContext = m_avPlayer.get().outputContext;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldPlayToPlaybackTarget(%p) - target = %p, shouldPlay = %s", this, newContext, boolString(shouldPlay));

    // Nothing to do when both are nil or they compare equal.
    if ((!newContext && !currentContext.get()) || [currentContext.get() isEqual:newContext])
        return;

    setDelayCallbacks(true);
    m_avPlayer.get().outputContext = newContext;
    setDelayCallbacks(false);
}
2898 #endif // !PLATFORM(IOS)
2899
// iOS: keeps the player's "use external playback while an external screen is
// active" flag in sync with whether a fullscreen layer is installed.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

#if PLATFORM(IOS)
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
#endif
}
2909 #endif
2910
// KVO handler: caches the AVPlayerItem status and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2917
// Prior-notification half of the likelyToKeepUp KVO pair; the matching
// DidChange call decrements the pending count (see below).
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2922
// Caches the new likelyToKeepUp value; state is only re-evaluated once all
// outstanding will/did notification pairs have completed.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2931
// Prior-notification half of the bufferEmpty KVO pair.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
2936
// Caches the new bufferEmpty value; defers updateStates() until no status
// change notifications are in flight.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2945
// Prior-notification half of the bufferFull KVO pair.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
2950
// Caches the new bufferFull value; defers updateStates() until no status
// change notifications are in flight.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2959
// KVO handler: caches the new seekable ranges and propagates the change.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2967
// KVO handler: caches the new loaded (buffered) ranges and propagates the change.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2975
// KVO handler for the layer's readyForDisplay flag. Becoming ready before any
// video track was reported forces a track re-scan.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2983
// KVO handler: a track's enabled flag flipped; re-scan tracks and state.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2989
// Starts or stops media buffering by attaching or detaching the AVPlayerItem
// from the AVPlayer. The flag is cached so it can be applied once a player
// exists.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Fix: log message previously named the wrong function ("shouldBufferData").
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;
    
    if (!m_avPlayer)
        return;

    // Detaching the item is what actually halts buffering; reattach to resume.
    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
3003
3004 #if ENABLE(DATACUE_VALUE)
// Map an AVFoundation metadata key space to the corresponding WebCore metadata
// cue type string; returns the empty atom for unrecognized key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserDataType("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserDataType("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadataType("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadataType("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3MetadataType("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserDataType;
    // AVMetadataKeySpaceISOUserData is weak-linked; guard against it being unavailable.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserDataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3MetadataType;

    return emptyAtom;
}
3026 #endif
3027
// New timed metadata arrived on the AVPlayerItem at |mediaTime|. Caches the
// items and (when DATACUE_VALUE is enabled) turns them into data cues on the
// lazily-created metadata text track.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // KVO delivers NSNull (not nil) when the timed metadata is cleared;
    // normalize both cases to nil before caching.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // Don't create cues while a seek is in progress.
    if (seeking())
        return;

    // Lazily create the metadata text track the cues are added to.
    if (!m_metadataTrack)
        processMetadataTrack();

    // Metadata was cleared: close out any still-open cues at the current time.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    // Add one data cue per item. Items without a valid duration get an
    // open-ended (infinite) end time until a later update closes them.
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
3069
// The AVPlayerItem's "tracks" array changed. Re-filters the cached track list,
// moves the "enabled" KVO registrations from the old set to the new one, and
// notifies the base class.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Stop observing "enabled" on the tracks cached from the previous change.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    // Keep a player-item track if it is backed by one of the asset's own tracks.
    // Streaming tracks are kept only once the media selection groups have loaded
    // and no selection group covers the track's media characteristic.
    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Begin observing "enabled" on the new set of tracks.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The track set changed, so the cached total byte count is stale; reset it
    // so it gets recomputed on demand.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3107
// The AVPlayerItem's "hasEnabledAudio" flag changed: cache it, then refresh the
// track information and player states.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;
    tracksChanged();
    updateStates();
}
3115
// The AVPlayerItem's "presentationSize" changed: cache it, then propagate the
// size change and recompute the player states.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;
    sizeChanged();
    updateStates();
}
3123
// The AVPlayerItem's "duration" changed: remember the new value and discard any
// duration the base class has cached.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;
    invalidateCachedDuration();
}
3130
// The AVPlayer's "rate" changed: cache it before notifying, since the state
// update presumably consults the cached value.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;
    updateStates();
    rateChanged();
}
3138     
3139 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Forward the AVPlayer external-playback KVO change to the shared base class.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3144 #endif
3145
// Cache the AVPlayerItem's "canPlayFastForward" value for later queries.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3150
// Cache the AVPlayerItem's "canPlayFastReverse" value for later queries.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3155
// Return the URL AVFoundation resolved for the asset, but only once that key
// has finished loading; otherwise defer to the base class implementation.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    if (m_avAsset && [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] == AVKeyValueStatusLoaded)
        return URL([m_avAsset resolvedURL]);

    return MediaPlayerPrivateAVFoundation::resolvedURL();
}
3163
// Keys whose values are loaded asynchronously on the AVAsset before playback.
NSArray* assetMetadataKeyNames()
{
    // Use C++ thread-safe function-local static initialization instead of the
    // previous check-then-assign lazy pattern, which could race if this were
    // ever reached from two threads; this also matches the style of
    // assetTrackMetadataKeyNames(). The array is intentionally never released.
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"duration",
                            @"naturalSize",
                            @"preferredTransform",
                            @"preferredVolume",
                            @"preferredRate",
                            @"playable",
                            @"resolvedURL",
                            @"tracks",
                            @"availableMediaCharacteristicsWithMediaSelectionOptions",
                            nil];
    return keys;
}
3181
// Key paths observed on the AVPlayerItem by WebCoreAVFMovieObserver.
NSArray* itemKVOProperties()
{
    // Use C++ thread-safe function-local static initialization instead of the
    // previous check-then-assign lazy pattern, which could race if this were
    // ever reached from two threads; this also matches the style of
    // assetTrackMetadataKeyNames(). The array is intentionally never released.
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"presentationSize",
                            @"status",
                            @"asset",
                            @"tracks",
                            @"seekableTimeRanges",
                            @"loadedTimeRanges",
                            @"playbackLikelyToKeepUp",
                            @"playbackBufferFull",
                            @"playbackBufferEmpty",
                            @"duration",
                            @"hasEnabledAudio",
                            @"timedMetadata",
                            @"canPlayFastForward",
                            @"canPlayFastReverse",
                            nil];
    return keys;
}
3204
// Keys whose values are loaded on each AVAssetTrack. Created once via
// thread-safe static initialization and intentionally never released.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength",
                            @"mediaType",
                            @"enabled",
                            @"preferredTransform",
                            @"naturalSize",
                            nil];
    return keys;
}
3210
// Key paths observed on the AVPlayer by WebCoreAVFMovieObserver. The array is
// created once and intentionally never released.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
                            @"externalPlaybackActive", @"allowsExternalPlayback",
#endif
                            nil];
    return keys;
}
3220 } // namespace WebCore
3221
3222 @implementation WebCoreAVFMovieObserver
3223
// The callback pointer is not retained; the player private owns this observer
// and clears the pointer via -disconnect before going away.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (self)
        m_callback = callback;
    return self;
}
3232
// Sever the link to the player private: cancel any pending perform-requests
// targeting this observer and stop forwarding notifications.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = nullptr;
}
3238
// Asset metadata finished loading; forward to the player on the main thread.
- (void)metadataLoaded
{
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}
3246
// AVPlayerItem played to its end; forward to the player on the main thread.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}
3254
// Central KVO entry point for all observed AVPlayer/AVPlayerItem/AVPlayerLayer
// key paths. The matching handler is bound into a std::function here and then
// re-dispatched to the main thread at the bottom, guarded by a WeakPtr to the
// player private.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // True for the "prior" notification delivered before the value actually
    // changes (requires NSKeyValueObservingOptionPrior at registration — the
    // registrations are outside this file's view).
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    std::function<void ()> function;

    // AVPlayerLayer: readiness of the first displayable frame.
    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    // AVPlayerItemTrack: a track was enabled or disabled.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // AVPlayerItem, "prior" notifications: bump the pending-status-change count
    // so the matching DidChange handler can defer state updates until all
    // in-flight changes have landed.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem; dispatch to the matching
        // *DidChange handler with the new value coerced to the expected type.
        if ([keyPath isEqualToString:@"status"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<id>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time (clamped to zero when numeric) so
            // the metadata handler knows when the items arrived.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
            function = std::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }
    
    // Unrecognized key path (or "prior" notification with no handler): nothing to do.
    if (!function)
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}
3353
3354 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput delivered new attributed strings (caption cues).
// Hop to the main thread before handing them to the player private.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    // Keep self and both arrays alive across the thread hop, and re-check
    // m_callback on the main thread since it may have been cleared by then.
    RetainPtr<WebCoreAVFMovieObserver> protectedSelf = self;
    RetainPtr<NSArray> protectedStrings = strings;
    RetainPtr<NSArray> protectedSamples = nativeSamples;
    callOnMainThread([protectedSelf, protectedStrings, protectedSamples, itemTime] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback)
            callback->processCue(protectedStrings.get(), protectedSamples.get(), toMediaTime(itemTime));
    });
}
3373
// The legible output's cue sequence was flushed (e.g. after a seek); clear the
// player's pending cues on the main thread.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;

    // Keep self alive across the thread hop; re-check m_callback there.
    RetainPtr<WebCoreAVFMovieObserver> protectedSelf = self;
    callOnMainThread([protectedSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback)
            callback->flushCues();
    });
}
3387 #endif
3388
3389 @end
3390
3391 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
3392 @implementation WebCoreAVFLoaderDelegate
3393
// The callback pointer is not retained; the player private owns this delegate
// and resets the pointer via -setCallback: before going away.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (self)
        m_callback = callback;
    return self;
}
3402
// AVAssetResourceLoader asks whether we will service this loading request.
// Answer YES immediately and decide on the main thread; if the player is gone,
// or declines the request, finish the load with no error.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> protectedRequest = loadingRequest;
    callOnMainThread([protectedSelf, protectedRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
        if (!callback || !callback->shouldWaitForLoadingOfResource(protectedRequest.get()))
            [protectedRequest finishLoadingWithError:nil];
    });

    return YES;
}
3424
// AVAssetResourceLoader asks whether we will respond to an authentication
// challenge. Server-trust challenges are declined outright; everything else is
// decided on the main thread, cancelling the challenge if the player is gone
// or declines it.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> protectedChallenge = challenge;
    callOnMainThread([protectedSelf, protectedChallenge] {
        MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback;
        if (!callback || !callback->shouldWaitForResponseToAuthenticationChallenge(protectedChallenge.get()))
            [[protectedChallenge sender] cancelAuthenticationChallenge:protectedChallenge.get()];
    });

    return YES;
}
3449
// AVFoundation cancelled a loading request we previously agreed to service;
// tell the player private on the main thread so it can abandon the load.
- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> protectedRequest = loadingRequest;
    callOnMainThread([protectedSelf, protectedRequest] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = protectedSelf->m_callback)
            callback->didCancelLoadingRequest(protectedRequest.get());
    });
}
3464
3465 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
3466 {
3467     m_callback = callback;