[iOS] Unable to play .mp4 file over http with basic http authentication
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVTrackPrivateAVFObjCImpl.h"
32 #import "AudioSourceProviderAVFObjC.h"
33 #import "AudioTrackPrivateAVFObjC.h"
34 #import "AuthenticationChallenge.h"
35 #import "BlockExceptions.h"
36 #import "CDMSessionAVFoundationObjC.h"
37 #import "Cookie.h"
38 #import "ExceptionCodePlaceholder.h"
39 #import "FloatConversion.h"
40 #import "FloatConversion.h"
41 #import "GraphicsContext.h"
42 #import "GraphicsContextCG.h"
43 #import "InbandMetadataTextTrackPrivateAVF.h"
44 #import "InbandTextTrackPrivateAVFObjC.h"
45 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
46 #import "OutOfBandTextTrackPrivateAVF.h"
47 #import "URL.h"
48 #import "Logging.h"
49 #import "MediaSelectionGroupAVFObjC.h"
50 #import "MediaTimeAVFoundation.h"
51 #import "PlatformTimeRanges.h"
52 #import "QuartzCoreSPI.h"
53 #import "SecurityOrigin.h"
54 #import "SerializedPlatformRepresentationMac.h"
55 #import "SoftLinking.h"
56 #import "TextEncoding.h"
57 #import "TextTrackRepresentation.h"
58 #import "UUID.h"
59 #import "VideoTrackPrivateAVFObjC.h"
60 #import "WebCoreAVFResourceLoader.h"
61 #import "WebCoreCALayerExtras.h"
62 #import "WebCoreSystemInterface.h"
63 #import <objc/runtime.h>
64 #import <runtime/DataView.h>
65 #import <runtime/JSCInlines.h>
66 #import <runtime/TypedArrayInlines.h>
67 #import <runtime/Uint16Array.h>
68 #import <runtime/Uint32Array.h>
69 #import <runtime/Uint8Array.h>
70 #import <wtf/CurrentTime.h>
71 #import <wtf/Functional.h>
72 #import <wtf/ListHashSet.h>
73 #import <wtf/NeverDestroyed.h>
74 #import <wtf/text/CString.h>
75 #import <wtf/text/StringBuilder.h>
76
77 #if ENABLE(AVF_CAPTIONS)
78 #include "TextTrack.h"
79 #endif
80
81 #import <AVFoundation/AVFoundation.h>
82 #if PLATFORM(IOS)
83 #import "WAKAppKitStubs.h"
84 #import <CoreImage/CoreImage.h>
85 #import <mach/mach_port.h>
86 #else
87 #import <Foundation/NSGeometry.h>
88 #import <QuartzCore/CoreImage.h>
89 #endif
90 #import <CoreMedia/CoreMedia.h>
91
92 #if USE(VIDEOTOOLBOX)
93 #import <CoreVideo/CoreVideo.h>
94 #import <VideoToolbox/VideoToolbox.h>
95 #endif
96
97 #if USE(CFNETWORK)
98 #include "CFNSURLConnectionSPI.h"
99 #endif
100
namespace std {
// Specialization supplying value_type for WTF's HashSet iterator so that
// std algorithms can be used with HashSet<RefPtr<MediaSelectionOptionAVFObjC>>.
// NOTE(review): presumably required by a std algorithm call elsewhere in this
// file or in MediaSelectionGroupAVFObjC — confirm before removing.
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
106
// Container layer whose sublayers are always resized to fill its bounds.
// Used to host the AVPlayerLayer so the video layer tracks the container.
@interface WebVideoContainerLayer : CALayer
@end

@implementation WebVideoContainerLayer

- (void)setBounds:(CGRect)bounds
{
    [super setBounds:bounds];
    // Keep every sublayer (i.e. the video layer) pinned to the new bounds.
    for (CALayer* layer in self.sublayers)
        layer.frame = bounds;
}
@end
119
120 #if ENABLE(AVF_CAPTIONS)
121 // Note: This must be defined before our SOFT_LINK macros:
122 @class AVMediaSelectionOption;
123 @interface AVMediaSelectionOption (OutOfBandExtensions)
124 @property (nonatomic, readonly) NSString* outOfBandSource;
125 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
126 @end
127 #endif
128
129 #if PLATFORM(IOS)
130 @class AVPlayerItem;
131 @interface AVPlayerItem (WebKitExtensions)
132 @property (nonatomic, copy) NSString* dataYouTubeID;
133 @end
134 #endif
135
136 @interface AVURLAsset (WebKitExtensions)
137 @property (nonatomic, readonly) NSURL *resolvedURL;
138 @end
139
140 typedef AVPlayer AVPlayerType;
141 typedef AVPlayerItem AVPlayerItemType;
142 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
143 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
144 typedef AVMetadataItem AVMetadataItemType;
145 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
146 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
147
148 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
149 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
150 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
151 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
152
153 #if USE(VIDEOTOOLBOX)
154 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
155 #endif
156
157 SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
158 SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
159 SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
160 SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))
161
162 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
163 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
164 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
165 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
166 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
167 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
168 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
169 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
170
171 #if USE(VIDEOTOOLBOX)
172 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
173 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
174 #endif
175
176 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
177 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
178 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
179 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
180 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
181 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
182 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
183
184 SOFT_LINK_CLASS(CoreImage, CIContext)
185 SOFT_LINK_CLASS(CoreImage, CIImage)
186
187 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
188 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
189 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
190 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
191 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
192 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
193 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
194 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
195 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
196 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
197 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
198 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
199 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
200 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
201
202 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
203
204 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
205
206 #define AVPlayer getAVPlayerClass()
207 #define AVPlayerItem getAVPlayerItemClass()
208 #define AVPlayerLayer getAVPlayerLayerClass()
209 #define AVURLAsset getAVURLAssetClass()
210 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
211 #define AVMetadataItem getAVMetadataItemClass()
212
213 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
214 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
215 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
216 #define AVMediaTypeVideo getAVMediaTypeVideo()
217 #define AVMediaTypeAudio getAVMediaTypeAudio()
218 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
219 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
220 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
221 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
222 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
223 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
224 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
225 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
226 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
227 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
228
229 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
230 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
231 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
232
233 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
234 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
235 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
236
237 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
238 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
239 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
240 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
241
242 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
243 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
244 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
245 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
246 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
247 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
248 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
249 #endif
250
251 #if ENABLE(AVF_CAPTIONS)
252 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
255 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
256 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
257 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
258 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
259 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
260 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
261 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
262
263 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
264 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
265 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
266 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
267 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
268 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
269 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
270 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
271 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
272 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
273 #endif
274
275 #if ENABLE(DATACUE_VALUE)
276 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
277 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
278 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
279 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
280 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
281
282 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
283 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
284 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
285 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
286 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
287 #endif
288
289 #if PLATFORM(IOS)
290 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
291
292 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
293 #endif
294
295 #define kCMTimeZero getkCMTimeZero()
296
297 using namespace WebCore;
298
299 enum MediaPlayerAVFoundationObservationContext {
300     MediaPlayerAVFoundationObservationContextPlayerItem,
301     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
302     MediaPlayerAVFoundationObservationContextPlayer,
303     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
304 };
305
// Observer object that forwards KVO change notifications, the item's
// did-play-to-end notification and (when legible output is supported)
// attributed-string caption payloads back to the owning
// MediaPlayerPrivateAVFoundationObjC. -disconnect severs the back-pointer.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback;
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath has no explicit type and therefore defaults to id;
// the implementation (not visible here) must declare it identically — confirm
// before tightening the type to NSString *.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
325
326 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Bridges AVAssetResourceLoader "should wait for loading" callbacks to the
// owning player object. setCallback:0 detaches the delegate during teardown
// so no callback can reach a destroyed player.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
334 #endif
335
336 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Receives AVPlayerItemVideoOutput pull-mode notifications for the owning
// player. NOTE(review): m_semaphore presumably signals frame availability to
// a waiting paint path — confirm against the implementation, which is not
// visible in this chunk.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback;
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
346 #endif
347
348 namespace WebCore {
349
350 static NSArray *assetMetadataKeyNames();
351 static NSArray *itemKVOProperties();
352 static NSArray* assetTrackMetadataKeyNames();
353
354 #if !LOG_DISABLED
// Printable form of a bool, for LOG() messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
359 #endif
360
361 #if ENABLE(ENCRYPTED_MEDIA_V2)
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
// Process-wide map from a MediaPlayer to its private implementation; entries
// are inserted by the constructor and removed by the destructor. Only built
// when ENCRYPTED_MEDIA_V2 is enabled.
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
};
368 #endif
369
370 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily-created shared serial queue on which all WebCoreAVFLoaderDelegate
// resource-loader callbacks are delivered.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t creationGuard;
    dispatch_once(&creationGuard, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
380 #endif
381
382 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily-created shared serial queue on which WebCoreAVFPullDelegate
// video-output callbacks are delivered.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullQueue;
    static dispatch_once_t creationGuard;
    dispatch_once(&creationGuard, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
392 #endif
393
394 #if USE(CFNETWORK)
// Adapts WebCore's AuthenticationClient interface to an
// NSURLAuthenticationChallenge sender, so credentials gathered by WebCore
// (e.g. for HTTP basic authentication on media loads) can answer challenges
// raised through CFNetwork. Lifetime is managed by RefCounted; the ref/deref
// using-declarations expose the base-class operations publicly.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    virtual void refAuthenticationClient() override { ref(); }
    virtual void derefAuthenticationClient() override { deref(); }

    // FIX: the Credential parameter was previously unnamed while the body
    // referenced `credential`, which cannot compile; name the parameter.
    virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // The following two sender messages are optional protocol additions, so
    // probe with respondsToSelector: before sending them.
    virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
};
444 #endif
445
// Factory handed to the media-engine registrar below.
// FIX: the opening brace of the function body was missing.
PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
{
    return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
}
450
// Registers this engine with MediaPlayer, but only when the soft-linked
// AVFoundation stack is actually present on this system.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar(create, getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
456
// Constructs the player in its "nothing loaded" state. The Objective-C
// observer/delegate helpers are created eagerly here but are only attached to
// AVFoundation objects once an AVPlayer/AVPlayerItem exists.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(IOS_AIRPLAY)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register in the global player -> private map; removed in the destructor.
    playerToPrivateMap().set(player, this);
#endif
}
493
// Detaches every delegate/observer back-pointer before teardown so that no
// asynchronous callback can reach this object after destruction, then tears
// down rendering and cancels any in-flight load.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Undo the registration made in the constructor.
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Sever the resource-loader delegate and invalidate outstanding loads.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // The semaphore is a raw dispatch object, not a RetainPtr; release it
    // manually if one was ever created.
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
518
// Tears down all AVFoundation state: rendering, notification and KVO
// observers, the asset, the player item and the player, then resets every
// KVO-cached property to its "nothing loaded" value. Load-state changes are
// suppressed for the duration so cancellation completion handlers are inert.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Remove every KVO registration that was made against the item.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    // Remove the periodic time observer and player-level KVO registrations.
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(IOS_AIRPLAY)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was given an "enabled" observer; remove them all.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setPlayerItem(nullptr);
#endif

    setIgnoreLoadStateChanges(false);
}
581
// True once createVideoLayer() has been asked to run, even though the layer
// itself is created asynchronously on the main thread.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
586
// A context (software paint) renderer exists when either a video output or
// an image generator has been created.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
595
// Creates whichever context renderer this build supports: an
// AVPlayerItemVideoOutput when available, otherwise an AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
604
// Lazily creates the AVAssetImageGenerator used to snapshot video frames when
// painting without a video output. No-op until an asset exists or if a
// generator was already created.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerances request frames at the exact time asked for, rather than
    // the nearest keyframe.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
621
// Destroys both possible context renderers; each destroy call is a no-op if
// the corresponding renderer was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
629
// Drops the snapshot generator created by createImageGenerator(), if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (m_imageGenerator) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());
        m_imageGenerator = nullptr;
    }
}
639
// Requests creation of the layer renderer. The work is deferred to the main
// thread via callOnMainThread; the weak pointer guards against this object
// being destroyed before the deferred block runs, and the entry conditions
// are re-checked inside the block because state may have changed meanwhile.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        // With VideoToolbox, a video output accompanies the layer.
        if (!m_videoOutput)
            createVideoOutput();
#endif

        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
665
// Creates the AVPlayerLayer, registers KVO on readyForDisplay, and parents it:
// on iOS inside a WebVideoContainerLayer (or the fullscreen layer when one is
// active); elsewhere the layer is simply sized to the content box.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // readyForDisplay drives m_cachedIsReadyForDisplay / hasAvailableVideoFrame().
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = player()->client().mediaPlayerContentBoxRect().pixelSnappedSize();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    [m_videoLayer web_disableAllActions];
    m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
#ifndef NDEBUG
    [m_videoInlineLayer setName:@"WebVideoContainerLayer"];
#endif
    [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    if (m_videoFullscreenLayer) {
        // Fullscreen is active: size to the fullscreen frame and parent there.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else {
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
    }
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
701
// Unhooks the KVO registration and the player from the layer created by
// createAVPlayerLayer(), detaches it from any fullscreen parent on iOS, and
// releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
    m_videoInlineLayer = nil;
#endif

    m_videoLayer = nil;
}
720
// When rendering to a layer, frame availability is the KVO-cached
// readyForDisplay state; otherwise it is whether painting ever drew a frame.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    return currentRenderingMode() == MediaRenderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
728
729 #if ENABLE(AVF_CAPTIONS)
// Maps an out-of-band track kind onto the AVFoundation media characteristics
// advertised for it; unknown kinds fall back to plain spoken-dialog captions.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Description:
        return @[AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility];
    case PlatformTextTrack::Forced:
        return @[AVMediaCharacteristicContainsOnlyForcedSubtitles];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        return @[AVMediaCharacteristicTranscribesSpokenDialogForAccessibility];
    }
}
747     
// Called when an out-of-band text track's mode changes; forwards to the
// cross-platform trackModeChanged() notification.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
752     
// Pushes the modes requested via the platform out-of-band track sources onto
// our out-of-band InbandTextTrackPrivateAVF wrappers. Tracks are matched to
// sources by the unique identifier stored on the AVMediaSelectionOption.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven by platform track sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            // Match the selection option to its source by unique identifier.
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
783 #endif
784
785
// Returns the canonical form of |url| as computed by the NSURLProtocol
// machinery, falling back to the as-parsed URL when canonicalization is not
// possible (empty URL, request-creation failure, or no canonical request).
static NSURL *canonicalURL(const String& url)
{
    NSURL *parsedURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return parsedURL;

    RetainPtr<NSURLRequest> request = adoptNS([[NSURLRequest alloc] initWithURL:parsedURL]);
    if (!request)
        return parsedURL;

    if (NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:request.get()])
        return [canonicalRequest URL];

    return parsedURL;
}
802
803 #if PLATFORM(IOS)
// Converts a WebCore Cookie into an NSHTTPCookie suitable for passing to
// AVURLAsset via AVURLAssetHTTPCookiesKey. cookie.expires is milliseconds
// since the epoch, so divide by 1000 for NSDate.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    NSDictionary *requiredProperties = @{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    };

    RetainPtr<NSMutableDictionary> properties = adoptNS([requiredProperties mutableCopy]);
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
821 #endif
822
// Builds the AVURLAsset used for loading: sets remote/local reference
// restrictions, forwards Referer and User-Agent headers, opts into iTunes
// query-component inheritance when requested, registers out-of-band text
// tracks, and (iOS) binds the network interface and forwards site cookies.
// No-op if an asset already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Disallow assets that mix local and remote media references.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    // NOTE(review): the header-fields option key is referenced by its string
    // name rather than a declared constant — presumably SPI; confirm it is
    // still honored on supported OS versions.
    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

#if ENABLE(AVF_CAPTIONS)
    // Register out-of-band text tracks so AVFoundation exposes them as media
    // selection options alongside any in-band tracks.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the page's cookies for this URL to AVFoundation so its media
    // loads carry the same cookies as the page.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route key/resource loading requests through our delegate on its queue.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
907
// Swaps |item| into the AVPlayer. The swap must happen on the main thread,
// so off-main callers hop to the main queue while keeping both the player
// and the item alive across the dispatch.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (!pthread_main_np()) {
        RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
        RetainPtr<AVPlayerItemType> strongItem = item;
        dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
            [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
        });
        return;
    }

    [m_avPlayer replaceCurrentItemWithPlayerItem:item];
}
924
// Creates the AVPlayer, registers KVO for "rate" (and AirPlay state where
// enabled), disables AVFoundation's automatic media selection, applies the
// wireless-playback policy, creates the video layer for video elements, and
// attaches any already-created player item.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
    updateDisableExternalPlayback();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Media selection (audio/caption options) is driven by WebCore, so stop
    // AVFoundation from applying its own selection criteria.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
957
// Creates the AVPlayerItem for the current asset and wires up everything
// that observes it: the did-play-to-end notification, KVO for each property
// in itemKVOProperties(), a WebVTT legible output for caption delivery, and
// the Web Audio source provider. Attaches the item to the player if one
// already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Prior notifications deliver a will-change callback before each change.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if PLATFORM(IOS)
    AtomicString value;
    if (player()->doesHaveAttribute("data-youtube-id", &value))
        [m_avPlayerItem.get() setDataYouTubeID: value];
 #endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Receive caption cues as WebVTT and render them ourselves rather than
    // letting AVFoundation draw them (suppressesPlayerRendering below).
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setPlayerItem(m_avPlayerItem.get());
#endif

    setDelayCallbacks(false);
}
1005
// Asks AVFoundation to load the asset's "playable" key once per asset; when
// the answer arrives, schedules the AssetPlayabilityKnown notification on
// the main thread. The weak pointer guards against this object having been
// destroyed before the completion handler runs.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1022
// Starts asynchronous loading of the asset-level metadata keys and, once the
// "tracks" key has loaded, the per-track metadata keys as well. A dispatch
// group joins all of the loads; when it drains, metadataLoaded is delivered
// to the observer on the main thread. The group is created at +1 and
// released in the notify block (dispatch objects are managed manually here).
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
    dispatch_group_enter(metadataLoadingGroup);
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Fan out to per-track loads only if we still exist and the
            // tracks key actually loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup);
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup);
                    }];
                }
            }
            // Balances the enter issued before the asset-level load.
            dispatch_group_leave(metadataLoadingGroup);
        });
    }];

    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });

        dispatch_release(metadataLoadingGroup);
    });
}
1054
// Maps the cached AVPlayerItem state onto the cross-platform item status.
// Buffering states are reported from most ready (likely to keep up) to least
// ready (buffer empty), matching the priority of the original checks.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1073
// Exposes the underlying AVPlayer to clients that need the platform object
// (e.g. for native fullscreen).
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia media;
    media.type = PlatformMedia::AVFoundationMediaPlayerType;
    media.media.avfMediaPlayer = m_avPlayer.get();
    return media;
}
1082
// Returns the layer WebCore should composite: on iOS the inline container
// layer (the AVPlayerLayer itself may be reparented into the fullscreen
// layer), elsewhere the AVPlayerLayer. Null until layer creation has been
// requested.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
#if PLATFORM(IOS)
    return m_haveBeenAskedToCreateLayer ? m_videoInlineLayer.get() : nullptr;
#else
    return m_haveBeenAskedToCreateLayer ? m_videoLayer.get() : nullptr;
#endif
}
1091
1092 #if PLATFORM(IOS)
// Moves the video layer between the inline container and the fullscreen host
// layer inside a non-animating CATransaction. When the layer moves between
// two different CAContexts, a shared fence port synchronizes both contexts'
// commits so the reparenting is not visible as a flash.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    
    // Find the root of the layer tree the video layer is moving into.
    CALayer *oldRootLayer = videoFullscreenLayer;
    while (oldRootLayer.superlayer)
        oldRootLayer = oldRootLayer.superlayer;

    CALayer *newRootLayer = nil;
    
    if (m_videoFullscreenLayer && m_videoLayer) {
        // Entering fullscreen: size the layer to the fullscreen frame and
        // reparent it under the fullscreen host.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoLayer removeFromSuperlayer];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoFullscreenLayer.get();
    } else if (m_videoInlineLayer && m_videoLayer) {
        // Leaving fullscreen: restore the layer into the inline container.
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoInlineLayer.get();
    } else if (m_videoLayer)
        [m_videoLayer removeFromSuperlayer];

    while (newRootLayer.superlayer)
        newRootLayer = newRootLayer.superlayer;

    if (oldRootLayer && newRootLayer && oldRootLayer != newRootLayer) {
        // The layer crossed between two render contexts; fence both so they
        // commit this transaction together.
        mach_port_t fencePort = 0;
        for (CAContext *context in [CAContext allContexts]) {
            if (context.layer == oldRootLayer || context.layer == newRootLayer) {
                if (!fencePort)
                    fencePort = [context createFencePort];
                else
                    [context setFencePort:fencePort];
            }
        }
        mach_port_deallocate(mach_task_self(), fencePort);
    }
    [CATransaction commit];

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        // Captions are hosted in the fullscreen layer too; keep them sized.
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }
#if ENABLE(IOS_AIRPLAY)
    updateDisableExternalPlayback();
#endif
}
1147
// Updates the cached fullscreen frame and resizes the video layer to fill
// it, animating the resize. Does nothing beyond caching unless a fullscreen
// layer is installed.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer) {
        [m_videoLayer setStyle:nil]; // This enables actions, i.e. implicit animations.
        [CATransaction begin];
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];
        [CATransaction commit];
        // Re-disable implicit animations for subsequent property changes.
        [m_videoLayer web_disableAllActions];
    }
    syncTextTrackBounds();
}
1163
// Translates the cross-platform gravity value into the corresponding
// AVPlayerLayer videoGravity string and applies it. Unrecognized values
// assert and fall back to aspect-fit, matching the previous default.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:videoGravity];
}
1182
// Returns the most recently observed timed metadata, or nil when none has
// been cached yet.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1189
// Returns the AVPlayerItem access (playback) log rendered as a string, or
// the empty string when no player item exists.
// NOTE(review): if the log is nil or its data cannot be decoded, the
// resulting String is null rather than empty — confirm callers accept that.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();
    
    AVPlayerItemAccessLog *log = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1200
// Returns the AVPlayerItem error log rendered as a string, or the empty
// string when no player item exists.
// NOTE(review): as with accessLog(), a nil log yields a null String.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *log = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logString = adoptNS([[NSString alloc] initWithData:[log extendedLogData] encoding:[log extendedLogDataStringEncoding]]);

    return logString.get();
}
1211 #endif
1212
// Shows or hides the video layer, with implicit animations disabled so the
// change takes effect immediately.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];

    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];

    [CATransaction commit];
}
1221     
// Starts playback by applying the rate requested by the media element.
// The requested rate is sampled exactly once so m_cachedRate always matches
// the rate actually handed to AVPlayer (the original queried requestedRate()
// twice, which could leave the cache out of sync if the requested rate
// changed between the two calls).
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1233
// Pauses playback by driving the AVPlayer rate to zero and recording the
// new rate in the cache.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    [m_avPlayer.get() setRate:0];
    m_cachedRate = 0;
    setDelayCallbacks(false);
}
1245
// Reports the media duration. Returns invalid before the asset has loaded
// (asking earlier would fetch the answer synchronously), infinite for
// indefinite durations (e.g. live streams), and invalid for anything else.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will
    // fetch the answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    // Prefer the player item's duration once it is ready to play; some
    // assets never report a duration of their own.
    bool itemIsReady = m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay;
    CMTime cmDuration = itemIsReady ? [m_avPlayerItem.get() duration] : [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1270
// Returns the current playback position clamped to be non-negative, or zero
// when there is no player item yet or it reports a non-numeric time.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(itemTime), MediaTime::zeroTime());
}
1282
// Performs an asynchronous seek on the player item with the given
// tolerances. Completion is bounced back to the main thread through a weak
// pointer so a destroyed player is never called back.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partially accumulated metadata cues would span the seek point.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1309
// Applies the element's volume to the AVPlayer. On iOS the request is
// ignored (volume is not settable through this path there).
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1322
// NOTE(review): apart from logging, this is a no-op — caption visibility
// appears to be driven through the media selection / legible output paths
// elsewhere in this class rather than a player-level switch.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1332
// Applies a new playback rate to the AVPlayer, remembering it so rate() can
// answer without querying AVFoundation.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer setRate:rate];
    setDelayCallbacks(false);
}
1341
// Returns the rate last handed to the AVPlayer, or 0 before metadata is
// available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1349
// Converts the cached loadedTimeRanges (NSValue-wrapped CMTimeRanges) into
// a PlatformTimeRanges object, skipping invalid or empty ranges. Returns an
// empty range set when there is no player item.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto timeRanges = PlatformTimeRanges::create();

    if (!m_avPlayerItem)
        return timeRanges;

    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange))
            timeRanges->add(toMediaTime(timeRange.start), toMediaTime(CMTimeRangeGetEnd(timeRange)));
    }
    return timeRanges;
}
1364
// Returns the earliest seekable time across all cached seekable ranges, or
// zero when there are no valid ranges.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliest = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliest = std::min(earliest, toMediaTime(range.start));
    }
    return foundValidRange ? earliest : MediaTime::zeroTime();
}
1384
// Returns the latest seekable time across all seekable ranges. Lazily pulls
// the ranges from the player item if KVO has not delivered them yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latest = std::max(latest, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latest;
}
1402
// Returns the end of the furthest loaded (buffered) range, or zero when no
// loaded ranges have been observed yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime furthestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        furthestEnd = std::max(furthestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }

    return furthestEnd;
}
1421
// Returns the total sample-data length across all cached tracks, computed
// once and memoized in m_cachedTotalBytes (mutated from this const method).
// Returns 0 before metadata is available.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    for (AVPlayerItemTrack *thisTrack in m_cachedTracks.get())
        m_cachedTotalBytes += [[thisTrack assetTrack] totalSampleDataLength];

    return m_cachedTotalBytes;
}
1435
// Replaces the current asset reference; passing nil releases the asset.
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
1440
// Derives the cross-platform asset status from the load status of each
// metadata key: loading while any key is still unloaded, failed/cancelled if
// any key failed or was cancelled, otherwise loaded — upgraded to playable
// when the asset reports itself playable.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1469
// Returns the NSError code reported for the asset's "playable" key, or 0
// when there is no asset or no error (messaging a nil NSError returns 0).
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return [error code];
}
1479
// Paints the current video frame into |context|, preferring the video-output
// path when it has a frame and falling back to the image-generator path.
// Records that a frame has been drawn for hasAvailableVideoFrame().
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1500
// Entry point for software painting of the video element into |context|.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if we already have an image generator or video output available.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1516
// Paints the frame at the current time into |context| using the
// AVAssetImageGenerator fallback path. The context is flipped because
// CGContextDrawImage draws with a bottom-left origin.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);

    CGRect destinationRect = CGRectMake(0, 0, rect.width(), rect.height());
    CGContextDrawImage(context->platformContext(), destinationRect, image.get());
}
1532
// Returns the lazily-built set of MIME types AVFoundation reports it can play,
// lowercased for case-insensitive lookups. Built once from
// +[AVURLAsset audiovisualMIMETypes].
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add([mimeType lowercaseString]);

    return cache;
}
// Creates a CGImage snapshot of the frame at |time|, sized to fit |rect|,
// via AVAssetImageGenerator. The raw image is copied into the device RGB
// color space before being returned.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Cap the generated image at the paint rect's size to avoid decoding
    // larger frames than will be drawn.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // Timescale 600 is a common multiple of typical video frame rates.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1570
// Reports every MIME type this engine can handle (see mimeTypeCache()).
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}
1576 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// True for the key systems this engine understands: Apple FairPlay Streaming
// (both spellings) and W3C Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps")
        || equalIgnoringCase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringCase(keySystem, "org.w3c.clearkey");
}
1583 #endif
1584
// Answers canPlayType() for this engine: checks key-system constraints (when
// EME is enabled), rejects MSE, then consults the MIME type cache and, when
// codecs are supplied, AVFoundation's extended-MIME-type check.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringCase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringCase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // Media Source Extensions are handled by a different engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1625
// Returns whether the given key system (optionally constrained by |mimeType|)
// is supported. Clear Key is restricted to HLS containers, mirroring the
// check in supportsType().
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Previously returned MediaPlayer::IsNotSupported — an enum value in a
        // bool function; same value (0) but the wrong type.)
        if (equalIgnoringCase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringCase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1648
1649 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1650 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Satisfies an AVAssetResourceLoadingRequest directly from in-memory key data,
// clamping the requested byte range to the available bytes.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    // Advertise the full key length and byte-range support before responding.
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the key's length.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A range outside the key data cannot be satisfied.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1676 #endif
1677
// Decides whether WebKit will service an AVFoundation resource-loading request.
// "skd" (FairPlay) and "clearkey" URLs are routed through the EME key-request
// machinery; everything else is handed to a WebCoreAVFResourceLoader.
// Returns true when the request will be answered asynchronously.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Little-endian 32-bit length prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): `keyURI.length() / sizeof(unsigned char)` equals
        // keyURI.length(); possibly `keyURISize / sizeof(UChar)` was intended
        // (same value) — confirm before changing.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Park the request until the key arrives (see didCancelLoadingRequest).
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // Answer immediately from the key cache when possible.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // All other schemes are loaded through WebKit's own network stack.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1734
// Forwards an AVFoundation authentication challenge (e.g. HTTP basic auth for
// media loads) to the MediaPlayer client, wrapped in WebCore's platform
// AuthenticationChallenge type.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    // On CFNetwork builds the NSURLAuthenticationChallenge must be converted
    // to a CFURLAuthChallengeRef with a client that relays the response.
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1747
// Stops the WebCore loader (if any) backing a cancelled AVFoundation
// resource-loading request. (The unused `scheme` local was removed.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1757
// Drops the bookkeeping entry for a request whose loading has finished;
// this releases our reference to the associated WebCoreAVFResourceLoader.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1762 #endif
1763
// The engine is usable only when both soft-linked frameworks are present.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    if (!AVFoundationLibrary())
        return false;
    return CoreMediaLibrary();
}
1768
// Maps a requested time to the nearest time the media can actually provide.
// Currently an identity mapping in both branches; kept as-is pending the
// radar referenced below.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1777
// How long (in seconds) a cached currentTime value may be reused before
// re-querying AVFoundation. Newer platforms (iOS, OS X 10.10+) disable the
// cache entirely.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1786
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;

    // Apply the gravity change without an implicit animation.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1805
// Returns the first enabled track in |tracks|, or nil when none is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger location = [tracks indexOfObjectPassingTest:^(id obj, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(obj) isEnabled];
    }];
    return location == NSNotFound ? nil : [tracks objectAtIndex:location];
}
1815
// Re-derives the cached hasVideo/hasAudio/hasClosedCaptions state whenever the
// tracks collection changes, updates the track lists, and fires
// characteristicsChanged() when the primary audio language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so a change can be
    // detected after the track state is rebuilt below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic notifications until all track state is updated.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Report the natural size with the track's preferred transform applied.
        presentationSizeDidChange(firstEnabledVideoTrack ? IntSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : IntSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify each enabled player-item track by its media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option also counts as audio/video.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Fall back to legacy closed-caption tracks when no legible options exist.
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}
1916
1917 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of type |trackType| in |tracks| against the
// existing |oldItems|, creating wrappers for new tracks via |itemFactory|,
// removing wrappers whose track disappeared, and notifying the MediaPlayer
// through the supplied member-function pointers. |oldItems| is updated in
// place to the surviving + newly-added set.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Tracks currently backing our wrapper items.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition old items into removed and surviving sets.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly-appeared track.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player after oldItems reflects the final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1961
1962 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Selection-group flavor of the diff above: reconciles |oldItems| against the
// current options of |group|, skipping options that are backed by a persistent
// AVAssetTrack (those are handled by the track-based overload), and notifies
// the MediaPlayer of additions/removals. |oldItems| is updated in place.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions();

    // Only add selection options which do not have an associated persistent track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        // -track is probed via respondsToSelector: because it is not available
        // on all OS versions.
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    // Options currently backing our wrapper items.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition old items into removed and surviving sets.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly-appeared option.
    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player after oldItems reflects the final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2022 #endif
2023
// Reconciles the audio track wrappers with the current player-item tracks and
// (when available) the audible media-selection group, then refreshes each
// wrapper's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the audible selection group the first time it is needed.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2049
// Reconciles the video track wrappers with the current player-item tracks and
// (when available) the visual media-selection group, then refreshes each
// wrapper's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection group the first time it is needed.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh the *video* tracks. The previous code iterated m_audioTracks
    // here — a copy/paste slip from updateAudioTracks() that left the video
    // wrappers' cached properties stale.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2075
// A caption representation layer is only required while presenting fullscreen
// video on iOS.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    return !!m_videoFullscreenLayer;
#else
    return false;
#endif
}
2084
// Keeps the caption layer's frame in sync with the video content area while
// in fullscreen on iOS; a no-op elsewhere.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;
    
    // Prefer the actual video rect; fall back to the fullscreen frame when the
    // video layer is absent.
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2095
// Installs (or removes) the platform layer that renders caption cues, parented
// under the fullscreen video layer on iOS. Passing null removes the current
// representation layer.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just make sure its bounds are up to date.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Only attach when fullscreen; syncTextTrackBounds() sizes the new layer.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2119 #endif // ENABLE(VIDEO_TRACK)
2120
2121 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates and returns the Web Audio source provider that taps this
// player item's audio.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider)
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    return m_provider.get();
}
2128 #endif
2129
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    // Without an asset there is no meaningful presentation size to publish.
    if (m_avAsset)
        setNaturalSize(roundedIntSize(m_cachedPresentationSize));
}
2137     
// True only when the asset's resolved URL (after redirects) shares
// scheme/host/port with the originally requested URL. Returns false until the
// "resolvedURL" key has finished loading.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2147
2148 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates the AVPlayerItemVideoOutput used to pull decoded pixel buffers and
// attaches it to the player item. No-op without a player item or when an
// output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox available, accept the decoder's native format; pixels
    // are converted later (see createPixelBuffer()).
    NSDictionary* attributes = nil;
#else
    // Otherwise request 32BGRA directly so buffers can be drawn via CG.
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2171
// Detaches the video output from the player item (if any) and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = 0;
}
2183
// Pulls the pixel buffer for the item's current time from the video output.
// Returns null when no new frame is available. Under USE(VIDEOTOOLBOX) the
// buffer is converted to 32BGRA via a (lazily created) VTPixelTransferSession.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    // Nothing new to show since the last pull.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Convert the native-format buffer into a fresh 32BGRA buffer.
    CVPixelBufferRef outputBuffer;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2224
// Whether a video frame can be painted right now: either a previously captured
// image exists, or the video output has a fresh pixel buffer for the current
// item time. Creates the video output on demand.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A cached image from an earlier pull is still paintable.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2238
// CGDataProvider callback: lock the pixel buffer for CPU read access and hand
// back its base address. Unlocked in CVPixelBufferReleaseBytePointerCallback.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(buffer);
}
2245
// CGDataProvider callback: balance the read-only lock taken in
// CVPixelBufferGetBytePointerCallback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
}
2251
// CGDataProvider callback: release the retain taken when the provider was
// created (see createImageFromPixelBuffer()).
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CFRelease(buffer);
}
2257
// Wraps a 32BGRA pixel buffer in a CGImage without copying the pixels: a
// direct-access CGDataProvider reads straight out of the (locked) buffer.
// The buffer is retained here and released by CVPixelBufferReleaseInfoCallback
// when the provider is destroyed.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // Little-endian BGRA with alpha first matches kCVPixelFormatType_32BGRA.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2275
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the
    // pixel buffer for the requested time has already been retrieved. In that case the
    // last valid image (if any) should remain displayed, so only replace it on success.
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2286
// Paints the current video frame, obtained from the AVPlayerItemVideoOutput, into the
// given graphics context, applying the track's preferred transform.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& outputRect)
{
    // If the output exists but has produced nothing yet, block (bounded) until a frame
    // is announced so the first paint is not empty.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(*context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Draw in the track's coordinate space: map the destination rect back through the
    // inverse transform, then concatenate the transform onto the context.
    FloatRect transformedOutputRect = videoTransform.inverse().mapRect(outputRect);

    context->concatCTM(videoTransform);
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2317
// Returns the platform image for the current playback time, refreshing the cached
// image from the video output first. Ownership stays with m_lastImage.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2323
// Blocks the calling thread for up to one second until the video output reports that
// new media data is available (outputMediaDataWillChange() signals the semaphore).
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second. Use the named DISPATCH_TIME_NOW constant rather than a bare 0
    // so the base of the timeout is explicit.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC));

    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2337
// Called by the video output when new media data becomes available; wakes any thread
// blocked in waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2342 #endif
2343
2344 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1: builds a streaming content key request for the pending loading request that
// matches the key URI found in initData, and reports it to the page via keyMessage().
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // There must be a resource-loading request already parked for this key URI.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Report the underlying error code to the page; this is not a player failure.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2386
// EME v1: delivers raw key bytes to the loading request parked under sessionID and
// completes that request. initData is unused by this engine.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);

    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Hand the key bytes to the pending AVAssetResourceLoadingRequest, finish it, and
    // forget the session.
    RetainPtr<AVAssetResourceLoadingRequest> request = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[request.get() dataRequest] respondWithData:keyData.get()];
    [request.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    return MediaPlayer::NoError;
}
2407
// EME v1: drops the pending loading request for sessionID, if any.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    bool hasPendingRequest = m_sessionIDToRequestMap.contains(sessionID);
    if (!hasPendingRequest)
        return MediaPlayer::InvalidPlayerState;

    // Forgetting the request is all the cancellation required.
    m_sessionIDToRequestMap.remove(sessionID);
    return MediaPlayer::NoError;
}
2419 #endif
2420
2421 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Transfers ownership of the pending loading request for keyURI to the caller,
// removing it from the map.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2426
// Satisfies any outstanding key-URI loading requests for which the player now has a
// cached key, then forgets those requests.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> satisfiedKeyIds;

    for (auto& pair : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(pair.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(pair.value.get(), keyData.get());
        satisfiedKeyIds.append(pair.key);
    }

    // Remove after the iteration; mutating the map while enumerating it is not safe.
    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2446
// EME v2: creates a CDM session bound to this player, or null for an unsupported key system.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    if (keySystemIsSupported(keySystem))
        return std::make_unique<CDMSessionAVFoundationObjC>(this);

    return nullptr;
}
2454 #endif
2455
2456 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Reconciles the engine's text-track list with the legacy (pre-media-selection)
// closed-caption tracks currently present on the player item: existing tracks are kept,
// new ones are appended, and tracks left in removedTextTracks are reported as removed.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once an entry has been
            // removed below (or a new track appended to m_textTracks by the outer
            // loop), the two vectors no longer share indices, so indexing m_textTracks
            // here could examine the wrong track.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2491 #endif
2492
2493 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    // Selection groups are only safe to query once the asset property backing them has
    // finished loading.
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2504
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    // nil until the asset's media selection groups have loaded.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2512
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    // nil until the asset's media selection groups have loaded.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2520
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    // nil until the asset's media selection groups have loaded.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2528
// Reconciles the engine's text-track list with the legible media-selection options of
// the current player item: existing tracks survive, new options become new tracks, and
// tracks left in removedTextTracks at the end are reported as removed.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb indices yet to be visited.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks have no selection option; skip them here.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            // A match means this option already has a track; keep it.
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2584
// Lazily creates the single in-band metadata text track and registers it with the player.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    // Dispatch type identifying Apple HTTP Live Streaming timed metadata.
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2594
// Forwards a batch of legible-output cues to the selected text track; cues arriving
// while no track is selected are dropped.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    if (m_currentTextTrack)
        m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2602
// Clears the selected text track's accumulated cue state (e.g. after a seek).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2612 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2613
// Makes `track` the active text track, enabling the matching AVFoundation facility
// (closed-caption display or the legible media-selection option); passing null
// deselects everything.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        // Each track category is selected through a different AVFoundation mechanism.
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselect both mechanisms; either may have been active for the previous track.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2642
// Determines (and caches in the mutable m_languageOfPrimaryAudioTrack) the language of
// the primary audio track: the selected audible option when a selection group exists,
// otherwise the language of a sole audio track, otherwise the empty string.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Answer from the cache when it was computed before.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2684
2685 #if ENABLE(IOS_AIRPLAY) && PLATFORM(IOS)
// True while AVFoundation reports external (AirPlay/TV-out style) playback as active.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool isWireless = [m_avPlayer.get() isExternalPlaybackActive];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isWireless));
    return isWireless;
}
2695
// Maps the WebKitSystemInterface external-device type onto the engine-neutral enum.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // All enum values are handled above; reaching here means a new type was added.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;
}
2713
// Returns the display name of the current external playback device, or the empty
// string when there is no player.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    // The WebKitSystemInterface shim wraps the SPI used to read the device name.
    String targetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, targetName.utf8().data());

    return targetName;
}
2724
// Reports whether wireless (external) video playback is disabled. The cached flag
// (a mutable member, hence usable from this const method) answers when no player
// exists and is refreshed from the live AVPlayer otherwise.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;
    
    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2735
// Records the wireless-video preference and, when a player exists, applies it to the
// AVPlayer immediately.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));

    m_allowsWirelessVideoPlayback = !disabled;

    if (m_avPlayer)
        [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
}
2745
// Keeps external-screen rendering enabled exactly while a fullscreen layer exists.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
}
2753 #endif
2754
// KVO handler for AVPlayerItem.status: cache the value and re-evaluate engine state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2761
// "Prior" KVO notification: a playbackLikelyToKeepUp change is in flight. The counter
// defers updateStates() until the paired ...DidChange() arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2766
// Paired with playbackLikelyToKeepUpWillChange(); re-evaluates state only once all
// outstanding Will/Did notification pairs have balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2775
// "Prior" KVO notification: a playbackBufferEmpty change is in flight; see
// playbackBufferEmptyDidChange() for the balancing decrement.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
2780
// Paired with playbackBufferEmptyWillChange(); re-evaluates state only once all
// outstanding Will/Did notification pairs have balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2789
// "Prior" KVO notification: a playbackBufferFull change is in flight; see
// playbackBufferFullDidChange() for the balancing decrement.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
2794
// Paired with playbackBufferFullWillChange(); re-evaluates state only once all
// outstanding Will/Did notification pairs have balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2803
// KVO handler for seekableTimeRanges: cache the new ranges first so the subsequent
// notifications observe fresh data.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2811
// KVO handler for loadedTimeRanges: cache the new ranges, then notify.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2819
// KVO handler for the layer's readyForDisplay. Becoming ready can be the first
// evidence that video exists, so trigger a track re-scan in that case.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2827
// KVO handler for an AVPlayerItemTrack's "enabled" property; re-scan tracks and state.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2833
// Enables or disables data buffering. Detaching the AVPlayerItem from the AVPlayer is
// what actually stops AVFoundation from buffering; re-attaching resumes it.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::shouldBufferData(%p) - %s", this, boolString(shouldBuffer));

    if (shouldBuffer == m_shouldBufferData)
        return;

    m_shouldBufferData = shouldBuffer;

    if (m_avPlayer)
        setAVPlayerItem(m_shouldBufferData ? m_avPlayerItem.get() : nil);
}
2847
2848 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the DataCue type string exposed to the
// page; unknown key spaces map to the empty atom.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // The extra null check guards the case where the constant itself is unavailable
    // (it is resolved dynamically); do not remove it.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
2870 #endif
2871
// KVO handler for timedMetadata: caches the items and (with DATACUE_VALUE) converts
// them into data cues on the metadata text track.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // KVO can deliver NSNull for a cleared value; normalize that to nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // Metadata delivered mid-seek would be stamped with stale times; drop it.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // An empty update closes out any cues still open at the current time.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        // Items without a valid duration stay open-ended until a later update closes them.
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
2913
// KVO handler for AVPlayerItem.tracks: rebuilds m_cachedTracks (filtering out streaming
// tracks already represented by a media-selection group), and keeps the per-track
// "enabled" KVO registrations balanced across the swap.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Unregister from the tracks we were observing before replacing the cache.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        // Tracks belonging to the asset itself are always kept.
        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Re-register for "enabled" changes on the new set of tracks.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The track set changed, so any cached total size is stale.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
2951
// KVO handler for hasEnabledAudio: cache, then re-scan tracks and state.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
2959
// KVO handler for presentationSize: cache the new size, then notify.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
2967
// KVO handler for duration: cache the new value and drop the base class's stale cache.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
2974
// KVO handler for AVPlayer.rate: cache it, re-evaluate state, then report the change.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
2982     
2983 #if ENABLE(IOS_AIRPLAY)
// KVO handler for the external-playback-active property; forwards to the base class.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
2988 #endif
2989
// KVO handler for canPlayFastForward; the cached value answers later queries.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
2994
// KVO handler for canPlayFastReverse; the cached value answers later queries.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
2999
// Returns the asset's resolved URL once AVFoundation has loaded that key; otherwise
// falls back to the base class's notion of the URL.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    if (m_avAsset && [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] == AVKeyValueStatusLoaded)
        return URL([m_avAsset resolvedURL]);

    return MediaPlayerPrivateAVFoundation::resolvedURL();
}
3007
// AVAsset keys loaded asynchronously before the asset is used. The array is created
// once and intentionally never released.
// NOTE(review): the lazy init is not thread-safe; presumably only called on the main
// thread — confirm.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys;
    if (!keys) {
        keys = [[NSArray alloc] initWithObjects:@"duration",
                    @"naturalSize",
                    @"preferredTransform",
                    @"preferredVolume",
                    @"preferredRate",
                    @"playable",
                    @"resolvedURL",
                    @"tracks",
                    @"availableMediaCharacteristicsWithMediaSelectionOptions",
                   nil];
    }
    return keys;
}
3025
// Key paths of AVPlayerItem that the player observes via KVO (see
// WebCoreAVFMovieObserver's -observeValueForKeyPath:...). Created once, never released.
NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        @"timedMetadata",
        @"canPlayFastForward",
        @"canPlayFastReverse",
        nil];
    return keys;
}
3048
// Keys of AVAssetTrack that the player loads asynchronously before reading track
// metadata. The array is created once and deliberately never released.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
3054
3055 } // namespace WebCore
3056
// Observer object that bridges AVFoundation KVO notifications, NSNotifications, and
// AVPlayerItemLegibleOutput callbacks back to a MediaPlayerPrivateAVFoundationObjC.
// The callback pointer is a raw, non-retained pointer; it must be cleared via
// -disconnect before the C++ object is destroyed.
@implementation WebCoreAVFMovieObserver

// Stores a non-retained pointer to the C++ player that owns this observer.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Severs the link to the C++ player so that any in-flight callbacks become no-ops.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

// Invoked when the asset's asynchronous metadata load completes; forwards the
// notification to the main thread.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// Handler for the item's played-to-end notification.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO dispatcher. The observation context distinguishes which object class the
// key path belongs to (AVPlayerLayer, AVPlayerItemTrack, AVPlayerItem, AVPlayer), and
// NSKeyValueChangeNotificationIsPriorKey distinguishes "will change" from "did change"
// callbacks. The matching work is bound into a function that runs on the main thread.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    WTF::Function<void ()> function;

    // A value changed for an AVPlayerLayer.
    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    // A value changed for an AVPlayerItemTrack.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // A value is about to change for an AVPlayerItem.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            // NOTE(review): the new value of "asset" is an AVAsset, yet it is wrapped in
            // RetainPtr<NSArray> here; presumably setAsset() accepts any id-compatible
            // type so this compiles and retains correctly — confirm the template argument.
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time alongside the metadata; clamp to zero
            // when the time is numeric (an invalid/indefinite time leaves `now` at zero).
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(IOS_AIRPLAY)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }

    // Unrecognized key path (or one with no work to do): nothing to schedule.
    if (function.isNull())
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput delegate: new attributed caption strings arrived. Retains
// self and the arguments, then hops to the main thread where m_callback is re-checked
// (it may have been disconnected in the interim).
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    RetainPtr<NSArray> strongSamples = nativeSamples;
    callOnMainThread([strongSelf, strongStrings, strongSamples, itemTime] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        callback->processCue(strongStrings.get(), strongSamples.get(), toMediaTime(itemTime));
    });
}

// AVPlayerItemLegibleOutput delegate: output was flushed (e.g. after a seek); flush any
// queued cues on the main thread.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    callOnMainThread([strongSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
            callback->flushCues();
    });
}
#endif

@end
3225
3226 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate that forwards resource-loading and authentication
// requests to the C++ media player on the main thread. AVFoundation calls these
// delegate methods on a background queue, so each handler retains what it needs and
// re-checks m_callback after hopping threads. m_callback is a raw, non-retained
// pointer cleared via -setCallback: when the player goes away.
@implementation WebCoreAVFLoaderDelegate

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Returning YES tells AVFoundation we will satisfy (or fail) the request
// asynchronously; the main-thread block finishes the request with an error if the
// player is gone or declines to handle it.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            [strongRequest finishLoadingWithError:nil];
            return;
        }

        if (!callback->shouldWaitForLoadingOfResource(strongRequest.get()))
            [strongRequest finishLoadingWithError:nil];
    });

    return YES;
}

// Authentication challenges (e.g. HTTP basic auth) are forwarded to the player on the
// main thread; if the player is gone or declines, the challenge is cancelled.
// Server-trust challenges are declined outright (returning NO), so this delegate does
// not participate in certificate evaluation — presumably AVFoundation's default
// handling applies in that case.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> strongChallenge = challenge;
    callOnMainThread([strongSelf, strongChallenge] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
            return;
        }

        if (!callback->shouldWaitForResponseToAuthenticationChallenge(strongChallenge.get()))
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
    });

    return YES;
}

// AVFoundation cancelled a previously issued loading request; tell the player so it
// can abandon any in-progress work for that request.
- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (callback)
            callback->didCancelLoadingRequest(strongRequest.get());
    });
}

// Updates (or clears, with null) the non-retained pointer back to the C++ player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
3305 #endif
3306
3307 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Delegate for AVPlayerItemVideoOutput that forwards "media data will change"
// callbacks to the C++ media player. Holds a non-retained pointer to the player,
// which is updated or cleared via -setCallback:.
@implementation WebCoreAVFPullDelegate

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (!self)
        return nil;

    m_callback = callback;
    return self;
}

// Updates (or clears, with null) the non-retained pointer back to the C++ player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

// New pixel buffers are (or soon will be) available from the video output.
- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;

    m_callback->outputMediaDataWillChange(output);
}

// Intentionally empty: a flushed output sequence requires no action from the player.
- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
}
@end
3334 #endif
3335
3336 #endif