[Mac][WebAudio] Update the AVAudioMix in the AudioSourceProviderAVFObjC when the...
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationSPI.h"
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioSourceProviderAVFObjC.h"
34 #import "AudioTrackPrivateAVFObjC.h"
35 #import "AuthenticationChallenge.h"
36 #import "BlockExceptions.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "FloatConversion.h"
41 #import "FloatConversion.h"
42 #import "GraphicsContext.h"
43 #import "GraphicsContextCG.h"
44 #import "InbandMetadataTextTrackPrivateAVF.h"
45 #import "InbandTextTrackPrivateAVFObjC.h"
46 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
47 #import "OutOfBandTextTrackPrivateAVF.h"
48 #import "URL.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTarget.h"
51 #import "MediaSelectionGroupAVFObjC.h"
52 #import "MediaTimeAVFoundation.h"
53 #import "PlatformTimeRanges.h"
54 #import "QuartzCoreSPI.h"
55 #import "SecurityOrigin.h"
56 #import "SerializedPlatformRepresentationMac.h"
57 #import "TextEncoding.h"
58 #import "TextTrackRepresentation.h"
59 #import "UUID.h"
60 #import "VideoTrackPrivateAVFObjC.h"
61 #import "WebCoreAVFResourceLoader.h"
62 #import "WebCoreCALayerExtras.h"
63 #import "WebCoreSystemInterface.h"
64 #import <objc/runtime.h>
65 #import <runtime/DataView.h>
66 #import <runtime/JSCInlines.h>
67 #import <runtime/TypedArrayInlines.h>
68 #import <runtime/Uint16Array.h>
69 #import <runtime/Uint32Array.h>
70 #import <runtime/Uint8Array.h>
71 #import <wtf/CurrentTime.h>
72 #import <wtf/Functional.h>
73 #import <wtf/ListHashSet.h>
74 #import <wtf/NeverDestroyed.h>
75 #import <wtf/text/CString.h>
76 #import <wtf/text/StringBuilder.h>
77
78 #if ENABLE(AVF_CAPTIONS)
79 #include "TextTrack.h"
80 #endif
81
82 #import <AVFoundation/AVFoundation.h>
83 #if PLATFORM(IOS)
84 #import "WAKAppKitStubs.h"
85 #import <CoreImage/CoreImage.h>
86 #import <mach/mach_port.h>
87 #else
88 #import <Foundation/NSGeometry.h>
89 #import <QuartzCore/CoreImage.h>
90 #endif
91
92 #if USE(VIDEOTOOLBOX)
93 #import <CoreVideo/CoreVideo.h>
94 #import <VideoToolbox/VideoToolbox.h>
95 #endif
96
97 #if USE(CFNETWORK)
98 #include "CFNSURLConnectionSPI.h"
99 #endif
100
namespace std {
// Workaround: WTF::HashSet iterators do not provide the nested typedefs that
// <iterator> expects, so supply an explicit iterator_traits specialization for
// the MediaSelectionOptionAVFObjC set so std algorithms can operate on it.
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
106
@interface WebVideoContainerLayer : CALayer
@end

@implementation WebVideoContainerLayer

// Container layer that keeps every sublayer's frame pinned to its own bounds
// whenever the bounds change.
- (void)setBounds:(CGRect)bounds
{
    [super setBounds:bounds];
    for (CALayer *sublayer in self.sublayers) {
        sublayer.frame = bounds;
    }
}
@end
119
#if ENABLE(AVF_CAPTIONS)
// Note: This must be defined before our SOFT_LINK macros:
// SPI category: exposes the out-of-band source/identifier carried by a media
// selection option so an out-of-band text track can be matched back to the
// WebKit-supplied track source (see synchronizeTextTrackState()).
@class AVMediaSelectionOption;
@interface AVMediaSelectionOption (OutOfBandExtensions)
@property (nonatomic, readonly) NSString* outOfBandSource;
@property (nonatomic, readonly) NSString* outOfBandIdentifier;
@end
#endif
128
#if PLATFORM(IOS)
// SPI category declaration so the compiler accepts the dataYouTubeID selector;
// the implementation is provided by the framework at runtime.
// NOTE(review): the property's use is not visible in this chunk — presumably
// tags a player item with a YouTube video identifier; confirm at call sites.
@class AVPlayerItem;
@interface AVPlayerItem (WebKitExtensions)
@property (nonatomic, copy) NSString* dataYouTubeID;
@end
#endif
135
// SPI category: resolvedURL is the URL the asset ultimately loads from —
// NOTE(review): presumably post-redirect; confirm against AVFoundation SPI.
@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end

// Type aliases so declarations elsewhere in the file can name the soft-linked
// classes without a link-time dependency on AVFoundation.
typedef AVPlayer AVPlayerType;
typedef AVPlayerItem AVPlayerItemType;
typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
typedef AVMetadataItem AVMetadataItemType;
typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
typedef AVMediaSelectionOption AVMediaSelectionOptionType;
147
#pragma mark - Soft Linking

// Soft-linking headers must be included last since they #define functions, constants, etc.
#import "CoreMediaSoftLink.h"

// Frameworks are resolved lazily at first use instead of being linked at
// build time; the _OPTIONAL variants tolerate the framework being absent.
SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)

#if USE(VIDEOTOOLBOX)
SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
#endif

// CoreVideo pixel-buffer accessors used by the software paint path.
SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))

#if USE(VIDEOTOOLBOX)
SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
#endif

SOFT_LINK_CLASS(AVFoundation, AVPlayer)
SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)

SOFT_LINK_CLASS(CoreImage, CIContext)
SOFT_LINK_CLASS(CoreImage, CIImage)

SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)

SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)

// From here on, bare uses of each framework symbol are rewritten by the
// preprocessor to go through the corresponding soft-link getter.
#define AVPlayer getAVPlayerClass()
#define AVPlayerItem getAVPlayerItemClass()
#define AVPlayerLayer getAVPlayerLayerClass()
#define AVURLAsset getAVURLAssetClass()
#define AVAssetImageGenerator getAVAssetImageGeneratorClass()
#define AVMetadataItem getAVMetadataItemClass()

#define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
#define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
#define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
#define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
#define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
#define AVMediaTypeVideo getAVMediaTypeVideo()
#define AVMediaTypeAudio getAVMediaTypeAudio()
#define AVMediaTypeMetadata getAVMediaTypeMetadata()
#define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
#define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
#define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
#define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
#define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
#define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
#define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
#define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
#define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// NOTE(review): these typedefs duplicate the ones declared above the
// soft-link section; redundant but harmless under C++11.
typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
typedef AVMediaSelectionOption AVMediaSelectionOptionType;

SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)

SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)

#define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
#define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
#define AVMediaSelectionOption getAVMediaSelectionOptionClass()
#define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
#define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
#define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
#define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
#endif

#if ENABLE(AVF_CAPTIONS)
// Keys for attaching out-of-band (WebVTT-sourced) alternate tracks to an asset.
SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)

#define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
#define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
#define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
#define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
#define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
#define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
#define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
#define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
#define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
#define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
#endif

#if ENABLE(DATACUE_VALUE)
// Metadata key spaces used when building DataCue values from timed metadata.
SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)

#define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
#define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
#define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
#define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
#define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
#endif

#if PLATFORM(IOS)
SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
#define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
#endif
294
295 using namespace WebCore;
296
// KVO context tags passed to -addObserver:forKeyPath:options:context: so the
// observer can route -observeValueForKeyPath: by the kind of observed object.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
303
// Observer object that receives KVO callbacks, did-play-to-end notifications
// and (when legible output is supported) caption cues on behalf of a
// MediaPlayerPrivateAVFoundationObjC instance.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // back-pointer; cleared by -disconnect
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath is declared untyped (implicit id); the standard KVO
// signature uses (NSString *) — confirm against the implementation below.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
323
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate that forwards resource-loading requests to
// the owning player object. NOTE(review): presumably serviced on
// globalLoaderDelegateQueue() — confirm at the -setDelegate:queue: call site.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // cleared via -setCallback: during teardown
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
333
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for the AVPlayerItemVideoOutput: notified when new video
// frames become available or the output sequence is flushed.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // cleared via -setCallback: during teardown
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
345
346 namespace WebCore {
347
348 static NSArray *assetMetadataKeyNames();
349 static NSArray *itemKVOProperties();
350 static NSArray *assetTrackMetadataKeyNames();
351 static NSArray *playerKVOProperties();
352 static AVAssetTrack* firstEnabledTrack(NSArray* tracks);
353
#if !LOG_DISABLED
// Returns a printable C-string literal for a boolean, for use in LOG() output.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
360
#if ENABLE(ENCRYPTED_MEDIA_V2)
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
// Global registry mapping a MediaPlayer to its AVFoundation backend; populated
// in the constructor and cleared in the destructor.
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
}
#endif
369
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily-created shared serial queue on which WebCoreAVFLoaderDelegate
// callbacks are serviced.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
#endif
381
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily-created shared serial queue on which WebCoreAVFPullDelegate
// callbacks are serviced.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
#endif
393
394 #if USE(CFNETWORK)
// Bridges an NSURLAuthenticationChallenge into WebCore's AuthenticationClient
// interface, forwarding each resolution back to the challenge's sender.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    virtual void refAuthenticationClient() override { ref(); }
    virtual void derefAuthenticationClient() override { deref(); }

    // 'override' added to the handlers below (previously missing) so the
    // compiler verifies they match the AuthenticationClient virtual interface.
    virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // These two sender methods are optional protocol additions, so probe with
    // respondsToSelector: before messaging.
    virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
};
444 #endif
445
// Registers this engine with MediaPlayer's engine factory, but only when the
// AVFoundation framework can be soft-linked at runtime (isAvailable()).
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
452
// Constructs the AVFoundation-backed player private. The Obj-C helper objects
// (movie observer, pull delegate, loader delegate) are created up front with a
// back-pointer to this object; their callbacks are severed in the destructor.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register in the global map so EME session code can locate this backend
    // from its MediaPlayer; removed again in the destructor.
    playerToPrivateMap().set(player, this);
#endif
}
489
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Sever the delegate's back-pointer first so a late resource-loader
    // callback cannot reach a half-destroyed object.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // The semaphore is a manually-retained dispatch object (see waitForVideoOutput path).
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // Drops the asset, player item, player and every observer registered on them.
    cancelLoad();
}
514
// Tears down all loading and playback state: rendering, the asset, the player
// item, the player, every KVO registration, and cached property values.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Remove every KVO observation before releasing the observed objects;
    // the key lists must mirror the ones used when observers were added.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;

        for (NSString *keyName in playerKVOProperties())
            [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Detach the Web Audio source provider from the item/track being torn down.
    if (m_provider) {
        m_provider->setPlayerItem(nullptr);
        m_provider->setAudioTrack(nullptr);
    }
#endif

    setIgnoreLoadStateChanges(false);
}
578
// True once createVideoLayer() has been asked to run, even if the main-thread
// hop that actually creates the layer has not completed yet.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
583
// Context (software-paint) rendering is possible once either a video output
// or an image generator exists.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}
592
// Creates the machinery for painting video into a graphics context: an
// AVPlayerItemVideoOutput when available, otherwise an AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
601
// Lazily creates the AVAssetImageGenerator used for the software paint path.
// No-op when there is no asset yet or a generator already exists.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance: generated frames must correspond exactly to the requested time.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
618
// Tears down both software-paint paths; each destroy function is a no-op if
// its object was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
626
// Releases the AVAssetImageGenerator; createImageGenerator() rebuilds it on demand.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying %p", this, m_imageGenerator.get());

    // Clear with nil (not 0), matching how other Obj-C RetainPtr members are
    // cleared in this file (e.g. destroyVideoLayer()).
    m_imageGenerator = nil;
}
636
// Requests creation of the AVPlayerLayer. Layer creation is bounced to the
// main thread; a weak pointer makes the hop a no-op if the player dies first.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: state may have changed before this main-thread task ran.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
662
// Creates the AVPlayerLayer (and, on iOS, its inline container layer),
// registers the readyForDisplay KVO observation, and parents the layer either
// inline or under the fullscreen layer. Must run on the main thread (only
// reached via the callOnMainThread hop in createVideoLayer()).
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = player()->client().mediaPlayerContentBoxRect().pixelSnappedSize();
    // Fixed: the log message previously said "createVideoLayer", which made
    // traces ambiguous now that layer creation is split across two functions.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    [m_videoLayer web_disableAllActions];
    m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
#ifndef NDEBUG
    [m_videoInlineLayer setName:@"WebVideoContainerLayer"];
#endif
    [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    if (m_videoFullscreenLayer) {
        // In fullscreen, the video layer is parented under the fullscreen
        // layer and sized to the fullscreen frame.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else {
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
    }
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
698
// Unregisters the readyForDisplay observation, detaches the layer from the
// player, and (on iOS) unparents it from the fullscreen layer before release.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    // KVO must be removed before the layer is released.
    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
    m_videoInlineLayer = nil;
#endif

    m_videoLayer = nil;
}
717
// Whether a displayable video frame exists yet. When rendering to a layer,
// readiness is tracked via the layer's readyForDisplay KVO; otherwise report
// whether a frame has ever been drawn through the context path.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
725
726 #if ENABLE(AVF_CAPTIONS)
// Maps a WebKit out-of-band text-track kind to the AVFoundation media
// characteristics used to describe that track. Uses modern array literals in
// place of the verbose [NSArray arrayWithObjects:...nil] form.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Caption)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Subtitle)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];

    if (kind == PlatformTextTrack::Description)
        return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility ];

    if (kind == PlatformTextTrack::Forced)
        return @[ AVMediaCharacteristicContainsOnlyForcedSubtitles ];

    // All other kinds fall back to plain spoken-dialog transcription.
    return @[ AVMediaCharacteristicTranscribesSpokenDialogForAccessibility ];
}
744     
// Invoked when a platform text track's mode changes; forwards to the base
// class so the text track selection state gets re-synchronized.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
749     
// Pushes the mode (showing / hidden / disabled) of each out-of-band track
// source from the MediaPlayer client down onto the matching out-of-band
// platform text track in m_textTracks.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven by the client's track sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        // Match the AVMediaSelectionOption to its source via the unique ID the
        // track was registered with (see createAVAssetForURL's out-of-band setup).
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Unrecognized source modes fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
780 #endif
781
782
// Returns the canonical form of |url| as produced by the registered
// NSURLProtocol handlers, falling back to the plain Cocoa URL when the string
// is empty or no canonical request can be built.
static NSURL *canonicalURL(const String& url)
{
    NSURL *cocoaURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return cocoaURL;

    RetainPtr<NSURLRequest> originalRequest = adoptNS([[NSURLRequest alloc] initWithURL:cocoaURL]);
    if (!originalRequest)
        return cocoaURL;

    NSURLRequest *canonicalizedRequest = [NSURLProtocol canonicalRequestForRequest:originalRequest.get()];
    return canonicalizedRequest ? [canonicalizedRequest URL] : cocoaURL;
}
799
800 #if PLATFORM(IOS)
// Converts a WebCore Cookie into the NSHTTPCookie form that the AVURLAsset
// cookies option expects. Expiry is converted from milliseconds to seconds
// since the epoch.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> cookieProperties = adoptNS([[NSMutableDictionary alloc] init]);
    [cookieProperties setObject:cookie.name forKey:NSHTTPCookieName];
    [cookieProperties setObject:cookie.value forKey:NSHTTPCookieValue];
    [cookieProperties setObject:cookie.domain forKey:NSHTTPCookieDomain];
    [cookieProperties setObject:cookie.path forKey:NSHTTPCookiePath];
    [cookieProperties setObject:[NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)] forKey:NSHTTPCookieExpires];
    if (cookie.secure)
        [cookieProperties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [cookieProperties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:cookieProperties.get()];
}
818 #endif
819
// Creates the AVURLAsset for |url|, assembling the options dictionary from
// player state: reference restrictions, HTTP header fields (Referer /
// User-Agent), iTunes query-component inheritance, and platform/feature
// extras (client bundle identifier, out-of-band text tracks, bound network
// interface, raw cookies). No-op when an asset already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid the asset from mixing local and remote media references.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    // NOTE(review): the option key is a literal string rather than an SDK
    // constant — presumably for SDK availability; confirm before changing.
    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

#if PLATFORM(IOS)
    // FIXME: rdar://problem/20354688
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];
#endif

#if ENABLE(AVF_CAPTIONS)
    // Register each out-of-band text track source with the asset; the unique
    // ID recorded here is used later by synchronizeTextTrackState() to match
    // AVMediaSelectionOptions back to their sources.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the document's cookies for this URL to AVFoundation so its media
    // loads carry the same credentials.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route custom-scheme / key loading requests through our loader delegate.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
907
// Replaces the AVPlayer's current item. The replacement is performed on the
// main thread (hopping there if necessary), keeping the player and item alive
// until the asynchronous call runs.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> protectedItem = item;
    dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
        [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
    });
}
924
// Creates the AVPlayer, registers KVO observers for every player property we
// track, applies media-selection and external-playback policy, creates the
// video layer when the element has video, and attaches any existing item.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    // Observe all cached player properties; destroyed symmetrically elsewhere.
    for (NSString *keyName in playerKVOProperties())
        [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore drives track selection itself; don't let AVFoundation auto-pick.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];

#if !PLATFORM(IOS)
    // Re-apply a wireless output context chosen before the player existed.
    if (m_outputContext)
        m_avPlayer.get().outputContext = m_outputContext.get();
#endif
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
960
// Creates the AVPlayerItem from the asset and wires up everything that hangs
// off the item: end-of-playback notification, KVO for item properties, the
// pitch algorithm, the legible (caption) output, and the Web Audio source
// provider.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Prior notifications let us detect imminent changes (e.g. seek start).
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

#if PLATFORM(IOS)
    AtomicString value;
    if (player()->doesHaveAttribute("data-youtube-id", &value))
        [m_avPlayerItem.get() setDataYouTubeID: value];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Attach a legible output so caption cues are delivered as WebVTT to our
    // observer instead of being rendered by AVFoundation.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the new item and its first
    // enabled audible track so its audio tap / mix stay in sync.
    if (m_provider) {
        m_provider->setPlayerItem(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack([m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
    }
#endif

    setDelayCallbacks(false);
}
1012
// Kicks off a one-time asynchronous load of the asset's "playable" key and
// schedules the AssetPlayabilityKnown notification on the main thread once
// the value is available. Guarded so it runs at most once per asset.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    // Weak pointer: the completion handler may outlive this player object.
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1029
// Asynchronously loads the asset's metadata keys and, once the "tracks" key
// is loaded, each track's metadata keys. A dispatch group joins all the
// per-track loads; when everything has completed, metadataLoaded is delivered
// to the observer on the main thread.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    // The group is manually retained/released here (dispatch objects are
    // treated as plain C objects in this file); the matching release is in
    // the notify block below.
    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
    dispatch_group_enter(metadataLoadingGroup);
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                // Fan out: one group entry per track's asynchronous key load.
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup);
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup);
                    }];
                }
            }
            // Balances the enter before the asset-level load above.
            dispatch_group_leave(metadataLoadingGroup);
        });
    }];

    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });

        dispatch_release(metadataLoadingGroup);
    });
}
1061
// Translates the cached AVPlayerItem status flags into the engine-neutral
// ItemStatus enum. The order of the checks is significant: error states win
// over buffering states, and likely-to-keep-up wins over buffer-full/empty.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1080
// Packages the native AVPlayer into the engine-neutral PlatformMedia struct
// for clients that need direct access to the platform media object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia result;
    result.type = PlatformMedia::AVFoundationMediaPlayerType;
    result.media.avfMediaPlayer = m_avPlayer.get();
    return result;
}
1089
// Returns the layer WebCore should host: the inline container layer on iOS,
// the video layer itself elsewhere. nil until layer creation was requested.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
#if PLATFORM(IOS)
    return m_haveBeenAskedToCreateLayer ? m_videoInlineLayer.get() : nullptr;
#else
    return m_haveBeenAskedToCreateLayer ? m_videoLayer.get() : nullptr;
#endif
}
1098
1099 #if PLATFORM(IOS)
// Moves the video layer between the inline container and the fullscreen
// host layer (iOS only), keeping the transition atomic across render-server
// contexts with a shared fence port, and re-parents the text track
// representation layer into the fullscreen layer.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    
    // Find the root of the tree the fullscreen layer currently lives in.
    CALayer *oldRootLayer = videoFullscreenLayer;
    while (oldRootLayer.superlayer)
        oldRootLayer = oldRootLayer.superlayer;

    CALayer *newRootLayer = nil;
    
    // Re-parent the video layer: into the fullscreen layer when entering
    // fullscreen, back into the inline layer when leaving, or detach entirely.
    if (m_videoFullscreenLayer && m_videoLayer) {
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoLayer removeFromSuperlayer];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoFullscreenLayer.get();
    } else if (m_videoInlineLayer && m_videoLayer) {
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoInlineLayer.get();
    } else if (m_videoLayer)
        [m_videoLayer removeFromSuperlayer];

    while (newRootLayer.superlayer)
        newRootLayer = newRootLayer.superlayer;

    // When the move crosses layer trees, hand every CAContext hosting either
    // tree the same fence port — NOTE(review): this appears intended to make
    // the commits land in the same render-server frame; confirm.
    if (oldRootLayer && newRootLayer && oldRootLayer != newRootLayer) {
        mach_port_t fencePort = 0;
        for (CAContext *context in [CAContext allContexts]) {
            if (context.layer == oldRootLayer || context.layer == newRootLayer) {
                if (!fencePort)
                    fencePort = [context createFencePort];
                else
                    [context setFencePort:fencePort];
            }
        }
        mach_port_deallocate(mach_task_self(), fencePort);
    }
    [CATransaction commit];

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1153
// Records the fullscreen frame and resizes the video layer to match,
// temporarily re-enabling implicit animations so the resize animates, then
// keeps the text track representation in sync.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer) {
        [m_videoLayer setStyle:nil]; // This enables actions, i.e. implicit animations.
        [CATransaction begin];
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];
        [CATransaction commit];
        [m_videoLayer web_disableAllActions];
    }
    syncTextTrackBounds();
}
1169
// Applies the requested fullscreen scaling mode by mapping the engine's
// VideoGravity enum onto the corresponding AVLayerVideoGravity constant.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        // Match the original's fallback: aspect-preserving letterbox.
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:videoGravity];
}
1188
// Returns the most recently captured timed-metadata array, or nil when none
// has been received.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1195
// Renders the AVPlayerItem access (network activity) log as a String, or
// returns the empty string when no player item exists yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *itemAccessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> renderedLog = adoptNS([[NSString alloc] initWithData:[itemAccessLog extendedLogData] encoding:[itemAccessLog extendedLogDataStringEncoding]]);
    return renderedLog.get();
}
1206
// Renders the AVPlayerItem error log as a String, or returns the empty
// string when no player item exists yet.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *itemErrorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> renderedLog = adoptNS([[NSString alloc] initWithData:[itemErrorLog extendedLogData] encoding:[itemErrorLog extendedLogDataStringEncoding]]);
    return renderedLog.get();
}
1217 #endif
1218
// Shows or hides the video layer inside a transaction with implicit
// animations disabled, so visibility changes take effect immediately.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1227     
// Starts playback by driving the AVPlayer's rate to the client-requested
// rate. setDelayCallbacks brackets the rate change, matching platformPause().
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    // Query the requested rate once so the cached value and the rate handed to
    // AVFoundation can never disagree (the original called requestedRate() twice).
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1239
// Pauses playback by setting the AVPlayer's rate to 0, keeping the cached
// rate in sync; callbacks are delayed around the change as in platformPlay().
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1251
// Returns the media duration, preferring the player item's value (once it is
// ready to play) over the asset's. Indefinite CMTime maps to +infinity (live
// streams); anything else non-numeric is reported as an invalid time.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1276
// Returns the current playback position clamped to be non-negative; zero
// when metadata or the player item is unavailable, or when the item reports
// a non-numeric time.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerItemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerItemTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(playerItemTime), MediaTime::zeroTime());
}
1288
// Seeks the player item to |time| within the given tolerances, flushing any
// partially-built metadata cues first, and reports completion back on the
// main thread via seekCompleted().
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    // Weak pointer: the seek completion handler may outlive this object.
    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1315
// Sets the player volume. On iOS this is a deliberate no-op (volume is
// controlled by the system there); elsewhere it forwards to the AVPlayer once
// metadata is available.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1328
// Caption visibility hook. The body only logs — NOTE(review): actual caption
// selection appears to be handled through the text track machinery elsewhere;
// confirm before adding behavior here.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1338
// Applies an explicit playback rate to the AVPlayer, caching it so rate() can
// answer without querying AVFoundation; callbacks are delayed as in play/pause.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1346
// Returns the cached playback rate, or 0 before metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1354
// Selects the item's audio time-pitch algorithm: spectral (pitch-preserving)
// when requested, varispeed otherwise. No-op without a player item.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    NSString *pitchAlgorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:pitchAlgorithm];
}
1360
// Builds a PlatformTimeRanges from the cached loaded-ranges snapshot,
// skipping invalid or empty CMTimeRanges. Empty when no item exists.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        bufferedRanges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1375
// Returns the earliest start time across all valid cached seekable ranges,
// or zero when no valid range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliest = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        MediaTime rangeStart = toMediaTime(range.start);
        if (rangeStart < earliest)
            earliest = rangeStart;
    }
    return foundValidRange ? earliest : MediaTime::zeroTime();
}
1395
// Returns the latest end time across the cached seekable ranges, lazily
// fetching the snapshot from the player item on first use.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (latest < rangeEnd)
            latest = rangeEnd;
    }
    return latest;
}
1413
// Returns the latest end time across the cached loaded (buffered) ranges, or
// zero before any ranges have been cached.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        MediaTime rangeEnd = toMediaTime(CMTimeRangeGetEnd(range));
        if (latest < rangeEnd)
            latest = rangeEnd;
    }

    return latest;
}
1432
// Returns the total sample-data size across all cached tracks. The sum is
// computed once and memoized in the mutable m_cachedTotalBytes (a value of 0
// is treated as "not yet computed").
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    for (AVPlayerItemTrack *thisTrack in m_cachedTracks.get())
        m_cachedTotalBytes += [[thisTrack assetTrack] totalSampleDataLength];

    return m_cachedTotalBytes;
}
1446
// Replaces the current asset reference (used when an asset is supplied from
// outside rather than created via createAVAssetForURL).
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
1451
// Computes the aggregate loading status of the asset's metadata keys:
// loading / failed / cancelled if any key is in that state, otherwise
// playable or merely loaded depending on the asset's "playable" value.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1480
// Returns the NSError code from loading the asset's "playable" key, or 0 when
// there is no asset or no error (messaging nil yields 0).
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *playabilityError = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&playabilityError];
    return [playabilityError code];
}
1490
// Paints the current video frame into |context|, preferring the video-output
// path when a frame is available and falling back to the image generator.
// Marks that a frame has been drawn so hasAvailableVideoFrame() can report it.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1511
// Best-effort paint entry point: bails out when painting is impossible, when
// we already render through a layer, or when no context renderer (image
// generator / video output) exists yet.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if we already have an image generator or video output available.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1527
// Paints a snapshot of the current time obtained from the image generator.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (image) {
        GraphicsContextStateSaver stateSaver(*context);
        // CGImages are flipped relative to the GraphicsContext, so move the
        // origin to the bottom of the rect and flip the y-axis before drawing.
        context->translate(rect.x(), rect.y() + rect.height());
        context->scale(FloatSize(1.0f, -1.0f));
        context->setImageInterpolationQuality(InterpolationLow);
        IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
        CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
        // Release the image eagerly rather than waiting for scope exit.
        image = 0;
    }
}
1543
// Returns the lazily-built set of MIME types AVFoundation reports it can play,
// lower-cased for case-insensitive lookup. The static-bool guard is not
// thread-safe; callers are expected to be on the media engine's thread.
// FIX: the function's closing brace was missing, leaving it unterminated.
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add([mimeType lowercaseString]);

    return cache;
}
1558
1559
// Synchronously snapshots the frame nearest |time| via AVAssetImageGenerator,
// scaled to fit |rect|, and copies it into the device RGB color space.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // 600 is the conventional CoreMedia timescale for video timestamps.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1581
// MediaPlayer factory hook: reports every MIME type AVFoundation can play.
// FIX: the function's closing brace was missing, leaving it unterminated.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}
1585
1586
1587 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// FairPlay Streaming (both spellings) and Clear Key are the only key systems
// this engine recognizes; comparison is case-insensitive.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps")
        || equalIgnoringCase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringCase(keySystem, "org.w3c.clearkey");
}
1594 #endif
1595
// Answers canPlayType()-style queries for this engine: IsNotSupported /
// MayBeSupported / IsSupported for the given MIME type, codecs string, and
// (when EME is enabled) key system.
// FIX: removed a stray second semicolon on the final return statement.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringCase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringCase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // Media Source playback is handled by a different engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Ask AVFoundation about the full "type; codecs=..." string for a definitive answer.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1636
// Returns true when |keySystem|, optionally restricted to |mimeType|, can be
// handled by this engine. Returns false when EME is compiled out.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // FIX: this previously returned MediaPlayer::IsNotSupported — an enum
        // constant (value 0) from a bool function. It converted to false, so
        // behavior is unchanged, but the type mismatch is now explicit.
        if (equalIgnoringCase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringCase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1659
1660 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1661 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Satisfies an AVAssetResourceLoadingRequest for decryption-key data: fills in
// the content-information request (if present) and responds to the data
// request with the requested byte range of |keyData|.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the available key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // A range lying entirely outside the key data cannot be satisfied.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1687 #endif
1688
// AVAssetResourceLoader delegate hook. Key-request schemes ("skd" for
// FairPlay, "clearkey" for Clear Key) are turned into keyNeeded events and the
// request is parked in m_keyURIToRequestMap until a key arrives; anything else
// is handed to a WebCoreAVFResourceLoader. Returns true when AVFoundation
// should wait for an asynchronous answer.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Size prefix is written little-endian (last argument = true).
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): dividing by sizeof(unsigned char) is a no-op (== 1),
        // so this copies keyURI.length() UTF-16 units; possibly sizeof(UChar)
        // was intended — confirm before changing.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Park the request until a key for this URI is provided.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // If the key is already cached, answer the request synchronously.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // Not a key request: load the media data through WebCore's loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1745
// Wraps the NSURLAuthenticationChallenge in a WebCore AuthenticationChallenge
// (going through CFNetwork types when USE(CFNETWORK)) and asks the client
// whether loading should block until the challenge is answered.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1758
// Called when AVFoundation cancels an in-flight resource loading request;
// stops the corresponding WebCore loader, if one exists. (The loader stays in
// m_resourceLoaderMap until didStopLoadingRequest removes it.)
// FIX: removed an unused local that computed the request URL's scheme.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1768
// The loading request is finished (successfully or not); drop our reference
// to its resource loader.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1773 #endif
1774
// This engine is usable only when both the AVFoundation and CoreMedia
// frameworks can be soft-linked at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1779
// Intended to snap |timeValue| to a time AVFoundation can seek to exactly.
// Currently an identity mapping — both branches return the input — pending
// the radar referenced below.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1788
// How long (in seconds) a cached media time may be reused before re-querying
// AVFoundation. On iOS and OS X 10.10+ no caching is used (0 seconds).
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1797
// Applies the aspect-ratio preference to the AVPlayerLayer's video gravity.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    // Wrap the gravity change in a transaction with actions disabled so the
    // layer does not implicitly animate to the new gravity.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1816
// Returns the first track in |tracks| whose isEnabled flag is set, or nil when
// none is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger matchIndex = [tracks indexOfObjectPassingTest:^(id track, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(track) isEnabled];
    }];
    return matchIndex == NSNotFound ? nil : [tracks objectAtIndex:matchIndex];
}
1826
// Recomputes cached hasVideo/hasAudio/hasClosedCaptions state whenever the
// asset's or player item's track collection changes, updates the WebCore
// track lists, and fires characteristicsChanged() if the primary audio
// language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so we can detect a change
    // after the track lists are rebuilt.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic-changed notifications until the end of this
    // function so observers see a single consistent update.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // The displayed size accounts for the track's preferred transform
        // (e.g. rotation recorded by the capture device).
        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option also counts as an enabled track.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

    // Without legible output support, fall back to legacy CC track handling.
#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the WebAudio source provider pointed at the (possibly new) first
    // enabled audio track.
    if (m_provider)
        m_provider->setAudioTrack(firstEnabledTrack([m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#endif

    setDelayCharacteristicsChangedNotification(false);
}
1932
1933 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of |trackType| within |tracks| against the
// existing wrapper items in |oldItems|: creates wrappers (via |itemFactory|)
// for newly-appeared tracks, drops wrappers whose track disappeared, and
// notifies |player| through |removedFunction| / |addedFunction|. |oldItems|
// is updated in place to reflect the new state.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the existing items into kept vs removed, then create an item
    // for every added track.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player only after oldItems already reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1977
1978 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Overload for AVMediaSelectionGroup-backed tracks: diffs the group's current
// options against |oldItems|, skipping options backed by a persistent
// AVAssetTrack (those are handled by the NSArray overload), and notifies
// |player| of additions and removals. |oldItems| is updated in place.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions();

    // Only add selection options which do not have an associated persistent track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        // -track may not be implemented; probe with respondsToSelector first.
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition the existing items into kept vs removed, then wrap each added
    // option in a new item.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Notify the player only after oldItems already reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);
    
    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2038 #endif
2039
// Syncs m_audioTracks with the current AVPlayerItemTracks (and, where
// supported, the audible AVMediaSelectionGroup), then refreshes each track's
// cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the audible selection-group wrapper the first time the
    // group becomes available.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2065
// Syncs m_videoTracks with the current AVPlayerItemTracks (and, where
// supported, the visual AVMediaSelectionGroup), then refreshes each track's
// cached properties.
// FIX: the reset loop previously iterated m_audioTracks (a copy/paste of
// updateAudioTracks()), leaving the video tracks' cached properties stale.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection-group wrapper the first time the
    // group becomes available.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2091
// A separate text-track representation layer is needed only while an iOS
// fullscreen layer is installed.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    return !!m_videoFullscreenLayer;
#else
    return false;
#endif
}
2100
// Matches the text-track representation layer's frame to the video's visible
// rect (or the entire fullscreen frame when no video layer exists). iOS only.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;
    
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2111
// Installs (or removes) the platform layer used to render text-track cues
// while in iOS fullscreen, keeping its frame in sync with the video layer.
// No-op on other platforms.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just refresh its bounds.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Only attach when fullscreen is active; otherwise the layer is kept for
    // a later fullscreen transition.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2135 #endif // ENABLE(VIDEO_TRACK)
2136
2137 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the WebAudio source provider for the current player item and
// points it at the first enabled audio track; tracksChanged() keeps the track
// selection up to date afterwards.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider) {
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
        m_provider->setAudioTrack(firstEnabledTrack([m_avAsset tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
    }

    return m_provider.get();
}
2147 #endif
2148
// Pushes the cached presentation size through to WebCore as the natural size,
// once an asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2156     
// Returns true only when the URL AVFoundation actually resolved to shares a
// scheme/host/port with the URL that was requested — i.e. no cross-origin
// redirect occurred. Requires the asset's "resolvedURL" key to have loaded.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2166
2167 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates an AVPlayerItemVideoOutput and attaches it to the player item so
// frames can be pulled for painting. With VideoToolbox the output's native
// format is accepted (converted later in createPixelBuffer()); otherwise
// 32BGRA is requested up front.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // New-frame notifications are delivered to the delegate on the shared
    // pull-delegate queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2190
// Detaches the AVPlayerItemVideoOutput from the player item (when one still
// exists) and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    m_videoOutput = 0;
}
2202
// Pulls the pixel buffer for the player item's current time from the video
// output, or returns null when no new frame is available. Under VideoToolbox
// the frame is additionally transferred into a fresh 32BGRA buffer so
// CoreGraphics can consume it.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Convert whatever format the output produced into 32BGRA.
    CVPixelBufferRef outputBuffer;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2243
// Returns true when a frame can be painted right now: either a previously
// copied image is cached, or the video output (created on demand) has a new
// pixel buffer for the item's current time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2257
// CGDataProviderDirectCallbacks getBytePointer: lock the pixel buffer for
// read-only access and hand CoreGraphics its base address. The lock is
// balanced in CVPixelBufferReleaseBytePointerCallback.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(pixelBuffer);
}
2264
// CGDataProviderDirectCallbacks releaseBytePointer: balances the read-only
// lock taken in CVPixelBufferGetBytePointerCallback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
2270
2271 static void CVPixelBufferReleaseInfoCallback(void* info)
2272 {
2273     CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
2274     CFRelease(pixelBuffer);
2275 }
2276
// Wraps a 32BGRA pixel buffer in a CGImage without copying pixel data: the
// CGDataProvider reads directly from the buffer's base address through the
// callbacks above. The buffer is kept alive by an extra CFRetain, balanced by
// CVPixelBufferReleaseInfoCallback when the provider is destroyed.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // BGRA in memory == ARGB read as a 32-bit little-endian word, hence
    // byte-order-32-little with alpha first.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2294
// Capture the most recent video frame into m_lastImage.
// copyPixelBufferForItemTime:itemTimeForDisplay: (inside createPixelBuffer())
// may return nil if the pixel buffer for the requested time has already been
// retrieved; in that case the last valid image (if any) is deliberately kept
// so it can still be displayed.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2305
// Paints the current video frame into `context` using the video-output
// pipeline: waits (bounded, see waitForVideoOutputMediaDataWillChange) for a
// first frame if the output has produced none, refreshes m_lastImage, applies
// the video track's preferredTransform, and finally tears down any fallback
// AVAssetImageGenerator that is no longer needed.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const FloatRect& outputRect)
{
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    // Nothing to paint: no new frame and no previously captured one.
    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(*context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // The transform is concatenated into the CTM, so draw into the
    // inverse-mapped rect; the net effect places the image in outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().mapRect(outputRect);

    context->concatCTM(videoTransform);
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2336
// Returns the CGImage for the current time without drawing it. Refreshes
// m_lastImage first so callers see the newest available frame; may return
// null if no frame has ever been captured.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2342
// Blocks the calling thread until the video output signals (via
// outputMediaDataWillChange()) that new media data is available, or until a
// one-second timeout expires. The semaphore is created lazily on first use.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for up to 1 second. Use DISPATCH_TIME_NOW rather than a literal 0,
    // which only works because the constant happens to be zero.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC));

    // On timeout, the caller proceeds with whatever frame is already cached.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2356
// Delegate callback fired when the video output has new media data; wakes the
// thread blocked in waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2361 #endif
2362
2363 #if ENABLE(ENCRYPTED_MEDIA)
// ENCRYPTED_MEDIA (EME v1): builds a streaming content key request for the
// key URI carried in the initData and reports it to the client via
// keyMessage(). Returns KeySystemNotSupported / InvalidPlayerState when
// preconditions fail; otherwise NoError (a failed platform key request is
// reported asynchronously through keyError()).
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // The initData must decode to a key URI, a key ID and an application
    // certificate.
    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // A resource-loading request for this key URI must already be pending.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID; // WTF::String converts implicitly to NSString*.
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface the underlying platform error code to the client.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyURIToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2405
// ENCRYPTED_MEDIA (EME v1): delivers key data to the pending
// AVAssetResourceLoadingRequest associated with sessionID, completes the
// request, and notifies the client via keyAdded().
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // generateKeyRequest() must have stored a request for this session.
    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    // The initData is not needed to complete the loading request.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
2426
// ENCRYPTED_MEDIA (EME v1): forgets the pending resource-loading request for
// sessionID. Fails when the key system is unknown or no request is pending.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // HashMap::remove() returns false when no entry was present, folding the
    // contains() check and the removal into a single lookup.
    if (!m_sessionIDToRequestMap.remove(sessionID))
        return MediaPlayer::InvalidPlayerState;

    return MediaPlayer::NoError;
}
2438 #endif
2439
2440 #if ENABLE(ENCRYPTED_MEDIA_V2)
// ENCRYPTED_MEDIA_V2: removes and returns the pending resource-loading
// request for keyURI, or a null RetainPtr if none is pending.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2445
// ENCRYPTED_MEDIA_V2: called when new keys become available. Fulfills every
// pending loading request whose key is now in the player's key cache, then
// forgets those requests. Removal happens in a second pass because the map
// must not be mutated while it is being iterated.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> fulfilledKeyIds;

    for (auto& pair : m_keyURIToRequestMap) {
        const String& keyId = pair.key;
        const RetainPtr<AVAssetResourceLoadingRequest>& request = pair.value;

        auto keyData = player()->cachedKeyForKeyId(keyId);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(request.get(), keyData.get());
        fulfilledKeyIds.append(keyId);
    }

    for (auto& keyId : fulfilledKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2465
// ENCRYPTED_MEDIA_V2: creates a CDM session backed by this player, or null
// for key systems this player does not understand.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    return keySystemIsSupported(keySystem) ? std::make_unique<CDMSessionAVFoundationObjC>(this) : nullptr;
}
2473 #endif
2474
2475 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Platforms without AVPlayerItemLegibleOutput: rebuilds the set of legacy
// closed-caption text tracks from the cached AVPlayerItemTracks. Existing
// tracks still present in the player item survive; tracks no longer present
// are handed to processNewAndRemovedTextTracks() for removal.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Clear any selected legible media option before using legacy CC tracks.
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every existing track was removed; matches found below
    // are pruned from this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited entries.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once entries have
            // been removed here (or appended to m_textTracks below) the two
            // vectors no longer share indices. This matches the indexing used
            // by processMediaSelectionOptions().
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2510 #endif
2511
2512 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// True once the asset's availableMediaCharacteristicsWithMediaSelectionOptions
// key has finished loading; the media selection groups consulted by the
// safeMediaSelectionGroupFor*Media() helpers are only meaningful after that.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2523
// Legible (caption/subtitle) selection group, or nil until the asset's media
// selection metadata has finished loading.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2531
// Audible selection group, or nil until the asset's media selection metadata
// has finished loading.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2539
// Visual selection group, or nil until the asset's media selection metadata
// has finished loading.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2547
// Rebuilds the in-band (and, with AVF_CAPTIONS, out-of-band) text track list
// from the legible AVMediaSelectionGroup's playable options. Options already
// represented by an existing track survive; tracks whose option disappeared
// are handed to processNewAndRemovedTextTracks() for removal.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Start by assuming every existing track was removed; matches found below
    // are pruned from this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited entries.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks have no media selection option.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }

            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2603
// Lazily creates the single in-band metadata text track (kind Metadata, cue
// type Data) used for timed metadata, tags it with the "com.apple.streaming"
// dispatch type, and registers it with the player. Subsequent calls no-op.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2613
// Forwards a batch of cues (attributed strings and/or native samples)
// arriving at `time` to the currently selected text track; dropped when no
// track is selected. The NSArray pointers are toll-free bridged to CFArrayRef.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2621
// Discards any partially accumulated cue state on the current text track,
// e.g. so stale cues are not emitted after a discontinuity.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->resetCueValues();
}
2631 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2632
// Switches the active text track. Depending on the track's category this
// either enables legacy closed-caption display on the AVPlayer or selects the
// track's media option in the legible selection group; passing null deselects
// both. No-ops when the selection is unchanged.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");

    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2661
// Determines (and caches in m_languageOfPrimaryAudioTrack — presumably a
// mutable member, since this method is const) the BCP-47-style language of
// the primary audio track: first from the currently selected audible media
// option, then from a sole ungrouped audio track; empty otherwise.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // A non-null cached value short-circuits all platform queries.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2703
2704 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// True when the AVPlayer is routing to an external playback target (AirPlay
// or equivalent); false when no player exists yet.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool isWireless = m_avPlayer.get().externalPlaybackActive;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isWireless));

    return isWireless;
}
2715
// Classifies the current external playback target. On iOS the WebKit system
// interface distinguishes AirPlay from wired TV-out; elsewhere any external
// target is reported as AirPlay.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2738
// Human-readable name of the current external playback device: from the
// AVOutputContext on Mac, or from the WebKit system interface on iOS. Empty
// when no player (or, on Mac, no output context) exists.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName;
#if !PLATFORM(IOS)
    if (m_outputContext)
        wirelessTargetName = m_outputContext.get().deviceName;
#else
    wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
#endif
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2755
// Reports whether wireless (external) video playback is disabled — the
// inverse of -[AVPlayer allowsExternalPlayback]. Refreshes the cached flag
// from the player when one exists (m_allowsWirelessVideoPlayback is assigned
// in this const method, so it is presumably declared mutable); otherwise the
// last cached value is used.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2766
// Enables or disables external (wireless) video playback. The flag is cached
// even when no player exists yet so it can be applied on creation; when a
// player exists, observer callbacks are delayed while it is reconfigured.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    setDelayCallbacks(true);
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
    setDelayCallbacks(false);
}
2778
2779 #if !PLATFORM(IOS)
// (Mac) Adopts the AVOutputContext chosen by the device picker. A null
// context, or one with no device name, means no target is selected, so
// playback to any previous target is stopped.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(const MediaPlaybackTarget& target)
{
    m_outputContext = target.devicePickerContext();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p", this, m_outputContext.get());

    if (!m_outputContext || !m_outputContext.get().deviceName)
        stopPlayingToPlaybackTarget();
}
2788
// (Mac) Routes the AVPlayer's output to the stored AVOutputContext. No-ops
// when there is no player or the context is already active; observer
// callbacks are delayed while the player is reconfigured.
void MediaPlayerPrivateAVFoundationObjC::startPlayingToPlaybackTarget()
{
    if (!m_avPlayer)
        return;

    // Messaging a nil outputContext returns NO here, so a first-time
    // assignment still proceeds.
    if ([m_avPlayer.get().outputContext isEqual:m_outputContext.get()])
        return;

    setDelayCallbacks(true);
    m_avPlayer.get().outputContext = m_outputContext.get();
    setDelayCallbacks(false);

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::startPlayingToPlaybackTarget(%p) - target = %p", this, m_avPlayer.get().outputContext);
}
2803
// (Mac) Intended to detach the AVPlayer from its external output context.
// The actual detach is intentionally disabled pending a platform fix (see the
// FIXME below), so currently this only brackets a no-op with delayed callbacks.
void MediaPlayerPrivateAVFoundationObjC::stopPlayingToPlaybackTarget()
{
    if (!m_avPlayer)
        return;

    setDelayCallbacks(true);
    // FIXME: uncomment the following line once rdar://20335217 has been fixed.
    // m_avPlayer.get().outputContext = nil;
    setDelayCallbacks(false);

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::stopPlayingToPlaybackTarget(%p) - target = %p", this, m_avPlayer.get().outputContext);
}
2816
// (Mac) True when a player exists, a named output context is selected, and
// playback is actually in progress — m_cachedRate (a rate value) is returned
// through the bool result, so any nonzero rate counts as "playing".
bool MediaPlayerPrivateAVFoundationObjC::isPlayingToWirelessPlaybackTarget()
{
    if (!m_avPlayer)
        return false;

    if (!m_outputContext || !m_outputContext.get().deviceName)
        return false;

    return m_cachedRate;
}
2827 #endif // !PLATFORM(IOS)
2828
// (iOS) Keeps -[AVPlayer usesExternalPlaybackWhileExternalScreenIsActive] in
// sync with whether a fullscreen video layer exists; a no-op elsewhere or
// before the player is created.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

#if PLATFORM(IOS)
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
#endif
}
2838 #endif
2839
// Change-notification callbacks for observed AVPlayerItem properties
// (presumably driven by the m_objcObserver KVO helper — see tracksDidChange()).
// Each caches the new value BEFORE calling updateStates(), which reads the
// cached members; the will/did pairs use m_pendingStatusChanges so that
// updateStates() runs only once a batch of related changes has settled.

// AVPlayerItem.status changed.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}

// playbackLikelyToKeepUp is about to change.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

// playbackBufferEmpty is about to change.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

// playbackBufferFull is about to change.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

// seekableTimeRanges changed.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}

// loadedTimeRanges changed.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}

// The item layer became (or stopped being) ready for display. A readiness
// transition before any video track was seen triggers a track re-scan.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}

// An individual AVPlayerItemTrack's "enabled" state changed.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2918
// Attaches or detaches the AVPlayerItem from the AVPlayer to resume or stop
// buffering media data. No-ops when the state is unchanged; the flag is still
// recorded when no player exists yet.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Fixed: the log line previously named the method "shouldBufferData".
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;

    if (!m_avPlayer)
        return;

    // Swap the item out (or back in) to stop (or resume) buffering.
    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
2932
2933 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the dispatch-type string exposed
// on metadata DataCues; returns emptyAtom for unrecognized key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData may be unavailable (null) at runtime on
    // older systems, hence the extra null check before comparing.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
2955 #endif
2956
// Timed-metadata callback. Caches the new payload (NSNull / nil means "no
// metadata at this time") and, with DATACUE_VALUE enabled, converts each
// AVMetadataItem into a DataCue on the lazily created metadata track.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // NOTE(review): cues are dropped while seeking — presumably to avoid
    // adding cues with stale times; confirm against the observer's delivery.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // An empty payload only terminates the cues that are still open.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        // Items without a valid duration become open-ended cues, closed later
        // by updatePendingCueEndTimes().
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
2998
// AVPlayerItem.tracks changed. Rebuilds m_cachedTracks — filtering out
// streaming tracks that are already represented by an active
// AVMediaSelectionGroup, so they are not double-counted — and moves the
// "enabled" KVO registration from the old track list to the new one.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Unregister from the outgoing track list before replacing it.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        // Tracks backed by the asset itself are always kept.
        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The total byte estimate depends on the track list; force a recompute.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
3036
// KVO handler for AVPlayerItem's "hasEnabledAudio" (see -observeValueForKeyPath:...).
// Caches the new value, then re-derives track and player state.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
3044
// KVO handler for AVPlayerItem's "presentationSize" (see -observeValueForKeyPath:...).
// Caches the new size, then propagates the size change and refreshes state.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3052
// KVO handler for AVPlayerItem's "duration" (see -observeValueForKeyPath:...).
// Caches the new value, then invalidates the previously cached duration so callers
// pick up the new one.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3059
// KVO handler for AVPlayer's "rate" (see playerKVOProperties() and
// -observeValueForKeyPath:...). Caches the rate, refreshes player state, then
// announces the rate change.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3067     
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
// KVO handler for AVPlayer's "externalPlaybackActive"/"allowsExternalPlayback" key
// paths (see -observeValueForKeyPath:...); forwards to the shared wireless-target
// change notification.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
#endif
3074
// KVO handler for AVPlayerItem's "canPlayFastForward"; the value is only cached here
// for later queries.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3079
// KVO handler for AVPlayerItem's "canPlayFastReverse"; the value is only cached here
// for later queries.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3084
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    // Prefer the URL AVFoundation resolved for the asset, but only once that value has
    // actually finished loading; otherwise defer to the cross-platform fallback.
    bool haveLoadedResolvedURL = m_avAsset && [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] == AVKeyValueStatusLoaded;
    if (haveLoadedResolvedURL)
        return URL([m_avAsset resolvedURL]);

    return MediaPlayerPrivateAVFoundation::resolvedURL();
}
3092
NSArray* assetMetadataKeyNames()
{
    // Key names of the AVAsset values this player depends on (duration, size, tracks, ...).
    //
    // This is an Objective-C++ translation unit, so a function-local static is initialized
    // exactly once and in a thread-safe way (C++11 "magic statics"). The previous
    // "static NSArray* keys; if (!keys) { ... }" pattern was racy if ever reached from two
    // threads, and this form matches assetTrackMetadataKeyNames() and playerKVOProperties()
    // below. The array is intentionally never released.
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"duration",
        @"naturalSize",
        @"preferredTransform",
        @"preferredVolume",
        @"preferredRate",
        @"playable",
        @"resolvedURL",
        @"tracks",
        @"availableMediaCharacteristicsWithMediaSelectionOptions",
        nil];
    return keys;
}
3110
NSArray* itemKVOProperties()
{
    // Key paths observed on the AVPlayerItem; changes are dispatched in
    // -observeValueForKeyPath:ofObject:change:context: below.
    //
    // Thread-safe one-time initialization via a C++ function-local static, replacing the
    // racy "if (!keys)" lazy-init pattern; consistent with playerKVOProperties(). The
    // array is intentionally never released.
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        @"timedMetadata",
        @"canPlayFastForward",
        @"canPlayFastReverse",
        nil];
    return keys;
}
3133
NSArray* assetTrackMetadataKeyNames()
{
    // AVAssetTrack key names whose values this player reads; initialized once via a
    // thread-safe C++ function-local static and intentionally never released.
    static NSArray* keys = [[NSArray alloc] initWithObjects:
        @"totalSampleDataLength",
        @"mediaType",
        @"enabled",
        @"preferredTransform",
        @"naturalSize",
        nil];
    return keys;
}
3139
// Key paths observed on the AVPlayer itself; changes are dispatched in
// -observeValueForKeyPath:ofObject:change:context: below. Initialized once via a
// thread-safe C++ function-local static and intentionally never released.
NSArray* playerKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"rate",
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
                            @"externalPlaybackActive", @"allowsExternalPlayback",
#endif
                            nil];
    return keys;
}
3149 } // namespace WebCore
3150
// Objective-C adapter that receives KVO, notification and legible-output callbacks on
// behalf of a MediaPlayerPrivateAVFoundationObjC instance (m_callback). The callback
// pointer is raw: -disconnect clears it before the C++ object is destroyed, and every
// entry point checks it before use.
@implementation WebCoreAVFMovieObserver

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Severs the link to the C++ player; called during player teardown.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// AVPlayerItemDidPlayToEndTime notification handler.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO dispatcher: maps each observed key path (see itemKVOProperties() and
// playerKVOProperties()) plus the observation context to the matching
// MediaPlayerPrivateAVFoundationObjC member function, then schedules the bound call
// on the main thread.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // "Prior" notifications (delivered before the value changes) are routed to the
    // ...WillChange members; regular notifications to the ...DidChange members.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    // The bound member-function call to run on the main thread; stays null when the
    // key path / context combination is not one we handle.
    WTF::Function<void ()> function;

    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            // NOTE(review): the new value for "asset" is the AVAsset, yet it is wrapped as
            // RetainPtr<NSArray> — presumably to match setAsset's declared parameter type;
            // confirm against the setAsset declaration.
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Clamp the item time to zero; a non-numeric CMTime leaves 'now'
            // default-constructed.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"] || [keyPath isEqualToString:@"allowsExternalPlayback"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }
    
    if (function.isNull())
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput delegate callback; retains self and the payload across the
// hop to the main thread, where the cues are processed (if the player still exists).
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    RetainPtr<NSArray> strongSamples = nativeSamples;
    callOnMainThread([strongSelf, strongStrings, strongSamples, itemTime] {
        // Re-check the callback on the main thread; it may have been disconnected meanwhile.
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        callback->processCue(strongStrings.get(), strongSamples.get(), toMediaTime(itemTime));
    });
}

// Legible-output flush callback; flushes cues on the main thread.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;
    
    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    callOnMainThread([strongSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
            callback->flushCues();
    });
}
#endif

@end
3319
3320 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Objective-C adapter implementing the AVAssetResourceLoader delegate callbacks on behalf
// of a MediaPlayerPrivateAVFoundationObjC instance (m_callback). Delegate callbacks may
// arrive off the main thread, so each one retains self and its arguments and hops to the
// main thread before touching the player.
@implementation WebCoreAVFLoaderDelegate

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Retain self and the request so both survive until the main-thread block runs.
    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            // The player went away; fail the request rather than leaving it pending.
            [strongRequest finishLoadingWithError:nil];
            return;
        }

        if (!callback->shouldWaitForLoadingOfResource(strongRequest.get()))
            [strongRequest finishLoadingWithError:nil];
    });

    // Tell AVFoundation we will handle the request; the real decision is made above on
    // the main thread.
    return YES;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Server-trust challenges are not handled here; decline so default handling applies.
    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> strongChallenge = challenge;
    callOnMainThread([strongSelf, strongChallenge] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            // The player went away; cancel the challenge rather than leaving it pending.
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
            return;
        }

        if (!callback->shouldWaitForResponseToAuthenticationChallenge(strongChallenge.get()))
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
    });

    return YES;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (callback)
            callback->didCancelLoadingRequest(strongRequest.get());
    });
}

// Re-points the delegate at a (possibly null) player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
3399 #endif
3400
3401 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Thin adapter forwarding AVPlayerItemVideoOutput pull callbacks to the C++ player
// object (m_callback); the pointer is raw and cleared via -setCallback: at teardown.
@implementation WebCoreAVFPullDelegate
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;
    m_callback->outputMediaDataWillChange(output);
}

- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    // Intentionally ignored.
    UNUSED_PARAM(output);
}
@end
3428 #endif
3429
3430 #endif