d7f7717f23ffb4f7c5508c340ccafd440582ed9a
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVTrackPrivateAVFObjCImpl.h"
32 #import "AudioSourceProviderAVFObjC.h"
33 #import "AudioTrackPrivateAVFObjC.h"
34 #import "AuthenticationChallenge.h"
35 #import "BlockExceptions.h"
36 #import "CDMSessionAVFoundationObjC.h"
37 #import "Cookie.h"
38 #import "ExceptionCodePlaceholder.h"
39 #import "FloatConversion.h"
40 #import "FloatConversion.h"
41 #import "GraphicsContext.h"
42 #import "GraphicsContextCG.h"
43 #import "InbandMetadataTextTrackPrivateAVF.h"
44 #import "InbandTextTrackPrivateAVFObjC.h"
45 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
46 #import "OutOfBandTextTrackPrivateAVF.h"
47 #import "URL.h"
48 #import "Logging.h"
49 #import "MediaPlaybackTarget.h"
50 #import "MediaSelectionGroupAVFObjC.h"
51 #import "MediaTimeAVFoundation.h"
52 #import "PlatformTimeRanges.h"
53 #import "QuartzCoreSPI.h"
54 #import "SecurityOrigin.h"
55 #import "SerializedPlatformRepresentationMac.h"
56 #import "TextEncoding.h"
57 #import "TextTrackRepresentation.h"
58 #import "UUID.h"
59 #import "VideoTrackPrivateAVFObjC.h"
60 #import "WebCoreAVFResourceLoader.h"
61 #import "WebCoreCALayerExtras.h"
62 #import "WebCoreSystemInterface.h"
63 #import <objc/runtime.h>
64 #import <runtime/DataView.h>
65 #import <runtime/JSCInlines.h>
66 #import <runtime/TypedArrayInlines.h>
67 #import <runtime/Uint16Array.h>
68 #import <runtime/Uint32Array.h>
69 #import <runtime/Uint8Array.h>
70 #import <wtf/CurrentTime.h>
71 #import <wtf/Functional.h>
72 #import <wtf/ListHashSet.h>
73 #import <wtf/NeverDestroyed.h>
74 #import <wtf/text/CString.h>
75 #import <wtf/text/StringBuilder.h>
76
77 #if ENABLE(AVF_CAPTIONS)
78 #include "TextTrack.h"
79 #endif
80
81 #import <AVFoundation/AVFoundation.h>
82 #if PLATFORM(IOS)
83 #import "WAKAppKitStubs.h"
84 #import <CoreImage/CoreImage.h>
85 #import <mach/mach_port.h>
86 #else
87 #import <Foundation/NSGeometry.h>
88 #import <QuartzCore/CoreImage.h>
89 #endif
90
91 #if USE(VIDEOTOOLBOX)
92 #import <CoreVideo/CoreVideo.h>
93 #import <VideoToolbox/VideoToolbox.h>
94 #endif
95
96 #if USE(CFNETWORK)
97 #include "CFNSURLConnectionSPI.h"
98 #endif
99
// Workaround: WebCore's HashSet iterator does not define the std iterator
// traits, so standard algorithms that query value_type cannot use it.
// Specializing iterator_traits here supplies just the value_type needed.
namespace std {
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
105
// Container layer that keeps every sublayer's frame pinned to its own bounds,
// so a hosted AVPlayerLayer always fills this layer exactly as it resizes.
@interface WebVideoContainerLayer : CALayer
@end

@implementation WebVideoContainerLayer

- (void)setBounds:(CGRect)newBounds
{
    [super setBounds:newBounds];

    // Propagate the new geometry to each child so it covers us completely.
    for (CALayer* sublayer in self.sublayers)
        sublayer.frame = newBounds;
}
@end
118
119 #if ENABLE(AVF_CAPTIONS)
120 // Note: This must be defined before our SOFT_LINK macros:
121 @class AVMediaSelectionOption;
122 @interface AVMediaSelectionOption (OutOfBandExtensions)
123 @property (nonatomic, readonly) NSString* outOfBandSource;
124 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
125 @end
126 #endif
127
128 #if PLATFORM(IOS)
129 @class AVPlayerItem;
130 @interface AVPlayerItem (WebKitExtensions)
131 @property (nonatomic, copy) NSString* dataYouTubeID;
132 @end
133 #endif
134
135 @interface AVURLAsset (WebKitExtensions)
136 @property (nonatomic, readonly) NSURL *resolvedURL;
137 @end
138
139 #if PLATFORM(MAC) && ENABLE(WIRELESS_PLAYBACK_TARGET)
140 typedef AVOutputDevicePickerContext AVOutputDevicePickerContextType;
141
142 @interface AVPlayer (WebKitExtensions)
143 @property (nonatomic) AVOutputDevicePickerContext *outputDevicePickerContext;
144 @end
145 #endif
146
147 typedef AVPlayer AVPlayerType;
148 typedef AVPlayerItem AVPlayerItemType;
149 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
150 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
151 typedef AVMetadataItem AVMetadataItemType;
152 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
153 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
154
155 #pragma mark - Soft Linking
156
157 // Soft-linking headers must be included last since they #define functions, constants, etc.
158 #import "CoreMediaSoftLink.h"
159
160 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
161 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
162 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
163
164 #if USE(VIDEOTOOLBOX)
165 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
166 #endif
167
168 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
169 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
170 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
171 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
172 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
173 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
174 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
175 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
176
177 #if USE(VIDEOTOOLBOX)
178 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
179 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
180 #endif
181
182 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
183 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
184 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
185 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
186 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
187 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
188 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
189
190 SOFT_LINK_CLASS(CoreImage, CIContext)
191 SOFT_LINK_CLASS(CoreImage, CIImage)
192
193 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
194 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
195 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
196 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
197 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
198 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
199 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
200 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
201 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
202 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
203 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
204 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
205 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
206 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
207 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
208 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
209
210 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
211
212 #define AVPlayer getAVPlayerClass()
213 #define AVPlayerItem getAVPlayerItemClass()
214 #define AVPlayerLayer getAVPlayerLayerClass()
215 #define AVURLAsset getAVURLAssetClass()
216 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
217 #define AVMetadataItem getAVMetadataItemClass()
218
219 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
220 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
221 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
222 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
223 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
224 #define AVMediaTypeVideo getAVMediaTypeVideo()
225 #define AVMediaTypeAudio getAVMediaTypeAudio()
226 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
227 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
228 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
229 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
230 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
231 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
232 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
233 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
234 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
235 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
236
237 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
238 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
239 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
240
241 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
242 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
243 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
244
245 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
246 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
247 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
248 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
249
250 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
251 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
252 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
253 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
254 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
255 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
256 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
257 #endif
258
259 #if ENABLE(AVF_CAPTIONS)
260 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
261 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
262 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
263 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
264 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
265 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
266 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
267 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
268 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
269 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
270
271 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
272 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
273 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
274 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
275 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
276 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
277 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
278 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
279 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
280 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
281 #endif
282
283 #if ENABLE(DATACUE_VALUE)
284 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
285 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
286 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
287 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
288 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
289
290 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
291 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
292 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
293 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
294 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
295 #endif
296
297 #if PLATFORM(IOS)
298 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
299 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
300 #endif
301
302 using namespace WebCore;
303
304 enum MediaPlayerAVFoundationObservationContext {
305     MediaPlayerAVFoundationObservationContextPlayerItem,
306     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
307     MediaPlayerAVFoundationObservationContextPlayer,
308     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
309 };
310
// Bridges AVFoundation KVO and notification callbacks back into the owning
// MediaPlayerPrivateAVFoundationObjC. When legible (caption) output support is
// compiled in, it also serves as the legible-output push delegate.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer; presumably nulled by -disconnect during teardown — confirm in the implementation.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath is implicitly typed id (KVO actually passes NSString *),
// and context is declared as the observation-context enum rather than void * —
// verify the implementation uses matching types before tightening this.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
330
331 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate that forwards resource-loading requests to
// the owning MediaPlayerPrivateAVFoundationObjC so WebCore can service them.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer; the owner resets it via -setCallback: in its destructor.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
339 #endif
340
341 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for the AVPlayerItemVideoOutput; notifies the owning player
// when new video frames become available for pulling.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Raw back-pointer; the owner resets it via -setCallback: in its destructor.
    dispatch_semaphore_t m_semaphore; // NOTE(review): usage not visible in this chunk — presumably paces frame delivery; confirm against the implementation.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
351 #endif
352
353 namespace WebCore {
354
355 static NSArray *assetMetadataKeyNames();
356 static NSArray *itemKVOProperties();
357 static NSArray* assetTrackMetadataKeyNames();
358
359 #if !LOG_DISABLED
// Render a bool as "true"/"false" for LOG() messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
364 #endif
365
366 #if ENABLE(ENCRYPTED_MEDIA_V2)
367 typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
// Returns the process-wide map from a MediaPlayer to its AVFoundation private
// implementation. Entries are added by the constructor and removed by the
// destructor below; the ENCRYPTED_MEDIA_V2 CDM machinery uses it to find the
// private player for a given MediaPlayer.
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
}
373 #endif
374
375 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily creates (exactly once, thread-safely) the serial queue on which all
// WebCoreAVFLoaderDelegate callbacks are delivered.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t initOnce;
    dispatch_once(&initOnce, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
385 #endif
386
387 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates (exactly once, thread-safely) the serial queue on which all
// WebCoreAVFPullDelegate callbacks are delivered.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullQueue;
    static dispatch_once_t initOnce;
    dispatch_once(&initOnce, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
397 #endif
398
399 #if USE(CFNETWORK)
// Adapts an NSURLAuthenticationChallenge to WebCore's AuthenticationClient
// interface so WebCore's credential machinery can answer challenges raised by
// CFNetwork during media loads. Every response is forwarded straight to the
// challenge's original sender.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    // Expose RefCounted's ref/deref publicly; the AuthenticationClient
    // ref/deref hooks below delegate to them.
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    virtual void refAuthenticationClient() override { ref(); }
    virtual void derefAuthenticationClient() override { deref(); }

    // NOTE(review): the five handlers below lack the override keyword the two
    // methods above use — consider adding it for consistency once the base
    // class signatures are confirmed.

    // Forward the chosen credential to the challenge's original sender.
    virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential)
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&)
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedCancellation(const AuthenticationChallenge&)
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // The next two sender selectors are optional protocol additions, so probe
    // with respondsToSelector: before calling.
    virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&)
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedChallengeRejection(const AuthenticationChallenge&)
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge; // Retained; asserted non-null at construction.
};
449 #endif
450
// Registers this engine with the MediaPlayer factory when AVFoundation can be
// soft-linked. The three zero arguments are registrar callbacks this engine
// does not provide (their roles are not visible in this file).
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
            getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
457
// Constructs the AVFoundation-backed player. Heavyweight AVFoundation objects
// (AVPlayer, AVPlayerItem, layers, outputs) are created lazily later; here we
// only create the Objective-C helper/delegate objects and zero the cached state.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register in the global map so the CDM machinery can find this private
    // player from its MediaPlayer; the destructor removes the entry.
    playerToPrivateMap().set(player, this);
#endif
}
494
// Tears down all AVFoundation state. Delegate callbacks are nulled first so
// late callbacks from other queues become no-ops, then outstanding resource
// loaders are invalidated, the video layer destroyed, and loading cancelled.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the loader delegate before invalidating loaders so no request
    // callback can reach this object mid-destruction.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // dispatch_semaphore_t is manually managed here; NOTE(review): presumably
    // created elsewhere with dispatch_semaphore_create — released to balance.
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
519
// Cancels any in-flight load and detaches from all AVFoundation objects:
// removes every KVO and notification registration this object added, releases
// the asset, player item, and player, and resets the cached state. Load-state
// changes are suppressed for the duration so the cancellation's own completion
// callbacks do not re-enter state handling.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every KVO observation registered on the item and the player.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"outputDevicePickerContext"];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track carries an "enabled" observation; remove before dropping.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setPlayerItem(nullptr);
#endif

    setIgnoreLoadStateChanges(false);
}
583
// Reports whether layer-backed rendering has been requested; the flag is set
// by createVideoLayer() once layer creation has been asked for.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
588
// A context renderer is present when we can paint frames without a layer:
// through the pixel-buffer video output (when that feature is compiled in),
// or through an AVAssetImageGenerator.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
597
// Creates the non-layer ("context") renderer used for painting: the video
// output when available, otherwise an AVAssetImageGenerator fallback.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
606
// Lazily creates an AVAssetImageGenerator for snapshot-style painting. Zero
// time tolerances request the exact frame at the asked-for time rather than a
// nearby keyframe; the preferred track transform keeps rotated video upright.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    // Need a loaded asset, and only ever create the generator once.
    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
623
// Destroys whichever context renderers exist: the video output (when that
// feature is compiled in) and always the image generator.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
631
// Releases the AVAssetImageGenerator, if one was created.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    // Use nullptr (not 0) to match the other RetainPtr resets in this file.
    m_imageGenerator = nullptr;
}
641
// Asynchronously creates the AVPlayerLayer used for layer-backed rendering.
// The work is bounced to the main thread; a WeakPtr guards against this
// object being destroyed before the callback runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: state may have changed before this ran on the main thread.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Let the client know painting switched to layer-backed rendering.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
667
// Creates the AVPlayerLayer and wires it to the AVPlayer. On iOS the player
// layer is hosted inside either the fullscreen layer or a
// WebVideoContainerLayer (which keeps sublayers sized to its bounds);
// elsewhere the layer is simply sized to the current content box.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readyForDisplay so hasAvailableVideoFrame() can report layer readiness.
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = player()->client().mediaPlayerContentBoxRect().pixelSnappedSize();
    // NOTE(review): log message still says "createVideoLayer" — kept as-is.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    [m_videoLayer web_disableAllActions];
    m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
#ifndef NDEBUG
    [m_videoInlineLayer setName:@"WebVideoContainerLayer"];
#endif
    [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    if (m_videoFullscreenLayer) {
        // Fullscreen: size the player layer to the fullscreen frame and host it there.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else {
        // Inline: host inside the container layer, which tracks its own bounds.
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
    }
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
703
// Detaches and releases the AVPlayerLayer: removes the readyForDisplay KVO
// observation, unhooks the player, and (on iOS) removes the layer from the
// fullscreen hierarchy and drops the inline container layer.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
    m_videoInlineLayer = nil;
#endif

    m_videoLayer = nil;
}
722
// Reports whether a displayable video frame exists. When rendering to a layer,
// use the AVPlayerLayer's cached readyForDisplay state (updated via KVO);
// otherwise report whether painting has ever produced a frame.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
730
731 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore out-of-band track kind to the AVFoundation media
// characteristics used when registering the track with the asset.
// arrayWithObjects: is kept (rather than an array literal) because the
// soft-linked characteristic constants may be nil when the framework is absent.
// FIXME: Match these to correct types:
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    switch (kind) {
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    default:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
749     
// Forwards an out-of-band track mode change to the shared base-class handler.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
754     
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    // Push each out-of-band track source's mode down to the platform text
    // track with the matching out-of-band identifier.
    const Vector<RefPtr<PlatformTextTrack>>& trackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> outOfBandTrack = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> selectionOption = outOfBandTrack->mediaSelectionOption();

        for (auto& source : trackSources) {
            RetainPtr<CFStringRef> identifier = String::number(source->uniqueId()).createCFString();

            if (![[selectionOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(identifier.get())])
                continue;

            // Hidden is the default; Disabled and Showing override it.
            InbandTextTrackPrivate::Mode newMode = InbandTextTrackPrivate::Hidden;
            if (source->mode() == PlatformTextTrack::Disabled)
                newMode = InbandTextTrackPrivate::Disabled;
            else if (source->mode() == PlatformTextTrack::Showing)
                newMode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(newMode);
            break;
        }
    }
}
785 #endif
786
787
// Returns the canonical form of |url| as computed by the registered
// NSURLProtocol, falling back to the original URL whenever canonicalization
// is not possible (empty URL, request creation failure, or no canonical form).
static NSURL *canonicalURL(const String& url)
{
    NSURL *originalURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return originalURL;

    RetainPtr<NSURLRequest> originalRequest = adoptNS([[NSURLRequest alloc] initWithURL:originalURL]);
    if (!originalRequest)
        return originalURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:originalRequest.get()];
    return canonicalRequest ? [canonicalRequest URL] : originalURL;
}
804
805 #if PLATFORM(IOS)
// Translates a WebCore Cookie into the property dictionary form NSHTTPCookie
// expects. Cookie expiration is stored in milliseconds; NSDate wants seconds.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([[NSMutableDictionary alloc] init]);
    [properties setObject:cookie.name forKey:NSHTTPCookieName];
    [properties setObject:cookie.value forKey:NSHTTPCookieValue];
    [properties setObject:cookie.domain forKey:NSHTTPCookieDomain];
    [properties setObject:cookie.path forKey:NSHTTPCookiePath];
    [properties setObject:[NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)] forKey:NSHTTPCookieExpires];
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
823 #endif
824
// Creates the AVURLAsset for |url|, forwarding the page's headers, cookies,
// out-of-band text tracks, and loader restrictions to AVFoundation.
// Idiom cleanup: boxed literals (@(...), @YES) replace the old NSNumber
// factory calls, including a C-style TRUE.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    // Disallow references between local and remote media within one asset.
    [options.get() setObject:@(AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote) forKey:AVURLAssetReferenceRestrictionsKey];

    // Forward the page's Referer and User-Agent so media loads look like
    // other subresource loads.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject:@YES forKey:AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track so AVFoundation can expose it as a
    // media selection option.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the page's cookies to AVFoundation, which performs its own loads.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
909
// Replaces the AVPlayer's current item with |item|, hopping to the main
// thread if necessary — AVPlayer item replacement must happen there.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    // Keep both objects alive until the asynchronous replacement runs.
    RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> protectedItem = item;
    dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
        [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
    });
}
926
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    // Suppress notification callbacks while the player is being configured.
    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    // Observe rate changes; the context pointer distinguishes player-level
    // observations from player-item-level ones in observeValueForKeyPath.
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"outputDevicePickerContext" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebKit applies its own media selection criteria (e.g. caption language),
    // so turn off AVFoundation's automatic selection.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    // Restore a previously chosen wireless output device, if any.
    if (m_outputDevicePickerContext)
        m_avPlayer.get().outputDevicePickerContext = m_outputDevicePickerContext.get();
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    // Attach an already-created item so playback state carries over.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
968
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    // Watch for end-of-playback and for every cached KVO property of the item.
    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    // Spectral preserves pitch across rate changes; Varispeed lets it shift.
    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if PLATFORM(IOS)
    AtomicString value;
    if (player()->doesHaveAttribute("data-youtube-id", &value))
        [m_avPlayerItem.get() setDataYouTubeID: value];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Configure a legible (caption) output that delivers WebVTT cues to our
    // observer on the main queue instead of letting AVFoundation render them.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the current item.
    if (m_provider)
        m_provider->setPlayerItem(m_avPlayerItem.get());
#endif

    setDelayCallbacks(false);
}
1018
// Asynchronously loads the asset's "playable" key (once per load) and posts
// AssetPlayabilityKnown on the main thread when the answer is available.
// Idiom cleanup: array literal replaces [NSArray arrayWithObject:].
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    // The weak pointer guards against this object being destroyed before the
    // completion handler runs.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:@[@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1035
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    // A dispatch group tracks the asset-level key load plus one load per track
    // so "metadataLoaded" is announced only after everything has finished.
    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
    dispatch_group_enter(metadataLoadingGroup);
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // If this player is still alive and the "tracks" key loaded, kick
            // off a metadata load for each individual track.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup);
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup);
                    }];
                }
            }
            // Balance the enter from before the asset-level load.
            dispatch_group_leave(metadataLoadingGroup);
        });
    }];

    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });

        // Manual release: dispatch objects are not ARC-managed in this file.
        dispatch_release(metadataLoadingGroup);
    });
}
1067
// Translates the cached AVPlayerItem state into the engine-neutral enum.
// Order matters: explicit Unknown/Failed statuses win over buffering hints,
// and likely-to-keep-up wins over buffer-full/empty.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1086
// Wraps the underlying AVPlayer in the engine-neutral PlatformMedia union.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationMediaPlayerType;
    platformMedia.media.avfMediaPlayer = m_avPlayer.get();
    return platformMedia;
}
1095
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    // No layer is exposed until one has been explicitly requested.
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;
#if PLATFORM(IOS)
    // On iOS the inline container layer hosts the actual video layer.
    return m_videoInlineLayer.get();
#else
    return m_videoLayer.get();
#endif
}
1104
1105 #if PLATFORM(IOS)
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    // Re-parent without implicit animations.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    
    // Root of the layer tree containing the passed-in fullscreen layer.
    CALayer *oldRootLayer = videoFullscreenLayer;
    while (oldRootLayer.superlayer)
        oldRootLayer = oldRootLayer.superlayer;

    CALayer *newRootLayer = nil;
    
    // Move the video layer into the fullscreen host when entering fullscreen,
    // back into the inline container when leaving, or detach it entirely if
    // no host layer exists.
    if (m_videoFullscreenLayer && m_videoLayer) {
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoLayer removeFromSuperlayer];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoFullscreenLayer.get();
    } else if (m_videoInlineLayer && m_videoLayer) {
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoInlineLayer.get();
    } else if (m_videoLayer)
        [m_videoLayer removeFromSuperlayer];

    while (newRootLayer.superlayer)
        newRootLayer = newRootLayer.superlayer;

    // When the layer moves between two different layer trees, share a fence
    // port across the affected CAContexts so their commits are coordinated.
    // NOTE(review): this appears intended to avoid a visual glitch during the
    // hand-off between contexts — confirm against CAContext SPI semantics.
    if (oldRootLayer && newRootLayer && oldRootLayer != newRootLayer) {
        mach_port_t fencePort = 0;
        for (CAContext *context in [CAContext allContexts]) {
            if (context.layer == oldRootLayer || context.layer == newRootLayer) {
                if (!fencePort)
                    fencePort = [context createFencePort];
                else
                    [context setFencePort:fencePort];
            }
        }
        // Deallocating MACH_PORT_NULL when no context matched is harmless.
        mach_port_deallocate(mach_task_self(), fencePort);
    }
    [CATransaction commit];

    // Keep the text track (caption) representation attached to the fullscreen layer.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1159
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    // Remember the frame even without a fullscreen layer; it is applied when
    // one is attached (see setVideoFullscreenLayer).
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer) {
        [m_videoLayer setStyle:nil]; // This enables actions, i.e. implicit animations.
        [CATransaction begin];
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];
        [CATransaction commit];
        // Re-disable implicit animations now that the frame change is committed.
        [m_videoLayer web_disableAllActions];
    }
    syncTextTrackBounds();
}
1175
// Applies the requested scaling mode to the video layer, mapping the
// engine-neutral gravity enum onto AVLayerVideoGravity strings.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:videoGravity];
}
1194
// Returns the most recently cached timed metadata, or nil when none exists.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1201
// Serializes the player item's access log, using the log's own preferred
// string encoding. Empty when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> serializedLog = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return serializedLog.get();
}
1212
// Serializes the player item's error log, using the log's own preferred
// string encoding. Empty when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> serializedLog = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return serializedLog.get();
}
1223 #endif
1224
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Toggle the video layer's visibility without triggering implicit
    // animations, so the change takes effect immediately.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1233     
// Starts playback by applying the requested rate to AVPlayer. Callbacks are
// deferred while the rate is mutated; the cached rate mirrors the request.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1245
// Pauses playback by driving the AVPlayer rate to zero, mirroring the value
// into the cached rate. Callbacks are deferred during the mutation.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1257
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Asking the asset for its duration before loading completes would block
    // synchronously, so bail out until the asset status reaches Loaded.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();

    // Prefer the player item's duration once it is ready to play; some assets
    // never report a duration of their own.
    CMTime cmDuration = (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        ? [m_avPlayerItem.get() duration]
        : [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    // Indefinite durations (e.g. live streams) map to positive infinity.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1282
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    // Non-numeric times report as zero; numeric but negative times are
    // clamped to zero.
    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(itemTime), MediaTime::zeroTime());
}
1294
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Flush any in-flight metadata cues so they do not span the seek.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    // Report completion on the main thread; the weak pointer guards against
    // this object being destroyed before the seek finishes.
    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1321
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    // Volume is controlled by the system on iOS; ignore the request.
    UNUSED_PARAM(volume);
#else
    if (metaDataAvailable())
        [m_avPlayer.get() setVolume:volume];
#endif
}
1334
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // Caption selection is handled through media selection options elsewhere;
    // this override only logs the request.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1344
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    // Remember the requested rate and hand it to AVPlayer; rate-change
    // callbacks are deferred while the state is mutated.
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1352
// Reports the cached playback rate rather than querying AVPlayer directly.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1360
// Selects the audio time-pitch algorithm: Spectral preserves pitch across
// rate changes, Varispeed lets it shift with the rate.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    NSString *algorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:algorithm];
}
1366
// Converts the cached AVPlayerItem loaded ranges into PlatformTimeRanges,
// skipping invalid or empty CMTimeRanges.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        bufferedRanges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1381
// Returns the earliest seekable time across all valid cached seekable
// ranges, or zero when no valid range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, toMediaTime(range.start));
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1401
// Returns the latest seekable time across all valid cached seekable ranges.
// Fetches the ranges lazily from the player item when the cache is empty.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1419
// Returns the latest end time across all valid cached loaded (buffered)
// ranges, or zero when nothing is buffered.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }

    return latestEnd;
}
1438
// Lazily sums the sample data length of every cached track; the total is
// cached in m_cachedTotalBytes so later calls are free.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1452
// Adopts |asset| (an AVAsset, or nil to clear) as this player's media source.
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
1457
MediaPlayerPrivateAVFoundationObjC::assetStatus() const is reported as the
// least-loaded of the asset's metadata keys: any Loading/Failed/Cancelled key
// determines the overall status; only when every key is loaded do we consult
// the "playable" value.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // All keys loaded; Playable only if AVFoundation says the asset is playable.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1486
// Returns the error code associated with loading the asset's "playable" key,
// or 0 when there is no asset or no error.
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return error ? [error code] : 0;
}
1496
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

    // Prefer the video output path when it has a frame ready; otherwise fall
    // back to the image generator.
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that software painting produced a frame (see hasAvailableVideoFrame()).
    m_videoFrameHasDrawn = true;
}
1517
// Best-effort paint entry point: draws the current frame only when metadata
// is ready, we are rendering to a context (not a layer), and a context
// renderer (image generator or video output) already exists.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const FloatRect& rect)
{
    if (!metaDataAvailable())
        return;
    if (context->paintingDisabled())
        return;

    // Already compositing through a layer; a 2D paint would be redundant.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort: don't create a renderer just for this call.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1533
// Draws a snapshot of the current time into |context| using the
// AVAssetImageGenerator path. Does nothing if no image can be created.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    // CGContextDrawImage draws with a flipped y-axis relative to WebCore's
    // coordinate space, so translate to the rect's bottom edge and flip
    // vertically before drawing at the origin.
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    // No manual "image = 0" reset is needed: RetainPtr releases the image
    // when it goes out of scope (the previous explicit reset was redundant).
}
1549
// Returns the set of lowercased MIME types AVFoundation reports as playable.
// The list is built lazily on first call and cached for the process lifetime;
// note the function returns the set by value, so callers receive a copy.
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add([mimeType lowercaseString]);

    return cache;
}

// Creates a still image of the media at |time|, sized to fit |rect|, using
// AVAssetImageGenerator. Returns a null RetainPtr if image creation fails.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // 600 is a common timescale evenly divisible by typical frame rates.
    // Passing nil for actualTime/error means we accept whichever nearby frame
    // the generator picks and treat failures as a null image.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    // Re-wrap the image in the device RGB color space so subsequent drawing
    // does not require a color conversion.
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1587
// Replaces |supportedTypes| with the full set of MIME types this engine
// can play (see mimeTypeCache()).
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// True for the key systems this engine handles: FairPlay Streaming
// ("com.apple.fps", "com.apple.fps.1_0") and Clear Key ("org.w3c.clearkey").
// Comparison is case-insensitive.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps")
        || equalIgnoringCase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringCase(keySystem, "org.w3c.clearkey");
}
#endif
1601
// Implements canPlayType() for this engine: validates the (optional) EME key
// system, rejects MSE, checks the container MIME type against the cache, and
// asks AVFoundation about the full "type; codecs" string when codecs are given.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringCase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringCase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // Media Source Extensions are handled by a different engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1642
// Returns whether this engine can handle the given EME key system with the
// given container MIME type. An empty keySystem is never supported here.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Previously returned MediaPlayer::IsNotSupported, an enum value that
        // happens to be 0, from this bool function; return false explicitly —
        // behavior is unchanged, the type is now correct.)
        if (equalIgnoringCase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringCase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1665
1666 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1667 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Answers an AVAssetResourceLoadingRequest directly from in-memory key data:
// fills in the content-information request, responds with the requested byte
// range of |keyData|, and marks the request finished.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        // Clamp the requested range to the available key data.
        long long start = [dataRequest currentOffset];
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // Reject ranges outside the key data. finishLoadingWithError:nil
        // fails the request without a specific error object.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        // slice() takes int offsets; the asserts document the assumed bound.
        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1693 #endif
1694
// AVAssetResourceLoader delegate hook. Returning true means WebKit will
// service the request (EME key delivery for "skd"/"clearkey" URLs, or media
// data via WebCoreAVFResourceLoader); returning false tells AVFoundation we
// will not handle it.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // FairPlay key requests use the "skd" scheme.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Size prefix is written little-endian (third argument is
        // littleEndian = true).
        initDataView->set<uint32_t>(0, keyURISize, true);

        // NOTE(review): the element count "keyURI.length() / sizeof(unsigned char)"
        // equals keyURI.length() since sizeof(unsigned char) == 1; presumably the
        // divisor was meant to mirror the UChar sizing above — confirm intent.
        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the pending request so the key can be delivered later.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // If the key is already cached, answer the request immediately and
        // tell AVFoundation we will not be loading it asynchronously.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // All other schemes: hand the request to a WebCoreAVFResourceLoader,
    // tracked in m_resourceLoaderMap so it can be cancelled/removed later.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1751
// Wraps the platform authentication challenge in a WebCore
// AuthenticationChallenge and asks the MediaPlayer client whether the load
// should wait for a response.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    // With CFNetwork the NS challenge is re-wrapped as a CF challenge plus a
    // client adapter (presumably routing responses back to the NS sender —
    // see WebCoreNSURLAuthenticationChallengeClient).
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1764
// AVFoundation cancelled this resource request: stop the loader we created
// for it in shouldWaitForLoadingOfResource(), if any.
// (Removed the unused local "scheme" that was computed but never read.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1774
// Loading for this request is over; remove it from the map, dropping the
// map's reference to the associated WebCoreAVFResourceLoader.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1779 #endif
1780
// This engine is usable only when both the AVFoundation and CoreMedia
// frameworks can be loaded at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    if (!AVFoundationLibrary())
        return false;
    return isCoreMediaFrameworkAvailable();
}
1785
// Intended to snap |timeValue| to the media's timescale, but currently a
// pass-through on every path (see FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1794
// How long (in seconds) a cached current-time value may be reused before
// re-querying AVFoundation. Zero on iOS and OS X 10.10+ (no caching);
// 5 seconds on older OS X releases.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1803
// Applies the current aspect-ratio policy to the video layer's gravity,
// inside a CATransaction with implicit animations disabled.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1822
// Returns the first track in |tracks| whose isEnabled is YES, or nil when
// none is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack* track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1832
// Recomputes cached hasVideo/hasAudio/hasClosedCaptions state (and the track
// lists, when VIDEO_TRACK is enabled) whenever the tracks collection changes.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary-audio language so characteristicsChanged()
    // can be fired below if it ends up different after the update.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch the characteristic notifications produced below into one.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // The natural size must account for the track's preferredTransform
        // (e.g. rotation recorded in the media).
        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Scan the enabled player-item tracks by media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Media-selection options can supply audio/video even when no
        // AVPlayerItemTrack of that type is enabled.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Caption availability comes from the legible media-selection group when
    // the platform supports selection groups.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Fall back to legacy closed-caption tracks when selection options
    // provided no captions.
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}
1933
1934 #if ENABLE(VIDEO_TRACK)
// Reconciles |oldItems| (our track-private wrappers) with the current
// AVPlayerItemTracks of |trackType| in |tracks|: wrappers whose track
// vanished are removed (and reported via removedFunction), new tracks get
// fresh wrappers via itemFactory (reported via addedFunction), and surviving
// wrappers are kept. On return, oldItems holds the updated list.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the old wrappers into removed vs. surviving.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the MediaPlayer only after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1978
1979 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Selection-group variant of the reconciliation above: compares the group's
// current selection options against the options referenced by |oldItems|,
// removes wrappers whose option disappeared, creates wrappers for new options
// via itemFactory, and notifies the MediaPlayer of removals and additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions();

    // Only add selection options which do not have an associated persistant track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        // -track is checked via respondsToSelector: since it is not available
        // on all OS versions.
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition old wrappers into removed vs. surviving, then append new ones.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the MediaPlayer only after oldItems reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2039 #endif
2040
// Reconciles m_audioTracks with the audio tracks currently in the player item
// and, when selection groups are available, with the audible media-selection
// group (see hasAudio handling in tracksChanged()).
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily discover the audible selection group for the current item.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    // Refresh cached properties (label, language, etc.) on every wrapper.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2066
// Reconciles m_videoTracks with the video tracks currently in the player item
// and, when selection groups are available, with the visual media-selection
// group (mirrors updateAudioTracks()).
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily discover the visual selection group for the current item.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh cached properties on every VIDEO track wrapper. The previous
    // version iterated m_audioTracks here (copy-paste from updateAudioTracks),
    // leaving video track properties stale.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2092
// A rendered text-track representation is only required while the iOS
// fullscreen layer is active; everywhere else captions render normally.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        return true;
#endif
    return false;
}
2101
// (iOS only) Keeps the text-track representation layer aligned with the video:
// uses the video layer's videoRect when available, otherwise the full
// fullscreen frame. No-op elsewhere or when either layer is missing.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;
    
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2112
// (iOS only) Installs the layer that renders text tracks while fullscreen:
// detaches any previous representation layer and, when both the fullscreen
// layer and a new representation layer exist, attaches and positions it.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just make sure its bounds are up to date.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2136 #endif // ENABLE(VIDEO_TRACK)
2137
#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider for the current player item
// and returns it on every subsequent call.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    return m_provider.get();
}
#endif
2146
// Pushes the cached presentation size through as the natural size, but only
// once an asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(m_cachedPresentationSize);
}
2154     
// True when the asset's resolved URL has the same scheme/host/port origin as
// the URL that was originally requested (i.e. no cross-origin redirect).
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset)
        return false;
    // The resolved URL must be loaded before origins can be compared.
    if ([m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;

    Ref<SecurityOrigin> originOfResolvedURL(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> originOfRequestedURL(SecurityOrigin::createFromString(assetURL()));
    return originOfResolvedURL.get().isSameSchemeHostPort(&originOfRequestedURL.get());
}
2164
2165 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates the AVPlayerItemVideoOutput used to pull frames for 2D painting and
// attaches it (with our pull delegate) to the current player item. No-op if
// there is no item yet or an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox we accept the output's native format; createPixelBuffer()
    // converts to 32BGRA afterwards, so no attributes are requested here.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2188
// Detaches the video output from the player item (when one still exists) and
// releases it. Safe to call when no output was ever created.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    m_videoOutput = 0;
}
2200
// Pulls a pixel buffer for the item's current time from the video output.
// Returns null when no new frame is available. With VideoToolbox the buffer
// is transcoded to 32BGRA so downstream CG drawing can consume it.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // NOTE(review): the results of CVPixelBufferCreate and the transfer are
    // not checked; presumably failure here is considered unrecoverable —
    // confirm before relying on outputBuffer being valid.
    CVPixelBufferRef outputBuffer;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2241
// Whether a frame can be painted via the video-output path: either a frame
// was captured earlier, or the (lazily created) output has a new buffer for
// the item's current time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously captured frame still counts as available.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2255
// CGDataProvider direct-access callback: locks the pixel buffer for reading
// and returns its base address. The lock is balanced in
// CVPixelBufferReleaseBytePointerCallback.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    auto buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(buffer);
}
2262
// CGDataProviderDirect callback: balances the lock taken in
// CVPixelBufferGetBytePointerCallback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferUnlockBaseAddress(static_cast<CVPixelBufferRef>(info), kCVPixelBufferLock_ReadOnly);
}
2268
// CGDataProviderDirect callback invoked when the provider is destroyed:
// balances the CFRetain made in createImageFromPixelBuffer.
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CFRelease(static_cast<CVPixelBufferRef>(info));
}
2274
// Wraps a BGRA pixel buffer in a CGImage without copying the pixels: the
// image's data provider reads directly from the buffer's base address,
// locking the buffer only while CoreGraphics accesses the bytes.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // 32-bit little-endian with alpha first matches the BGRA byte layout asserted above.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2292
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // copyPixelBufferForItemTime:itemTimeForDisplay: returns nil when the
    // pixel buffer for the requested time was already vended; in that case we
    // keep the last valid image (if any) so it can still be displayed.
    if (auto pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2303
// Paints the current video frame into |context| via the AVPlayerItemVideoOutput
// path, honoring the first enabled video track's preferred transform.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const FloatRect& outputRect)
{
    // Block (for up to 1 second) until a frame is available so the first paint
    // after output creation does not draw nothing.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(*context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Drawing into the inverse-mapped rect and then concatenating the transform
    // onto the CTM nets out to placing the (possibly rotated) frame exactly in
    // outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().mapRect(outputRect);

    context->concatCTM(videoTransform);
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2334
// Returns the CGImage for the current media time, refreshing the cached image
// first. May return the previous frame (or null) when no newer pixel buffer is
// available — see updateLastImage().
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2340
// Synchronously waits (bounded) for the video output to announce that new
// media data is coming. outputMediaDataWillChange() signals the semaphore from
// the output's callback; we time out after 1 second so a stalled output cannot
// hang painting forever.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // The semaphore is created lazily and reused across waits.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second. Use DISPATCH_TIME_NOW rather than a bare 0 as the
    // dispatch_time() base, per the libdispatch API contract.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC));

    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2354
// AVPlayerItemVideoOutput callback: wakes waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2359 #endif
2360
2361 #if ENABLE(ENCRYPTED_MEDIA)
// Legacy EME v1 entry point: builds a FairPlay streaming content key request
// from the init data's key URI / key ID / certificate, reports it to the
// MediaPlayer client as a key message, and files the pending
// AVAssetResourceLoadingRequest under a freshly minted session ID.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // A resource-loading request for this key URI must already be pending.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    // The key ID becomes the asset identifier, passed as UTF-8 bytes.
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface the underlying error code to the client; this is reported as
        // a key error rather than a method failure.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2403
// Legacy EME v1: delivers decryption key bytes to the resource-loading request
// previously filed under |sessionID| and completes that request.
// initDataPtr/initDataLength are unused in this implementation.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);

    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Hand the key data to the pending request and finish it; the request is
    // no longer needed after this, so drop it from the map.
    RetainPtr<AVAssetResourceLoadingRequest> request = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[request.get() dataRequest] respondWithData:keyData.get()];
    [request.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    return MediaPlayer::NoError;
}
2424
// Legacy EME v1: abandons the pending request for |sessionID|. Dropping the
// map entry is all the cancellation we do.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // HashMap::remove() returns false when no such session exists.
    if (!m_sessionIDToRequestMap.remove(sessionID))
        return MediaPlayer::InvalidPlayerState;

    return MediaPlayer::NoError;
}
2436 #endif
2437
2438 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Transfers ownership of the pending AVAssetResourceLoadingRequest for
// |keyURI| to the caller, removing it from the map (null if absent).
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2443
// Called when new keys become available: satisfies every pending loading
// request whose key the player now has cached, then forgets those entries.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    // Collect the satisfied ids first so the map is not mutated mid-iteration.
    Vector<String> satisfiedKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        if (auto keyData = player()->cachedKeyForKeyId(entry.key)) {
            fulfillRequestWithKeyData(entry.value.get(), keyData.get());
            satisfiedKeyIds.append(entry.key);
        }
    }

    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2463
// Creates a CDM session for a supported key system; null otherwise.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    if (keySystemIsSupported(keySystem))
        return std::make_unique<CDMSessionAVFoundationObjC>(this);

    return nullptr;
}
2471 #endif
2472
2473 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Reconciles m_textTracks with the closed-caption tracks currently present in
// m_cachedTracks: existing legacy CC tracks that are still present survive,
// new ones are appended, and any left in |removedTextTracks| afterwards are
// reported as removed.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        // Walk removedTextTracks in reverse so remove(i - 1) does not disturb
        // the yet-unvisited indices.
        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(m_textTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                // Still present: keep it by taking it off the removal list.
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2508 #endif
2509
2510 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media selection groups are only trustworthy once the asset has finished
// loading its availableMediaCharacteristicsWithMediaSelectionOptions key.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2521
// Returns the asset's legible (caption/subtitle) selection group, or nil
// until the asset's media selection data has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2529
// Returns the asset's audible selection group, or nil until the asset's
// media selection data has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2537
// Returns the asset's visual selection group, or nil until the asset's
// media selection data has loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2545
// Reconciles m_textTracks with the playable options of the asset's legible
// media selection group: surviving options are kept, new options create
// tracks, and tracks left in |removedTextTracks| are reported as removed.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Walk in reverse so remove(i - 1) does not disturb unvisited indices.
        // Legacy CC tracks are handled elsewhere and skipped here.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            // Fetch the selection option backing this track; out-of-band and
            // in-band tracks store it on different concrete classes.
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                // Option still present: keep its track.
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2601
// Lazily creates the single in-band metadata text track (for timed metadata
// cues) and registers it with the MediaPlayer client. Idempotent.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    // Dispatch type identifies HLS timed metadata to the page.
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2611
// Forwards incoming legible-output cue data to the selected text track.
// Cues arriving while no track is selected are dropped.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    if (m_currentTextTrack)
        m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2619
// Discards any partially accumulated cue state on the selected text track
// (e.g. after a seek).
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2629 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2630
// Selects |track| as the active text track, routing the selection to the
// appropriate AVFoundation mechanism for its category: legacy CC display,
// out-of-band selection option, or in-band selection option. Passing null
// deselects everything.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselect both possible mechanisms; it is harmless to clear one that
        // was not active.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2659
// Determines (and caches in m_languageOfPrimaryAudioTrack) the BCP-47-style
// language of the primary audio track: first from the selected audible media
// selection option, then from a sole ungrouped audio track; empty otherwise.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no unambiguous primary language.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2701
2702 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// True when AVFoundation reports external (e.g. AirPlay) playback is active.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool isWireless = m_avPlayer.get().externalPlaybackActive;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isWireless));

    return isWireless;
}
2713
// Maps the platform's external-device type to the MediaPlayer target type.
// On non-iOS platforms only AirPlay is possible, so that is assumed.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // All enum values are handled above; an unknown value is a programming error.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2736
// Display name of the current external playback device (empty when there is
// no player yet).
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();
    
    String targetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, targetName.utf8().data());

    return targetName;
}
2747
// Whether external (wireless) video playback is disabled. When a player
// exists, the cached flag is refreshed from AVFoundation first.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    bool disabled = !m_allowsWirelessVideoPlayback;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(disabled));

    return disabled;
}
2758
// Enables/disables external (wireless) video playback. The policy is cached
// even before a player exists so it can be applied at player creation.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));

    m_allowsWirelessVideoPlayback = !disabled;
    if (m_avPlayer)
        [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
}
2768
2769 #if !PLATFORM(IOS)
// Remembers the wireless playback target's device-picker context and applies
// it to the player when one exists (it is applied later otherwise).
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(const MediaPlaybackTarget& target)
{
    m_outputDevicePickerContext = target.devicePickerContext();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p", this, m_outputDevicePickerContext.get());

    if (m_avPlayer)
        m_avPlayer.get().outputDevicePickerContext = m_outputDevicePickerContext.get();
}
2781 #endif
2782
// Keeps the player's external-playback-while-external-screen behavior in sync
// with fullscreen state (iOS only; no-op elsewhere).
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

#if PLATFORM(IOS)
    // Only mirror to the external screen while in element fullscreen.
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
#endif
}
2792 #endif
2793
// KVO relay: caches the AVPlayerItem status and lets updateStates() react.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;
    updateStates();
}
2800
// Marks the start of a likelyToKeepUp change; balanced by the DidChange call.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    ++m_pendingStatusChanges;
}
2805
// Caches the new likelyToKeepUp value; recomputes state only once all pending
// WillChange notifications have been balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2814
// Marks the start of a bufferEmpty change; balanced by the DidChange call.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    ++m_pendingStatusChanges;
}
2819
// Caches the new bufferEmpty value; recomputes state only once all pending
// WillChange notifications have been balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2828
// Marks the start of a bufferFull change; balanced by the DidChange call.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    ++m_pendingStatusChanges;
}
2833
// Caches the new bufferFull value; recomputes state only once all pending
// WillChange notifications have been balanced.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2842
// KVO relay: caches the new seekable ranges and notifies the base class.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;
    seekableTimeRangesChanged();
    updateStates();
}
2850
// KVO relay: caches the new loaded (buffered) ranges and notifies the base class.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;
    loadedTimeRangesChanged();
    updateStates();
}
2858
// KVO relay for isReadyForDisplay. Becoming ready can reveal a video track we
// have not reported yet, so tracksChanged() is triggered in that case.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2866
// KVO relay for a track's "enabled" property; the new value itself is not
// needed, only the fact that the track set changed.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2872
// Starts or stops data buffering. Detaching the AVPlayerItem from the player
// stops buffering; reattaching the retained item resumes it.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Log with the actual method name, consistent with every other LOG in this file.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;
    
    // Without a player the flag is merely recorded; it is applied when the
    // player is created.
    if (!m_avPlayer)
        return;

    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
2886
2887 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the DataCue type string exposed
// to the page; returns the empty atom for unrecognized key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData is weak-linked; guard against it being absent.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
2909 #endif
2910
// Handles a batch of timed metadata items: closes out any still-open cues at
// the batch's earliest start time, then adds one data cue per item on the
// (lazily created) metadata track.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // KVO can deliver NSNull for an absent value; normalize that to nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // Cues added during a seek would carry stale times; skip them.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // An empty batch just closes out whatever cues are still pending.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        // Items without a valid duration stay open until a later batch closes them.
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
2952
// KVO relay for the player item's tracks. Re-filters the track list (dropping
// streaming tracks already represented by a media selection group), and keeps
// the "enabled" KVO registrations balanced across old and new track sets.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Unregister from the outgoing track set before replacing it.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    // Register with the incoming track set; balanced by the removal above on
    // the next change (and in teardown).
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // Total size must be recomputed for the new track set.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
2990
// KVO relay: caches whether any audio track is enabled and notifies the base class.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;
    tracksChanged();
    updateStates();
}
2998
// KVO callback for the item's "presentationSize" property: cache the new size
// before notifying, so sizeChanged()/updateStates() observe the updated value.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
3006
// KVO callback for the item's "duration" property: record the new duration and
// invalidate the base class's cached duration so the next query recomputes it.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3013
// KVO callback for the AVPlayer's "rate" property: cache first so that
// updateStates() and rateChanged() both see the current playback rate.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3021     
3022 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Forwards wireless-playback-target changes (e.g. AirPlay activation) to the
// shared AVFoundation-layer notification path.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3027 #endif
3028
// KVO callback for the item's "canPlayFastForward" property; cache only, no
// notification is needed — the value is read on demand.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3033
// KVO callback for the item's "canPlayFastReverse" property; cache only, no
// notification is needed — the value is read on demand.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3038
// Returns the asset's fully resolved URL when AVFoundation has finished
// loading it; otherwise falls back to the base class's notion of the URL.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    if (m_avAsset && [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] == AVKeyValueStatusLoaded)
        return URL([m_avAsset resolvedURL]);

    return MediaPlayerPrivateAVFoundation::resolvedURL();
}
3046
// Keys whose values must be loaded asynchronously on an AVAsset before the
// asset is usable for playback.
// Uses a function-local static with a dynamic initializer, which C++11
// guarantees is initialized exactly once even under concurrent first calls —
// unlike the previous check-then-assign pattern, which could race and leak.
// This also matches the pattern used by assetTrackMetadataKeyNames().
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"duration",
                @"naturalSize",
                @"preferredTransform",
                @"preferredVolume",
                @"preferredRate",
                @"playable",
                @"resolvedURL",
                @"tracks",
                @"availableMediaCharacteristicsWithMediaSelectionOptions",
               nil];
    return keys;
}
3064
// AVPlayerItem key paths observed via KVO by WebCoreAVFMovieObserver.
// Uses a function-local static with a dynamic initializer, which C++11
// guarantees is initialized exactly once even under concurrent first calls —
// unlike the previous check-then-assign pattern, which could race and leak.
// This also matches the pattern used by assetTrackMetadataKeyNames().
NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"presentationSize",
            @"status",
            @"asset",
            @"tracks",
            @"seekableTimeRanges",
            @"loadedTimeRanges",
            @"playbackLikelyToKeepUp",
            @"playbackBufferFull",
            @"playbackBufferEmpty",
            @"duration",
            @"hasEnabledAudio",
            @"timedMetadata",
            @"canPlayFastForward",
            @"canPlayFastReverse",
            nil];
    return keys;
}
3087
// Keys loaded asynchronously on each AVAssetTrack before its properties are
// read. The local static's dynamic initializer is thread-safe under C++11.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
3093
3094 } // namespace WebCore
3095
@implementation WebCoreAVFMovieObserver

// Stores a raw back-pointer to the owning player-private object. The pointer
// is cleared via -disconnect when the owner is destroyed, so every entry
// point below nil-checks m_callback before use.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Called during teardown of the owning MediaPlayerPrivateAVFoundationObjC.
// Cancels any pending performSelector requests and drops the back-pointer.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

// Asset metadata finished loading; forward as a main-thread notification.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// AVPlayerItemDidPlayToEndTimeNotification handler.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO dispatcher. May be called on a background thread; it binds the
// appropriate MediaPlayerPrivateAVFoundationObjC handler into a function and
// schedules it on the main thread, guarded by a WeakPtr to the player.
// NOTE(review): keyPath is implicitly typed id here rather than NSString *;
// the isEqualToString: sends below rely on it actually being an NSString.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    if (!m_callback)
        return;

    // With NSKeyValueObservingOptionPrior, a "will change" callback precedes
    // the "did change" one; the prior key distinguishes the two.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

#if !LOG_DISABLED
    if (willChange)
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - will change, keyPath = %s", self, [keyPath UTF8String]);
    else {
        RetainPtr<NSString> valueString = adoptNS([[NSString alloc] initWithFormat:@"%@", newValue]);
        LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - did change, keyPath = %s, value = %s", self, [keyPath UTF8String], [valueString.get() UTF8String]);
    }
#endif

    WTF::Function<void ()> function;

    // Observation of the AVPlayerLayer.
    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    // Observation of an individual AVPlayerItemTrack (registered in tracksDidChange).
    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // "Will change" notifications for a subset of AVPlayerItem properties.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time alongside the metadata, clamped
            // to zero and defaulting to an invalid MediaTime if non-numeric.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
        else if ([keyPath isEqualToString:@"outputDevicePickerContext"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }

    if (function.isNull())
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput delegate: new attributed caption strings arrived.
// Retain self/strings/samples so they survive the hop to the main thread,
// where m_callback is re-checked before processing the cues.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    RetainPtr<NSArray> strongSamples = nativeSamples;
    callOnMainThread([strongSelf, strongStrings, strongSamples, itemTime] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        callback->processCue(strongStrings.get(), strongSamples.get(), toMediaTime(itemTime));
    });
}

// AVPlayerItemLegibleOutput delegate: output was flushed (e.g. after a seek);
// clear any queued cues on the main thread.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;
    
    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    callOnMainThread([strongSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
            callback->flushCues();
    });
}
#endif

@end
3266
3267 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

// Stores a raw back-pointer to the owning player-private object; updated via
// -setCallback: and nil-checked before every use (delegate callbacks can
// arrive on AVFoundation's loader queue after the owner is gone).
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// AVAssetResourceLoaderDelegate: returning YES defers the request; the actual
// decision is made on the main thread. If the owner is gone (or declines),
// the request is finished with a nil error so AVFoundation does not stall.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            [strongRequest finishLoadingWithError:nil];
            return;
        }

        if (!callback->shouldWaitForLoadingOfResource(strongRequest.get()))
            [strongRequest finishLoadingWithError:nil];
    });

    return YES;
}

// AVAssetResourceLoaderDelegate: server-trust challenges are explicitly not
// handled here; other challenges are forwarded to the main thread and
// cancelled if the owner is gone or declines to respond.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> strongChallenge = challenge;
    callOnMainThread([strongSelf, strongChallenge] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
            return;
        }

        if (!callback->shouldWaitForResponseToAuthenticationChallenge(strongChallenge.get()))
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
    });

    return YES;
}

// AVAssetResourceLoaderDelegate: AVFoundation cancelled a deferred request;
// tell the owner (on the main thread) to abandon the in-flight load.
- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (callback)
            callback->didCancelLoadingRequest(strongRequest.get());
    });
}

// Re-points (or clears, with null) the back-pointer to the owning player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
3346 #endif
3347
3348 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@implementation WebCoreAVFPullDelegate

// Stores a raw back-pointer to the owning player-private object; cleared or
// re-pointed via -setCallback:.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Re-points (or clears, with null) the back-pointer to the owning player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

// AVPlayerItemOutputPullDelegate: new video frames will become available.
- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;

    m_callback->outputMediaDataWillChange(output);
}

// AVPlayerItemOutputPullDelegate: the output was flushed. Intentionally a no-op.
- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
}
@end
3375 #endif
3376
3377 #endif