[Mac] Update for output device API change
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2015 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVFoundationSPI.h"
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioSourceProviderAVFObjC.h"
34 #import "AudioTrackPrivateAVFObjC.h"
35 #import "AuthenticationChallenge.h"
36 #import "BlockExceptions.h"
37 #import "CDMSessionAVFoundationObjC.h"
38 #import "Cookie.h"
39 #import "ExceptionCodePlaceholder.h"
40 #import "FloatConversion.h"
41 #import "FloatConversion.h"
42 #import "GraphicsContext.h"
43 #import "GraphicsContextCG.h"
44 #import "InbandMetadataTextTrackPrivateAVF.h"
45 #import "InbandTextTrackPrivateAVFObjC.h"
46 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
47 #import "OutOfBandTextTrackPrivateAVF.h"
48 #import "URL.h"
49 #import "Logging.h"
50 #import "MediaPlaybackTarget.h"
51 #import "MediaSelectionGroupAVFObjC.h"
52 #import "MediaTimeAVFoundation.h"
53 #import "PlatformTimeRanges.h"
54 #import "QuartzCoreSPI.h"
55 #import "SecurityOrigin.h"
56 #import "SerializedPlatformRepresentationMac.h"
57 #import "TextEncoding.h"
58 #import "TextTrackRepresentation.h"
59 #import "UUID.h"
60 #import "VideoTrackPrivateAVFObjC.h"
61 #import "WebCoreAVFResourceLoader.h"
62 #import "WebCoreCALayerExtras.h"
63 #import "WebCoreSystemInterface.h"
64 #import <objc/runtime.h>
65 #import <runtime/DataView.h>
66 #import <runtime/JSCInlines.h>
67 #import <runtime/TypedArrayInlines.h>
68 #import <runtime/Uint16Array.h>
69 #import <runtime/Uint32Array.h>
70 #import <runtime/Uint8Array.h>
71 #import <wtf/CurrentTime.h>
72 #import <wtf/Functional.h>
73 #import <wtf/ListHashSet.h>
74 #import <wtf/NeverDestroyed.h>
75 #import <wtf/text/CString.h>
76 #import <wtf/text/StringBuilder.h>
77
78 #if ENABLE(AVF_CAPTIONS)
79 #include "TextTrack.h"
80 #endif
81
82 #import <AVFoundation/AVFoundation.h>
83 #if PLATFORM(IOS)
84 #import "WAKAppKitStubs.h"
85 #import <CoreImage/CoreImage.h>
86 #import <mach/mach_port.h>
87 #else
88 #import <Foundation/NSGeometry.h>
89 #import <QuartzCore/CoreImage.h>
90 #endif
91
92 #if USE(VIDEOTOOLBOX)
93 #import <CoreVideo/CoreVideo.h>
94 #import <VideoToolbox/VideoToolbox.h>
95 #endif
96
97 #if USE(CFNETWORK)
98 #include "CFNSURLConnectionSPI.h"
99 #endif
100
// Minimal iterator_traits specialization for HashSet's iterator type —
// presumably needed so std algorithms can consume
// HashSet<RefPtr<MediaSelectionOptionAVFObjC>> iterators (HashSet iterators do
// not define the standard nested typedefs). NOTE(review): confirm which
// algorithm requires this before removing.
namespace std {
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
106
// Container layer that keeps every sublayer's frame pinned to its own bounds,
// so the hosted AVPlayerLayer always fills it exactly.
@interface WebVideoContainerLayer : CALayer
@end

@implementation WebVideoContainerLayer

- (void)setBounds:(CGRect)bounds
{
    [super setBounds:bounds];
    // Propagate the new geometry to every child layer.
    for (CALayer *sublayer in [self sublayers])
        sublayer.frame = bounds;
}
@end
119
120 #if ENABLE(AVF_CAPTIONS)
121 // Note: This must be defined before our SOFT_LINK macros:
122 @class AVMediaSelectionOption;
123 @interface AVMediaSelectionOption (OutOfBandExtensions)
124 @property (nonatomic, readonly) NSString* outOfBandSource;
125 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
126 @end
127 #endif
128
129 #if PLATFORM(IOS)
130 @class AVPlayerItem;
131 @interface AVPlayerItem (WebKitExtensions)
132 @property (nonatomic, copy) NSString* dataYouTubeID;
133 @end
134 #endif
135
136 @interface AVURLAsset (WebKitExtensions)
137 @property (nonatomic, readonly) NSURL *resolvedURL;
138 @end
139
140 typedef AVPlayer AVPlayerType;
141 typedef AVPlayerItem AVPlayerItemType;
142 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
143 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
144 typedef AVMetadataItem AVMetadataItemType;
145 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
146 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
147
148 #pragma mark - Soft Linking
149
150 // Soft-linking headers must be included last since they #define functions, constants, etc.
151 #import "CoreMediaSoftLink.h"
152
153 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
154 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
155 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
156
157 #if USE(VIDEOTOOLBOX)
158 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
159 #endif
160
161 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
162 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
163 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
164 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
165 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
166 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
167 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
168 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
169
170 #if USE(VIDEOTOOLBOX)
171 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
172 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
173 #endif
174
175 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
176 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
177 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
178 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
179 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
180 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
181 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
182
183 SOFT_LINK_CLASS(CoreImage, CIContext)
184 SOFT_LINK_CLASS(CoreImage, CIImage)
185
186 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmSpectral, NSString*)
187 SOFT_LINK_POINTER(AVFoundation, AVAudioTimePitchAlgorithmVarispeed, NSString*)
188 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
189 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
190 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
191 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
192 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
193 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
194 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
195 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
196 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
197 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
198 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
199 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
200 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
201 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
202
203 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
204
205 #define AVPlayer getAVPlayerClass()
206 #define AVPlayerItem getAVPlayerItemClass()
207 #define AVPlayerLayer getAVPlayerLayerClass()
208 #define AVURLAsset getAVURLAssetClass()
209 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
210 #define AVMetadataItem getAVMetadataItemClass()
211
212 #define AVAudioTimePitchAlgorithmSpectral getAVAudioTimePitchAlgorithmSpectral()
213 #define AVAudioTimePitchAlgorithmVarispeed getAVAudioTimePitchAlgorithmVarispeed()
214 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
215 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
216 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
217 #define AVMediaTypeVideo getAVMediaTypeVideo()
218 #define AVMediaTypeAudio getAVMediaTypeAudio()
219 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
220 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
221 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
222 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
223 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
224 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
225 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
226 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
227 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
228 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
229
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Note: AVMediaSelectionGroupType / AVMediaSelectionOptionType are already
// typedef'd unconditionally above, so the identical duplicate typedefs that
// used to appear here were redundant and are omitted.

SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)

SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)

#define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
#define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
#define AVMediaSelectionOption getAVMediaSelectionOptionClass()
#define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
#define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
#define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
#define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
#endif
251
252 #if ENABLE(AVF_CAPTIONS)
253 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
255 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
256 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
257 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
258 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
259 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
260 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
261 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
262 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
263
264 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
265 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
266 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
267 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
268 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
269 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
270 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
271 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
272 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
273 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
274 #endif
275
276 #if ENABLE(DATACUE_VALUE)
277 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
278 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
279 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
280 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
281 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
282
283 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
284 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
285 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
286 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
287 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
288 #endif
289
290 #if PLATFORM(IOS)
291 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
292 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
293 #endif
294
295 using namespace WebCore;
296
// KVO context values: identify which kind of observed object (player item,
// item track, player, or player layer) produced a change delivered to
// -observeValueForKeyPath:ofObject:change:context:.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
303
// Observer object that forwards AVFoundation notifications, KVO changes, and
// (when supported) legible-output caption callbacks into the C++
// MediaPlayerPrivateAVFoundationObjC instance.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer to the C++ player; see -disconnect.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
323
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Bridges AVAssetResourceLoader delegate callbacks back into the C++ player so
// WebCore can service resource-loading requests for the asset.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer; reset via -setCallback: during teardown.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
333
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Receives AVPlayerItemVideoOutput pull notifications and forwards them to the
// C++ player.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Raw back-pointer; reset via -setCallback: during teardown.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
345
346 namespace WebCore {
347
348 static NSArray *assetMetadataKeyNames();
349 static NSArray *itemKVOProperties();
350 static NSArray* assetTrackMetadataKeyNames();
351
352 #if !LOG_DISABLED
// Logging helper: render a bool as the C string "true" or "false".
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
357 #endif
358
359 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Global map from a MediaPlayer to its AVFoundation-backed private player;
// registered in the constructor and removed in the destructor so the
// encrypted-media (EME v2) code can locate the private implementation.
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
}
366 #endif
367
368 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily creates (exactly once) the serial queue on which all
// WebCoreAVFLoaderDelegate resource-loading callbacks are delivered.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
378 #endif
379
380 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates (exactly once) the serial queue used for
// WebCoreAVFPullDelegate video-output callbacks.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_once_t creationToken;
    static dispatch_queue_t pullQueue;
    dispatch_once(&creationToken, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
390 #endif
391
392 #if USE(CFNETWORK)
// Adapts an NSURLAuthenticationChallenge's sender to WebCore's
// AuthenticationClient interface so CFNetwork-based authentication can be
// driven through WebCore's shared authentication code.
class WebCoreNSURLAuthenticationChallengeClient : public RefCounted<WebCoreNSURLAuthenticationChallengeClient>, public AuthenticationClient {
public:
    static RefPtr<WebCoreNSURLAuthenticationChallengeClient> create(NSURLAuthenticationChallenge *challenge)
    {
        return adoptRef(new WebCoreNSURLAuthenticationChallengeClient(challenge));
    }

    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::ref;
    using RefCounted<WebCoreNSURLAuthenticationChallengeClient>::deref;

private:
    WebCoreNSURLAuthenticationChallengeClient(NSURLAuthenticationChallenge *challenge)
        : m_challenge(challenge)
    {
        ASSERT(m_challenge);
    }

    virtual void refAuthenticationClient() override { ref(); }
    virtual void derefAuthenticationClient() override { deref(); }

    // Each AuthenticationClient callback is forwarded to the challenge's sender.
    // All are marked override, consistent with the ref/deref overrides above.
    virtual void receivedCredential(const AuthenticationChallenge&, const Credential& credential) override
    {
        [[m_challenge sender] useCredential:credential.nsCredential() forAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedRequestToContinueWithoutCredential(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] continueWithoutCredentialForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedCancellation(const AuthenticationChallenge&) override
    {
        [[m_challenge sender] cancelAuthenticationChallenge:m_challenge.get()];
    }

    // These two sender methods are optional protocol members; check before messaging.
    virtual void receivedRequestToPerformDefaultHandling(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(performDefaultHandlingForAuthenticationChallenge:)])
            [[m_challenge sender] performDefaultHandlingForAuthenticationChallenge:m_challenge.get()];
    }

    virtual void receivedChallengeRejection(const AuthenticationChallenge&) override
    {
        if ([[m_challenge sender] respondsToSelector:@selector(rejectProtectionSpaceAndContinueWithChallenge:)])
            [[m_challenge sender] rejectProtectionSpaceAndContinueWithChallenge:m_challenge.get()];
    }

    RetainPtr<NSURLAuthenticationChallenge> m_challenge;
};
442 #endif
443
// Registers this engine with the media-engine registrar, but only when
// AVFoundation is actually available at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar([](MediaPlayer* player) { return std::make_unique<MediaPlayerPrivateAVFoundationObjC>(player); },
        getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
450
// Constructor: seeds cached state and creates the helper Objective-C
// delegate/observer objects that forward AVFoundation callbacks to this object.
// NOTE(review): initializer order must match member declaration order in the
// header (not visible here) — keep it unchanged.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register in the global map so EME code can find this private player from
    // its MediaPlayer; removed again in the destructor.
    playerToPrivateMap().set(player, this);
#endif
}
487
// Destructor: detach every Objective-C delegate/observer before the
// AVFoundation objects go away, then tear down rendering and cancel loading.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Clear the delegate's back-pointer so late resource-loader callbacks are ignored.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // Balance the semaphore created elsewhere (presumably via
    // dispatch_semaphore_create — TODO confirm against createVideoOutput()).
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
512
// Cancels any in-progress load: tears down rendering, unregisters all
// notification/KVO observers, releases the asset, item, and player, and resets
// every cached property to its default. Order matters: observers must be
// removed before the observed objects are released.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Remove every KVO registration made on the player item before releasing it.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        m_avPlayerItem = nil;
    }
    // Likewise for the player: remove the time observer first, then KVO key paths.
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"outputContext"];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was observed for "enabled" changes; unobserve before dropping.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setPlayerItem(nullptr);
#endif

    setIgnoreLoadStateChanges(false);
}
576
// A layer renderer exists once we have been asked to create the video layer
// (actual creation may still be pending on the main thread; see createVideoLayer()).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
581
// A context renderer exists when we have a video output (preferred, when built)
// or an image generator.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
590
// Creates whichever context renderer this build supports: a video output when
// available, otherwise an image generator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
599
// Lazily creates an AVAssetImageGenerator for paint-to-context snapshots.
// No-op until an asset exists, and idempotent once the generator is made.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerances request the exact frame rather than a nearby keyframe.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
616
// Destroys both possible context renderers (video output when built, and the
// image generator — either may or may not exist; each destroy is a no-op if not).
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
624
// Releases the image generator, if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    // Use nullptr (not 0) to clear the RetainPtr, matching the rest of this file.
    m_imageGenerator = nullptr;
}
634
// Kicks off creation of the AVPlayerLayer rendering path. The actual work is
// bounced to the main thread; a weak pointer guards against this object being
// destroyed before the callback runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX)
        // With VideoToolbox we also keep a video output alongside the layer.
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Let the client know painting should now go through the layer.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
660
// Creates the AVPlayerLayer (and, on iOS, its WebVideoContainerLayer wrapper),
// attaches it to the player, registers readyForDisplay KVO, and parents it into
// either the fullscreen or the inline layer hierarchy.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    [m_videoLayer setContentsScale:player()->client().mediaPlayerContentsScale()];
    IntSize defaultSize = player()->client().mediaPlayerContentBoxRect().pixelSnappedSize();
    // Log message fixed: it previously named the wrong function ("createVideoLayer").
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    [m_videoLayer web_disableAllActions];
    m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
#ifndef NDEBUG
    [m_videoInlineLayer setName:@"WebVideoContainerLayer"];
#endif
    [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    if (m_videoFullscreenLayer) {
        // Fullscreen: size the video layer to the fullscreen frame and parent it there.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else {
        // Inline: nest in the container layer, which keeps sublayer frames in sync with its bounds.
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
    }
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
696
// Tears down the AVPlayerLayer: removes KVO, detaches it from the player, and
// drops all layer references.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    // Unregister KVO before releasing the layer.
    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    // When parented in the fullscreen layer, explicitly detach before releasing.
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
    m_videoInlineLayer = nil;
#endif

    m_videoLayer = nil;
}
715
// When rendering to a layer, readiness comes from the cached AVPlayerLayer
// readyForDisplay state; otherwise report whether a frame has ever been drawn.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
723
724 #if ENABLE(AVF_CAPTIONS)
// Maps a platform text-track kind to the AVFoundation media characteristics
// used to describe it to AVURLAsset's out-of-band track options.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // FIXME: Match these to correct types:
    if (kind == PlatformTextTrack::Forced)
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];

    if (kind == PlatformTextTrack::Description)
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];

    // Caption, Subtitle, and any unrecognized kind all currently map to the
    // spoken-dialog characteristic (identical results in the original if-chain).
    return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
}
742     
// Entry point for platform notifications that a text track's mode changed;
// simply forwards to the shared AVFoundation base-class handler.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
747     
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    // Pushes each out-of-band track source's mode (showing/hidden/disabled)
    // onto the matching platform text track. Tracks are matched by the
    // unique identifier embedded in the AVMediaSelectionOption.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are synchronized here.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Any unrecognized source mode falls back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
778 #endif
779
780
// Returns the canonical form of |url| as produced by the URL loading
// system, so the URL handed to AVFoundation matches what NSURLProtocol
// handlers will see. Falls back to the unmodified URL at each step.
static NSURL *canonicalURL(const String& url)
{
    NSURL *originalURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return originalURL;

    RetainPtr<NSURLRequest> originalRequest = adoptNS([[NSURLRequest alloc] initWithURL:originalURL]);
    if (!originalRequest)
        return originalURL;

    if (NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:originalRequest.get()])
        return [canonicalRequest URL];

    return originalURL;
}
797
798 #if PLATFORM(IOS)
// Translates a WebCore Cookie into the property-dictionary form that
// NSHTTPCookie understands. WebCore stores expiry in milliseconds since
// the epoch; NSDate wants seconds.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    } mutableCopy]);

    // The boolean attributes are only added when set.
    if (cookie.secure)
        [properties.get() setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties.get() setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
816 #endif
817
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    // Creates m_avAsset for |url|, threading WebKit state (reference
    // restrictions, HTTP headers, out-of-band text tracks, cookies, etc.)
    // into the AVURLAsset creation options. No-op if an asset exists.
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    // Disallow the media from referencing local resources from remote ones
    // and vice versa.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Forward the page's Referer and User-Agent headers when present.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    // Identify the hosting application to AVFoundation when the key is
    // available on this OS (it is soft-linked, hence the null check).
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track so AVFoundation includes them in
    // its media selection groups.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    // Pin media loads to a specific network interface when one was chosen.
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the document's cookies for this URL to AVFoundation.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route resource/key loading requests through our loader delegate.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
902
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    // Installs |item| as the player's current item. The replacement is
    // performed on the main thread (hopping there if necessary), keeping
    // both the player and the item alive across the dispatch.
    if (!m_avPlayer)
        return;

    if (pthread_main_np()) {
        [m_avPlayer replaceCurrentItemWithPlayerItem:item];
        return;
    }

    RetainPtr<AVPlayerType> strongPlayer = m_avPlayer.get();
    RetainPtr<AVPlayerItemType> strongItem = item;
    dispatch_async(dispatch_get_main_queue(), [strongPlayer, strongItem] {
        [strongPlayer replaceCurrentItemWithPlayerItem:strongItem.get()];
    });
}
919
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    // Creates m_avPlayer, registers KVO observers, re-applies cached state
    // (wireless playback settings, output context, player item), and
    // creates the layer for video content. No-op if a player exists.
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"outputContext" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    updateDisableExternalPlayback();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebKit picks media selection options itself; keep AVFoundation from
    // applying its own selection criteria.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

#if ENABLE(WIRELESS_PLAYBACK_TARGET) && !PLATFORM(IOS)
    // Re-apply an output context that was chosen before the player existed.
    if (m_outputContext)
        m_avPlayer.get().outputContext = m_outputContext.get();
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    // Attach a player item created before the player existed.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
961
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    // Creates m_avPlayerItem from m_avAsset, wires up notifications, KVO,
    // caption (legible) output, and the Web Audio provider. No-op if an
    // item already exists.
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    // Observe end-of-playback plus every property in itemKVOProperties().
    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    // Spectral preserves pitch across rate changes; Varispeed does not.
    [m_avPlayerItem setAudioTimePitchAlgorithm:(player()->preservesPitch() ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed)];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if PLATFORM(IOS)
    AtomicString value;
    if (player()->doesHaveAttribute("data-youtube-id", &value))
        [m_avPlayerItem.get() setDataYouTubeID: value];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Receive caption cues (including native WebVTT samples) through a
    // legible output; suppress AVFoundation's own caption rendering.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    // Keep the Web Audio source provider pointed at the current item.
    if (m_provider)
        m_provider->setPlayerItem(m_avPlayerItem.get());
#endif

    setDelayCallbacks(false);
}
1011
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Asynchronously loads the asset's "playable" key exactly once. The
    // completion is bounced to the main thread and delivered as an engine
    // notification; a weak pointer guards against |this| being destroyed
    // before the load finishes.
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
1028
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    // Load the asset-level metadata keys first; once those arrive, kick off
    // loading of the per-track keys. A dispatch group joins all the loads
    // so metadataLoaded fires exactly once, after everything has finished.
    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
    dispatch_group_enter(metadataLoadingGroup);
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only enumerate tracks once the "tracks" key has loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup);
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup);
                    }];
                }
            }
            // Balances the enter before the asset-level load above.
            dispatch_group_leave(metadataLoadingGroup);
        });
    }];

    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });

        // Balances dispatch_group_create(). NOTE(review): assumes dispatch
        // objects are under manual retain/release in this file — confirm.
        dispatch_release(metadataLoadingGroup);
    });
}
1060
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    // Collapses the KVO-cached AVPlayerItem state into the engine-neutral
    // ItemStatus. Check order is significant: hard states (unknown/failed)
    // first, then buffering signals, then ready-to-play as the fallback.
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1079
// Exposes the underlying AVPlayer to clients that need direct access to
// the platform media object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationMediaPlayerType;
    platformMedia.media.avfMediaPlayer = m_avPlayer.get();
    return platformMedia;
}
1088
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    // Only expose a layer once one was explicitly requested. On iOS the
    // compositor gets the inline container layer rather than the video
    // layer itself, which can be re-parented into the fullscreen layer.
#if PLATFORM(IOS)
    return m_haveBeenAskedToCreateLayer ? m_videoInlineLayer.get() : nullptr;
#else
    return m_haveBeenAskedToCreateLayer ? m_videoLayer.get() : nullptr;
#endif
}
1097
1098 #if PLATFORM(IOS)
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    // Moves the video layer between the inline container and the fullscreen
    // host layer, synchronizing the commit across CAContexts with a fence
    // port so both render trees update in the same frame.
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    // Re-parent without implicit animations.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    
    // NOTE(review): oldRootLayer is seeded from the *incoming* layer, not
    // the video layer's current ancestor — confirm this is intentional.
    CALayer *oldRootLayer = videoFullscreenLayer;
    while (oldRootLayer.superlayer)
        oldRootLayer = oldRootLayer.superlayer;

    CALayer *newRootLayer = nil;
    
    // Prefer the fullscreen layer when set; fall back to the inline
    // container; otherwise just detach the video layer.
    if (m_videoFullscreenLayer && m_videoLayer) {
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoLayer removeFromSuperlayer];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoFullscreenLayer.get();
    } else if (m_videoInlineLayer && m_videoLayer) {
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        newRootLayer = m_videoInlineLayer.get();
    } else if (m_videoLayer)
        [m_videoLayer removeFromSuperlayer];

    while (newRootLayer.superlayer)
        newRootLayer = newRootLayer.superlayer;

    // When the move crosses layer trees, share one fence port between the
    // affected CAContexts so their commits are synchronized.
    if (oldRootLayer && newRootLayer && oldRootLayer != newRootLayer) {
        mach_port_t fencePort = 0;
        for (CAContext *context in [CAContext allContexts]) {
            if (context.layer == oldRootLayer || context.layer == newRootLayer) {
                if (!fencePort)
                    fencePort = [context createFencePort];
                else
                    [context setFencePort:fencePort];
            }
        }
        mach_port_deallocate(mach_task_self(), fencePort);
    }
    [CATransaction commit];

    // Keep the caption representation layer above the fullscreen video.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

    updateDisableExternalPlayback();
}
1152
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    // Resizes the video layer to the fullscreen frame and keeps the caption
    // layer bounds in sync.
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer) {
        // Temporarily re-enable implicit animations so the resize animates
        // with the fullscreen transition, then disable them again.
        [m_videoLayer setStyle:nil]; // This enables actions, i.e. implicit animations.
        [CATransaction begin];
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];
        [CATransaction commit];
        [m_videoLayer web_disableAllActions];
    }
    syncTextTrackBounds();
}
1168
// Maps WebCore's video gravity onto the corresponding AVFoundation layer
// gravity and applies it to the video layer.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        // Unknown gravity values fall back to aspect fit, as before.
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:videoGravity];
}
1187
// Returns the most recently cached timed metadata, or nil when none has
// been received. RetainPtr::get() already yields nil when empty, so no
// explicit null check is needed.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData.get();
}
1194
// Returns the player item's access log serialized in its extended textual
// form, or the empty string when no item exists.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> serializedLog = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return serializedLog.get();
}
1205
// Returns the player item's error log serialized in its extended textual
// form, or the empty string when no item exists.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> serializedLog = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return serializedLog.get();
}
1216 #endif
1217
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Hide or show the video layer without an implicit fade animation.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1226     
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    // Playback starts by setting the player's rate to the requested rate.
    // Setting the rate generates several KVO callbacks; batch them up.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1238
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    // AVFoundation pauses by setting the playback rate to zero; the cached
    // rate is updated to match so rate() reflects the paused state.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1250
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    // An indefinite CMTime (e.g. while streaming) maps to infinite duration.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1275
// Returns the current playback position, clamped to be non-negative, or
// zero when metadata or the player item is unavailable.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    // Clamp in case the reported time is negative.
    return std::max(toMediaTime(itemTime), MediaTime::zeroTime());
}
1287
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Drop partially-accumulated metadata cues; they belong to the old
    // playback position.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    // Guard the completion with a weak pointer: |this| may be destroyed
    // before AVFoundation finishes the seek.
    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1314
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    // Volume changes are ignored on iOS.
    UNUSED_PARAM(volume);
    return;
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1327
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // NOTE(review): this override only logs; caption visibility appears to
    // be handled through the media-selection/legible-output path — confirm.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1337
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    // Cache the rate before pushing it to AVPlayer; setRate generates KVO
    // callbacks, so batch them up.
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1345
// Reports the KVO-cached playback rate rather than querying AVPlayer
// directly; zero until metadata is available.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1353
// Selects the audio time-pitch algorithm: Spectral preserves pitch across
// rate changes, Varispeed does not.
void MediaPlayerPrivateAVFoundationObjC::setPreservesPitch(bool preservesPitch)
{
    if (!m_avPlayerItem)
        return;

    NSString *pitchAlgorithm = preservesPitch ? AVAudioTimePitchAlgorithmSpectral : AVAudioTimePitchAlgorithmVarispeed;
    [m_avPlayerItem setAudioTimePitchAlgorithm:pitchAlgorithm];
}
1359
// Converts the KVO-cached loadedTimeRanges into engine-neutral time
// ranges, skipping entries that are invalid or empty.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto bufferedRanges = std::make_unique<PlatformTimeRanges>();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        bufferedRanges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return bufferedRanges;
}
1374
// The minimum seekable time is the earliest start among the valid cached
// seekable ranges; zero when there are none.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    bool foundValidRange = false;
    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, toMediaTime(range.start));
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1394
// The maximum seekable time is the latest end among the valid seekable
// ranges; zero when there are none.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Lazily fetch seekableTimeRanges if KVO has not cached them yet.
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1412
// The maximum loaded time is the latest end among the valid cached loaded
// ranges; zero when nothing has loaded.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }

    return latestEnd;
}
1431
// Sums the sample-data length of every cached track. The result is stored
// in a mutable member so the tracks are only walked once.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1445
// Replaces the current AVAsset reference with |asset|.
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
1450
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    // Aggregates the load status of every metadata key we care about into a
    // single engine-level status: a still-loading key reports Loading, a
    // failed key Failed, a cancelled key Cancelled; once all keys are in,
    // distinguish a playable asset from a merely loaded one.
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1479
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    // Returns the NSError code from loading the "playable" key, or 0 when
    // there is no asset or no error (messaging nil returns 0).
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return [error code];
}
1489
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const FloatRect& rect)
{
    // Draws the current frame into |context|, preferring the video-output
    // path when it has a frame available and falling back to the image
    // generator otherwise. ObjC exceptions from AVFoundation are trapped.
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that at least one frame has been drawn (see
    // hasAvailableVideoFrame()).
    m_videoFrameHasDrawn = true;
}
1510
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const FloatRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if we already have an image generator or video output available.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1526
// Paints the frame for the current time using AVAssetImageGenerator.
// The context is flipped because CGContextDrawImage uses a bottom-left
// origin while GraphicsContext is top-left.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const FloatRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (image) {
        GraphicsContextStateSaver stateSaver(*context);
        context->translate(rect.x(), rect.y() + rect.height());
        context->scale(FloatSize(1.0f, -1.0f));
        context->setImageInterpolationQuality(InterpolationLow);
        IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
        CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
        // The previous explicit `image = 0;` reset was removed: RetainPtr
        // releases the image when it goes out of scope immediately below.
    }
}
1542
// Returns the lazily-built set of MIME types that AVFoundation reports as
// playable, lowercased for case-insensitive lookups. Uses an unguarded
// static flag, so access is assumed single-threaded.
// NOTE(review): the closing brace of this function was missing in the
// source as given; restored here.
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add([mimeType lowercaseString]);

    return cache;
}

// Synchronously generates a CGImage for |time|, sized to fit |rect|, via
// AVAssetImageGenerator, then copies it into the device RGB color space so
// it can be drawn directly into a GraphicsContext.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const FloatRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // 600 is the customary media timescale for time conversions here.
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1580
// MediaPlayerFactory callback: reports every MIME type this engine plays.
// NOTE(review): the closing brace of this function was missing in the
// source as given; restored here.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}

1586 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// True for the key systems this engine can handle: Apple FairPlay
// Streaming ("com.apple.fps", "com.apple.fps.1_0") and W3C Clear Key.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps")
        || equalIgnoringCase(keySystem, "com.apple.fps.1_0")
        || equalIgnoringCase(keySystem, "org.w3c.clearkey");
}
1593 #endif
1594
// MediaPlayerFactory callback: decides whether this engine can (maybe)
// play content described by |parameters| — MIME type, codecs string and,
// when EME is enabled, key system.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    // (isEmpty() is true for null strings, so the former isNull() check was redundant.)
    if (!parameters.keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        if (equalIgnoringCase(parameters.keySystem, "org.w3c.clearkey") && !parameters.type.isEmpty() && !equalIgnoringCase(parameters.type, "application/x-mpegurl"))
            return MediaPlayer::IsNotSupported;

        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Ask AVFoundation about the full extended MIME type (with codecs).
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1635
// Returns true when |keySystem| (optionally restricted by |mimeType|) is
// supported by this engine. Always false when EME is compiled out or when
// keySystem is empty.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // "Clear Key" is only supported with HLS:
        // (Fixed: this previously returned MediaPlayer::IsNotSupported — an
        // enum value — from a bool function; it only worked because that
        // enumerator happens to convert to false.)
        if (equalIgnoringCase(keySystem, "org.w3c.clearkey") && !mimeType.isEmpty() && !equalIgnoringCase(mimeType, "application/x-mpegurl"))
            return false;

        if (!keySystemIsSupported(keySystem))
            return false;

        if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1658
1659 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
1660 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Satisfies an AVAssetResourceLoadingRequest directly from in-memory key
// data, honoring the request's current offset and requested length.
static void fulfillRequestWithKeyData(AVAssetResourceLoadingRequest *request, ArrayBuffer* keyData)
{
    if (AVAssetResourceLoadingContentInformationRequest *infoRequest = [request contentInformationRequest]) {
        [infoRequest setContentLength:keyData->byteLength()];
        [infoRequest setByteRangeAccessSupported:YES];
    }

    if (AVAssetResourceLoadingDataRequest *dataRequest = [request dataRequest]) {
        long long start = [dataRequest currentOffset];
        // Clamp the end of the requested range to the available key data.
        long long end = std::min<long long>(keyData->byteLength(), [dataRequest currentOffset] + [dataRequest requestedLength]);

        // Ranges that fall entirely outside the key data fail the request.
        if (start < 0 || end < 0 || start >= static_cast<long long>(keyData->byteLength())) {
            [request finishLoadingWithError:nil];
            return;
        }

        ASSERT(start <= std::numeric_limits<int>::max());
        ASSERT(end <= std::numeric_limits<int>::max());
        // slice() copies [start, end); respond with exactly that window.
        RefPtr<ArrayBuffer> requestedKeyData = keyData->slice(static_cast<int>(start), static_cast<int>(end));
        RetainPtr<NSData> nsData = adoptNS([[NSData alloc] initWithBytes:requestedKeyData->data() length:requestedKeyData->byteLength()]);
        [dataRequest respondWithData:nsData.get()];
    }

    [request finishLoading];
}
1686 #endif
1687
// Resource-loader delegate hook. Returns true when WebCore will service
// the request asynchronously (EME key request or generic media load) and
// false when AVFoundation should proceed (e.g. key data already cached or
// no key session was created).
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // "skd" URLs are FairPlay key requests.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Little-endian size prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): dividing by sizeof(unsigned char) is a no-op (== 1);
        // the intended element count appears to be keyURI.length() — confirm.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        // The dangling `if` below pairs with the indented `return false`
        // after the #endif, whichever EME variant is compiled in.
#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so the key session can fulfill it later.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
#if ENABLE(ENCRYPTED_MEDIA_V2)
    } else if (scheme == "clearkey") {
        String keyID = [[[avRequest request] URL] resourceSpecifier];
        StringView keyIDView(keyID);
        CString utf8EncodedKeyId = UTF8Encoding().encode(keyIDView, URLEncodedEntitiesForUnencodables);

        RefPtr<Uint8Array> initData = Uint8Array::create(utf8EncodedKeyId.length());
        initData->setRange((JSC::Uint8Adaptor::Type*)utf8EncodedKeyId.data(), utf8EncodedKeyId.length(), 0);

        // If the key is already cached, answer the request immediately and
        // let AVFoundation continue without waiting.
        auto keyData = player()->cachedKeyForKeyId(keyID);
        if (keyData) {
            fulfillRequestWithKeyData(avRequest, keyData.get());
            return false;
        }

        if (!player()->keyNeeded(initData.get()))
            return false;

        m_keyURIToRequestMap.set(keyID, avRequest);
        return true;
#endif
    }
#endif

    // Everything else is loaded through WebCore's resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1744
// Wraps the NSURLAuthenticationChallenge in a WebCore AuthenticationChallenge
// (via CFNetwork SPI when USE(CFNETWORK)) and asks the MediaPlayer client
// whether loading should block waiting for a response.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    RefPtr<WebCoreNSURLAuthenticationChallengeClient> client = WebCoreNSURLAuthenticationChallengeClient::create(nsChallenge);
    // _createCFAuthChallenge is SPI returning a +1 reference; adoptCF balances it.
    RetainPtr<CFURLAuthChallengeRef> cfChallenge = adoptCF([nsChallenge _createCFAuthChallenge]);
    AuthenticationChallenge challenge(cfChallenge.get(), client.get());
#else
    AuthenticationChallenge challenge(nsChallenge);
#endif

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
}
1757
// Called when AVFoundation abandons a resource loading request; stops the
// WebCore loader servicing it, if any. The map entry itself is removed in
// didStopLoadingRequest(). (An unused local holding the request's URL
// scheme was removed.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1767
// Called when a resource loading request finishes; drops (and thereby
// destroys) the WebCoreAVFResourceLoader that serviced it.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1772 #endif
1773
// True when both the AVFoundation and CoreMedia frameworks can be
// soft-linked at runtime; gates registration of this media engine.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && isCoreMediaFrameworkAvailable();
}
1778
// Intended to snap |timeValue| to an exact media sample time; currently a
// pass-through (see FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1787
// Seconds for which a cached currentTime value may be trusted. On iOS and
// OS X 10.10+ no caching window is used; older OS X keeps 5 seconds.
double MediaPlayerPrivateAVFoundationObjC::maximumDurationToCacheMediaTime() const
{
#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1010
    return 0;
#else
    return 5;
#endif
}
1796
// Applies the current aspect-ratio preference to the video layer inside a
// single non-animated CATransaction.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    NSString *gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1815
// Returns the first enabled track in |tracks|, or nil when none is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack *track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1825
// Called whenever the asset's or player item's track collection changes.
// Recomputes and caches hasVideo/hasAudio/hasClosedCaptions (these are
// queried frequently), updates the WebCore track lists, and fires
// characteristicsChanged() when the primary audio language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the old primary-audio language so a change can be detected
    // after languageOfPrimaryAudioTrack() recomputes it below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic-changed notifications until the end of this method.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Apply the track's preferred transform so rotated video reports
        // its display size rather than its encoded size.
        presentationSizeDidChange(firstEnabledVideoTrack ? FloatSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : FloatSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify every enabled player-item track by media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Selected media-selection options count as tracks too.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

    // Fall back to legacy CC tracks when legible output is unavailable.
#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}
1926
1927 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of media type |trackType| in |tracks|
// against the existing WebCore items in |oldItems|: items whose platform
// track disappeared are removed (via |removedFunction|), new platform
// tracks get fresh items (via |itemFactory| and |addedFunction|), and
// |oldItems| is updated in place to the surviving + added set.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current platform tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Platform tracks currently backing our WebCore items.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition old items into removed vs. surviving, then append items
    // for the newly appeared tracks.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player only after oldItems reflects the final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1971
1972 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Media-selection-group variant of the diff above: compares the group's
// current selection options (excluding options backed by a persistent
// track, which the NSArray variant already handles) against |oldItems|,
// removing stale items and creating items for new options.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions();

    // Only add selection options which do not have an associated persistant track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        // NOTE(review): -track appears to be a non-public selector probed at
        // runtime; options that respond and return a track are skipped.
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition old items into removed vs. surviving, then append items
    // for the newly appeared options.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player only after oldItems reflects the final state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
2032 #endif
2033
// Rebuilds the WebCore audio track list from the cached AVPlayerItemTracks
// and, where available, the audible media selection group, notifying the
// MediaPlayer of additions and removals.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the audible selection group wrapper on first use.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    // Refresh cached properties (label, language, enabled state, ...) on
    // every surviving track.
    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
2059
// Rebuilds the WebCore video track list from the cached AVPlayerItemTracks
// and, where available, the visual media selection group, notifying the
// MediaPlayer of additions and removals.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection group wrapper on first use.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh cached properties on the *video* tracks. (Fixed: this loop
    // previously iterated m_audioTracks — a copy/paste slip from
    // updateAudioTracks() — leaving video track properties stale.)
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
2085
// A WebCore-rendered text track representation is only required on iOS
// while a fullscreen layer is active (captions are composited into it).
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        return true;
#endif
    return false;
}
2094
// iOS only: keeps the caption representation layer's frame in sync with
// the video's on-screen rect while in fullscreen.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;
    
    // Prefer the actual video rect; fall back to the fullscreen frame when
    // there is no video layer.
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
2105
// iOS only: installs (or removes, when |representation| is null) the layer
// that renders captions, parenting it under the fullscreen layer. No-op on
// other platforms.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just make sure its frame is up to date.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Only attach when both the fullscreen layer and a new representation
    // layer exist; otherwise the layer stays detached.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
2129 #endif // ENABLE(VIDEO_TRACK)
2130
2131 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates and returns the Web Audio source provider that taps the
// current player item's audio.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider)
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    return m_provider.get();
}
2138 #endif
2139
// Pushes the cached presentation size to the base class as the natural
// size; does nothing before an asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    setNaturalSize(m_cachedPresentationSize);
}
2147     
// True when the asset's resolved URL (after redirects) shares scheme, host
// and port with the originally requested URL. Conservatively false until
// the "resolvedURL" key has finished loading.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2157
2158 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates the AVPlayerItemVideoOutput used to pull pixel buffers for
// painting and attaches it to the player item. No-op without a player item
// or when an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox, createPixelBuffer() converts to BGRA itself, so no
    // pixel format is requested here.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Frame-availability callbacks are delivered on the shared pull queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2181
// Detaches the AVPlayerItemVideoOutput from the player item (when one
// still exists) and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());
    m_videoOutput = 0;
}
2193
// Pulls the pixel buffer for the player item's current time from the video
// output, converting it to 32BGRA via VideoToolbox when that path is
// compiled in. Returns null when no new buffer is available.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Convert whatever format the output produced into 32BGRA.
    // NOTE(review): the CVPixelBufferCreate return code is not checked; on
    // failure outputBuffer would be used uninitialized — confirm upstream
    // guarantees or add a check.
    CVPixelBufferRef outputBuffer;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2234
// Returns true when a frame can be painted right now: either a frame was
// already captured into m_lastImage, or the video output (created on
// demand) has a new pixel buffer for the item's current time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2248
// CGDataProvider direct-access callback: locks the pixel buffer read-only
// and exposes its base address. Balanced by the release callback below.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(pixelBuffer);
}
2255
// CGDataProvider callback: unlocks the base address locked by
// CVPixelBufferGetBytePointerCallback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
2261
// CGDataProviderDirectCallbacks "release info" hook: drops the retain taken on
// the pixel buffer when the provider was created (see createImageFromPixelBuffer).
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CFRelease(buffer);
}
2267
// Wraps a BGRA pixel buffer in a CGImage without copying the pixels: a
// direct-access CGDataProvider reads straight out of the buffer, and the
// buffer is kept alive by an extra retain released via the provider's
// release-info callback.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // BGRA bytes read as a little-endian 32-bit word put alpha first.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2285
// Refreshes m_lastImage from the newest pixel buffer delivered by the video
// output. When no new buffer is available the previous image is kept.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2296
// Paints the current video frame into |context| via the AVPlayerItemVideoOutput
// path, applying the video track's preferred transform. May block (up to 1s)
// waiting for the first frame so the initial paint is not blank.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const FloatRect& outputRect)
{
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(*context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // The CTM below maps drawing through videoTransform, so the destination
    // rect must be pre-mapped through its inverse to land in outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().mapRect(outputRect);

    context->concatCTM(videoTransform);
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2327
// Returns the CGImage for the current playback time, refreshing the cached
// image from the video output first. May return null if no frame has ever
// been captured.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2333
// Blocks the calling thread until the video output reports new media data
// (signalled by outputMediaDataWillChange()), giving up after one second.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // The semaphore is created lazily and signalled from the output's callback.
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second; dispatch_semaphore_wait returns non-zero on timeout.
    if (dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC)))
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2347
// Callback from the video output's media-data-change notification; wakes up
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2352 #endif
2353
2354 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1 generateKeyRequest(): extracts the key URI, key ID, and application
// certificate from |initData|, asks AVFoundation to build a streaming content
// key request for the pending resource loading request, and delivers the blob
// to the page via keyMessage(). Platform failures are reported through
// keyError() rather than a MediaKeyException.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // A resource loading request must already be pending for this key URI.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface the platform's underlying error code to the page as a DomainError.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2396
// EME v1 addKey(): hands the decryption key to the AVAssetResourceLoadingRequest
// that is waiting on |sessionID|, finishes that request, and notifies the page.
// initDataPtr/initDataLength are unused on this platform.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Respond to the pending loading request with the key bytes and complete
    // it; the session entry is no longer needed afterwards.
    RetainPtr<AVAssetResourceLoadingRequest> request = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> key = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[request.get() dataRequest] respondWithData:key.get()];
    [request.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
2417
// EME v1 cancelKeyRequest(): drops the pending loading request for |sessionID|.
// HashMap::remove() returns false when no entry existed, which maps to
// InvalidPlayerState exactly as the former contains()+remove() pair did.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.remove(sessionID))
        return MediaPlayer::InvalidPlayerState;

    return MediaPlayer::NoError;
}
2429 #endif
2430
2431 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Transfers ownership of the pending resource loading request for |keyURI| to
// the caller, removing the map entry. Returns a null RetainPtr if none exists.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2436
// Called when a new key becomes available: satisfies every pending loading
// request whose key the player has cached, then forgets those requests.
void MediaPlayerPrivateAVFoundationObjC::keyAdded()
{
    Vector<String> satisfiedKeyIds;

    for (auto& entry : m_keyURIToRequestMap) {
        auto keyData = player()->cachedKeyForKeyId(entry.key);
        if (!keyData)
            continue;

        fulfillRequestWithKeyData(entry.value.get(), keyData.get());
        satisfiedKeyIds.append(entry.key);
    }

    // Remove entries after the loop so the map is not mutated while iterating.
    for (auto& keyId : satisfiedKeyIds)
        m_keyURIToRequestMap.remove(keyId);
}
2456
// Creates a CDM session for EME v2, or null for unsupported key systems.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    return keySystemIsSupported(keySystem) ? std::make_unique<CDMSessionAVFoundationObjC>(this) : nullptr;
}
2464 #endif
2465
2466 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Rebuilds the text-track list for legacy closed-caption tracks on platforms
// without AVPlayerItemLegibleOutput. Every known track starts on the "removed"
// list; tracks still present in m_cachedTracks are matched and taken off it,
// brand-new caption tracks are appended, and the remainder is reported removed.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Deselect any legible option so caption rendering stays under our control.
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove() does not disturb indices yet to be visited.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once an element has
            // been removed from removedTextTracks the two vectors' indices no
            // longer line up, and m_textTracks[i - 1] could name the wrong track.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2501 #endif
2502
2503 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Returns whether the asset's media selection groups are safe to query, i.e.
// the availableMediaCharacteristicsWithMediaSelectionOptions key has loaded.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2514
// Returns the legible media selection group, or nil if the asset has not yet
// loaded its selection-group information.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2522
// Returns the audible media selection group, or nil if the asset has not yet
// loaded its selection-group information.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2530
// Returns the visual media selection group, or nil if the asset has not yet
// loaded its selection-group information.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2538
// Synchronizes m_textTracks with the asset's legible media selection options.
// Options already represented keep their track; options that disappeared are
// reported removed; new ones get a freshly created track (out-of-band options
// are recognized separately when AVF_CAPTIONS is enabled).
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Start by assuming every known track was removed; survivors are taken off
    // this list as their options are matched below.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove() does not disturb indices yet to be visited.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are not backed by selection options.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2594
// Lazily creates the single in-band metadata text track used for timed
// metadata (HLS #EXT-X-... data) and registers it with the player.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    // Only one metadata track is ever created.
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2604
// Forwards a batch of cue data from the legible output to the currently
// selected text track; cues are dropped when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2612
// Discards any partially delivered cue state on the current text track,
// e.g. after a seek invalidates in-flight cues.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTextTrack)
        return;
    
    m_currentTextTrack->resetCueValues();
}
2622 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2623
// Selects |track| (or deselects all tracks when null), routing the choice to
// the mechanism matching the track's category: the legacy closed-caption
// display flag, an out-of-band selection option, or an in-band option in the
// legible media selection group.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // No track selected: clear both the selection option and the legacy flag.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2652
// Determines (and caches in m_languageOfPrimaryAudioTrack) the language of the
// primary audio track: first from the currently selected audible selection
// option, then by falling back to the single audio track's own language.
// Returns the empty string when the language cannot be determined.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Serve the cached answer when one has already been computed.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single primary language exists.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2694
2695 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// Returns whether playback is currently routed to an external (wireless)
// target, as reported by AVPlayer's externalPlaybackActive property.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool isWireless = m_avPlayer.get().externalPlaybackActive;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isWireless));

    return isWireless;
}
2706
// Maps the platform's external playback type to MediaPlayer's target type.
// On iOS the WebKitSystemInterface wrapper distinguishes AirPlay from TV-out;
// on Mac the only wireless target is AirPlay.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

#if PLATFORM(IOS)
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;

#else
    return MediaPlayer::TargetTypeAirPlay;
#endif
}
2729
// Returns the user-visible name of the current external playback device, or
// the empty string when no player exists.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String name = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, name.utf8().data());

    return name;
}
2740
// Returns whether wireless video playback is disabled. The mutable member
// m_allowsWirelessVideoPlayback stores the positive sense ("allowed") and is
// refreshed from the AVPlayer when one exists, so the result is its negation.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2751
// Records the wireless-video setting (stored in its positive "allowed" sense)
// and pushes it to the AVPlayer when one already exists; otherwise it will be
// applied when the player is created.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;

    if (m_avPlayer)
        [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
}
2761
2762 #if !PLATFORM(IOS)
// Mac-only: caches the output context chosen by the device picker and, when a
// player already exists, hands it to AVPlayer so playback routes to the
// selected output device. The cached context is applied later otherwise.
void MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(const MediaPlaybackTarget& target)
{
    m_outputContext = target.devicePickerContext();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessPlaybackTarget(%p) - target = %p", this, m_outputContext.get());

    if (!m_avPlayer)
        return;

    m_avPlayer.get().outputContext = m_outputContext.get();
}
2774 #endif
2775
// iOS: keeps AVPlayer's external-screen behavior in sync with whether we are
// presenting video in our own fullscreen layer.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

#if PLATFORM(IOS)
    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
#endif
}
2785 #endif
2786
// KVO forwarder: caches the AVPlayerItem status and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2793
// A playbackLikelyToKeepUp KVO change is in flight; defer state updates until
// the matching DidChange call balances this counter.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    ++m_pendingStatusChanges;
}
2798
// Caches the new playbackLikelyToKeepUp value and, once every in-flight KVO
// change has landed, re-evaluates the player state in a single pass.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
2807
// A playbackBufferEmpty KVO change is in flight; defer state updates until the
// matching DidChange call balances this counter.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    ++m_pendingStatusChanges;
}
2812
// Caches the new playbackBufferEmpty value and, once every in-flight KVO
// change has landed, re-evaluates the player state in a single pass.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
2821
// A playbackBufferFull KVO change is in flight; defer state updates until the
// matching DidChange call balances this counter.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    ++m_pendingStatusChanges;
}
2826
// Caches the new playbackBufferFull value and, once every in-flight KVO
// change has landed, re-evaluates the player state in a single pass.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    --m_pendingStatusChanges;
    if (!m_pendingStatusChanges)
        updateStates();
}
2835
// KVO forwarder: caches the item's seekable ranges, then notifies the base
// class and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2843
// KVO forwarder: caches the item's loaded (buffered) ranges, then notifies the
// base class and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2851
// KVO forwarder for layer readiness. Becoming ready for display can reveal a
// video track before it is otherwise reported, hence the tracksChanged() call.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2859
// KVO forwarder for an AVPlayerItemTrack's "enabled" property; the new value
// itself is ignored since tracksChanged() re-reads all track state.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2865
// Enables or disables data buffering. AVFoundation buffers whenever an item is
// attached to the player, so buffering is stopped by detaching the item and
// resumed by re-attaching it.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // The log message previously misnamed this method "shouldBufferData"; every
    // LOG in this file uses the method's own name.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;
    
    if (!m_avPlayer)
        return;

    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
2879
2880 #if ENABLE(DATACUE_VALUE)
// Maps an AVMetadataItem key space to the type string exposed on DataCue.
// Returns emptyAtom for unrecognized key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData is NULL-checked before use — presumably a
    // weak-linked symbol absent on older OS versions; confirm against the SDK.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
2902 #endif
2903
// KVO forwarder for the item's timed metadata. Caches the metadata array and,
// when DATACUE_VALUE is enabled, closes out incomplete cues and adds a data
// cue per metadata item on the metadata text track.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // KVO can deliver NSNull when the property became nil; normalize to nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // Metadata delivered during a seek is stale; skip it.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // An empty update means previous cues end now.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        // Items without a valid duration remain open-ended until a later update.
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
2945
// KVO forwarder for the player item's "tracks" property. Re-registers the
// per-track "enabled" observers on the new track set, filtering out streaming
// tracks already represented by a media selection group so they are not
// double-counted.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Drop observers from the old track set before replacing it.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The track set changed, so any cached total byte count is stale.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
2983
// KVO forwarder: caches whether the item has enabled audio, then refreshes
// track and player state.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
2991
// KVO forwarder: caches the item's presentation size, then notifies the base
// class and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
2999
// KVO forwarder: caches the new duration and invalidates any derived value.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
3006
// KVO forwarder: caches the player's rate, then re-evaluates state and
// notifies the base class of the rate change.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
3014     
3015 #if ENABLE(WIRELESS_PLAYBACK_TARGET)
// KVO forwarder: relays external-playback-active changes to the base class.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
3020 #endif
3021
// KVO forwarder: caches whether the item supports fast-forward playback.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
3026
// KVO forwarder: caches whether the item supports fast-reverse playback.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
3031
// Returns the asset's resolved URL once that key has finished loading,
// falling back to the base class's notion of the URL until then.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    if (m_avAsset && [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] == AVKeyValueStatusLoaded)
        return URL([m_avAsset resolvedURL]);

    return MediaPlayerPrivateAVFoundation::resolvedURL();
}
3039
3040 NSArray* assetMetadataKeyNames()
3041 {
3042     static NSArray* keys;
3043     if (!keys) {
3044         keys = [[NSArray alloc] initWithObjects:@"duration",
3045                     @"naturalSize",
3046                     @"preferredTransform",
3047                     @"preferredVolume",
3048                     @"preferredRate",