Use Ref for SecurityOrigin.
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVTrackPrivateAVFObjCImpl.h"
32 #import "AudioSourceProviderAVFObjC.h"
33 #import "AudioTrackPrivateAVFObjC.h"
34 #import "AuthenticationChallenge.h"
35 #import "BlockExceptions.h"
36 #import "CDMSessionAVFoundationObjC.h"
37 #import "Cookie.h"
38 #import "ExceptionCodePlaceholder.h"
39 #import "FloatConversion.h"
40 #import "FloatConversion.h"
41 #import "GraphicsContext.h"
42 #import "GraphicsContextCG.h"
43 #import "InbandMetadataTextTrackPrivateAVF.h"
44 #import "InbandTextTrackPrivateAVFObjC.h"
45 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
46 #import "OutOfBandTextTrackPrivateAVF.h"
47 #import "URL.h"
48 #import "Logging.h"
49 #import "MediaSelectionGroupAVFObjC.h"
50 #import "MediaTimeAVFoundation.h"
51 #import "PlatformTimeRanges.h"
52 #import "SecurityOrigin.h"
53 #import "SerializedPlatformRepresentationMac.h"
54 #import "SoftLinking.h"
55 #import "TextTrackRepresentation.h"
56 #import "UUID.h"
57 #import "VideoTrackPrivateAVFObjC.h"
58 #import "WebCoreAVFResourceLoader.h"
59 #import "WebCoreCALayerExtras.h"
60 #import "WebCoreSystemInterface.h"
61 #import <objc/runtime.h>
62 #import <runtime/DataView.h>
63 #import <runtime/JSCInlines.h>
64 #import <runtime/TypedArrayInlines.h>
65 #import <runtime/Uint16Array.h>
66 #import <runtime/Uint32Array.h>
67 #import <runtime/Uint8Array.h>
68 #import <wtf/CurrentTime.h>
69 #import <wtf/Functional.h>
70 #import <wtf/ListHashSet.h>
71 #import <wtf/NeverDestroyed.h>
72 #import <wtf/text/CString.h>
73 #import <wtf/text/StringBuilder.h>
74
75 #if ENABLE(AVF_CAPTIONS)
76 #include "TextTrack.h"
77 #endif
78
79 #import <AVFoundation/AVFoundation.h>
80 #if PLATFORM(IOS)
81 #import "WAKAppKitStubs.h"
82 #import <CoreImage/CoreImage.h>
83 #else
84 #import <Foundation/NSGeometry.h>
85 #import <QuartzCore/CoreImage.h>
86 #endif
87 #import <CoreMedia/CoreMedia.h>
88
89 #if USE(VIDEOTOOLBOX)
90 #import <CoreVideo/CoreVideo.h>
91 #import <VideoToolbox/VideoToolbox.h>
92 #endif
93
// WTF's HashSet iterator does not expose the standard iterator typedefs, so
// provide a minimal std::iterator_traits specialization (value_type only) —
// presumably enough for the STL algorithms used with
// MediaSelectionOptionAVFObjC sets; confirm against MediaSelectionGroupAVFObjC.
namespace std {
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
99
// A plain CALayer whose only job is to keep every sublayer's frame in sync
// with its own bounds, so the hosted video layer always fills the container.
@interface WebVideoContainerLayer : CALayer
@end

@implementation WebVideoContainerLayer

- (void)setBounds:(CGRect)bounds
{
    [super setBounds:bounds];
    // Resize all children to exactly cover the new bounds.
    for (CALayer* sublayer in [self sublayers])
        [sublayer setFrame:bounds];
}
@end
112
#if ENABLE(AVF_CAPTIONS)
// Note: This must be defined before our SOFT_LINK macros:
@class AVMediaSelectionOption;
// SPI: out-of-band source/identifier that WebKit attaches to media selection
// options created for external text tracks (matched against in
// synchronizeTextTrackState()).
@interface AVMediaSelectionOption (OutOfBandExtensions)
@property (nonatomic, readonly) NSString* outOfBandSource;
@property (nonatomic, readonly) NSString* outOfBandIdentifier;
@end
#endif

#if PLATFORM(IOS)
@class AVPlayerItem;
// SPI (iOS only): presumably carries a YouTube video identifier — confirm usage.
@interface AVPlayerItem (WebKitExtensions)
@property (nonatomic, copy) NSString* dataYouTubeID;
@end
#endif

// SPI: the asset's final URL — presumably post-redirect; confirm before relying on it.
@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end
132
133 typedef AVPlayer AVPlayerType;
134 typedef AVPlayerItem AVPlayerItemType;
135 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
136 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
137 typedef AVMetadataItem AVMetadataItemType;
138 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
139 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
140
141 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
142 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
143 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
144 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
145
146 #if USE(VIDEOTOOLBOX)
147 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
148 #endif
149
150 SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
151 SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
152 SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
153 SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))
154
155 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
156 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
157 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
158 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
159 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
160 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
161 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
162 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
163
164 #if USE(VIDEOTOOLBOX)
165 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
166 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
167 #endif
168
169 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
170 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
171 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
172 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
173 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
174 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
175 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
176
177 SOFT_LINK_CLASS(CoreImage, CIContext)
178 SOFT_LINK_CLASS(CoreImage, CIImage)
179
180 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
182 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
183 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
184 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
185 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
186 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
187 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
188 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
189 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
190 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
191 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
192 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
193 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
194
195 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
196
197 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
198
199 #define AVPlayer getAVPlayerClass()
200 #define AVPlayerItem getAVPlayerItemClass()
201 #define AVPlayerLayer getAVPlayerLayerClass()
202 #define AVURLAsset getAVURLAssetClass()
203 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
204 #define AVMetadataItem getAVMetadataItemClass()
205
206 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
207 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
208 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
209 #define AVMediaTypeVideo getAVMediaTypeVideo()
210 #define AVMediaTypeAudio getAVMediaTypeAudio()
211 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
212 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
213 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
214 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
215 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
216 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
217 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
218 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
219 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
220 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
221
222 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
223 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
224 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
225
226 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
227 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
228 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
229
230 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
231 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
232 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
233 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
234
235 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
236 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
237 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
238 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
239 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
240 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
241 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
242 #endif
243
244 #if ENABLE(AVF_CAPTIONS)
245 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
246 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
247 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
248 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
249 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
250 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
255
256 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
257 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
258 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
259 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
260 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
261 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
262 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
263 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
264 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
265 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
266 #endif
267
268 #if ENABLE(DATACUE_VALUE)
269 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
270 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
271 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
272 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
273 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
274
275 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
276 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
277 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
278 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
279 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
280 #endif
281
282 #if PLATFORM(IOS)
283 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
284
285 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
286 #endif
287
288 #define kCMTimeZero getkCMTimeZero()
289
290 using namespace WebCore;
291
// Context values registered with KVO so -observeValueForKeyPath:... can tell
// which kind of object (player item, item track, player, or player layer)
// a change notification pertains to.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
298
// Objective-C helper that receives KVO notifications, end-of-playback
// notifications, and (when supported) legible-output caption callbacks from
// AVFoundation, forwarding them to the owning MediaPlayerPrivateAVFoundationObjC.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer; presumably cleared by -disconnect — confirm in implementation.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
318
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoaderDelegate that routes AVFoundation media resource
// requests back into WebKit's loader (see WebCoreAVFResourceLoader import above).
// Callbacks are delivered on globalLoaderDelegateQueue().
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer; the destructor clears it with -setCallback:0.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
328
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for the AVPlayerItemVideoOutput; holds a semaphore,
// presumably signaled when new frames become available — confirm in the
// implementation. Callbacks arrive on globalPullDelegateQueue().
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Raw back-pointer; the destructor clears it with -setCallback:0.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
340
341 namespace WebCore {
342
343 static NSArray *assetMetadataKeyNames();
344 static NSArray *itemKVOProperties();
345 static NSArray* assetTrackMetadataKeyNames();
346
347 #if !LOG_DISABLED
// Renders a bool as a C string for LOG() messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
352 #endif
353
354 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Maps each MediaPlayer to its AVFoundation-backed private implementation.
// Entries are added in the constructor and removed in the destructor.
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
} // Fixed: removed stray ';' after the function body.
361 #endif
362
363 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily creates (exactly once) the serial queue on which all
// WebCoreAVFLoaderDelegate callbacks are delivered.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t createOnce;
    dispatch_once(&createOnce, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
373 #endif
374
375 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates (exactly once) the serial queue on which all
// WebCoreAVFPullDelegate video-output callbacks are delivered.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullQueue;
    static dispatch_once_t createOnce;
    dispatch_once(&createOnce, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
385 #endif
386
// Factory used by the MediaPlayer engine registry (see registerMediaEngine)
// to instantiate this engine for a given MediaPlayer.
PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
{
    // Fixed: the opening brace of the function body was missing.
    return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
}
391
// Registers this engine's factory and capability callbacks with MediaPlayer,
// but only when the AVFoundation framework could actually be soft-linked.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar(create, getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
397
// Constructs the private player. Creates the Objective-C observer/delegate
// helpers that route AVFoundation callbacks back to this object, and zeroes
// all cached KVO-backed state.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    // Receives KVO / notification / legible-output callbacks (see WebCoreAVFMovieObserver).
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(IOS_AIRPLAY)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register in the global map; the destructor removes this entry.
    playerToPrivateMap().set(player, this);
#endif
}
434
// Tears down all delegates and observers before this object goes away, so no
// late AVFoundation callback can reach a dead pointer.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Null the loader delegate's back-pointer and detach it from the asset,
    // then invalidate any in-flight resource loads.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Likewise detach the video-output pull delegate and release its semaphore.
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
459
// Cancels any in-progress load and releases the AVFoundation objects
// (asset, player item, player) plus all observers and cached state. Safe to
// call multiple times; also invoked from the destructor.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every KVO observation registered on the player item.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        m_avPlayerItem = nil;
    }
    if (m_avPlayerItem) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(IOS_AIRPLAY)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
#endif

        // NOTE(review): the AVPlayer is intentionally kept alive for one more
        // second before release — presumably to let in-flight AVFoundation
        // callbacks drain; confirm before changing the delay.
        RetainPtr<AVPlayerType> strongPlayer = WTF::move(m_avPlayer);
        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC), dispatch_get_main_queue(), [strongPlayer] () mutable {
            strongPlayer.clear();
        });
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Stop observing per-track "enabled" changes before dropping the tracks.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setPlayerItem(nullptr);
#endif

    setIgnoreLoadStateChanges(false);
}
526
// A layer renderer counts as existing as soon as createVideoLayer() has been
// asked to run, even though the layer itself is created asynchronously.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
531
// True when frames can be painted into a GraphicsContext: either a video
// output exists (when the platform supports one) or an image generator does.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
540
// Creates whichever context renderer this build supports: an
// AVPlayerItemVideoOutput when available, otherwise an AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
549
// Lazily creates an AVAssetImageGenerator for painting individual video
// frames. No-op until an asset exists or if a generator was already made.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance both ways: generated images must be at exactly the requested time.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
566
// Tears down both possible context renderers (video output and image generator).
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
574
// Releases the image generator created by createImageGenerator(), if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    // Use nullptr, matching how other RetainPtr members are cleared in this file.
    m_imageGenerator = nullptr;
}
584
// Asks for the AVPlayerLayer-based renderer to be created. The actual work
// is bounced to the main thread (presumably required for layer creation —
// confirm); m_haveBeenAskedToCreateLayer is only set inside the block, so
// calls made before the block runs may enqueue extra blocks, but only the
// first performs any work.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        // Bail if this object was destroyed before the block ran.
        if (!weakThis)
            return;

        // Re-check: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Notify the client that rendering switched to layer mode.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
610
// Creates the AVPlayerLayer attached to m_avPlayer, registers for
// readyForDisplay KVO, and (on iOS) wraps it in a WebVideoContainerLayer or
// re-parents it under the fullscreen layer.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readyForDisplay so hasAvailableVideoFrame() can report layer readiness.
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    // Size the layer to the element's content box until real dimensions arrive.
    IntSize defaultSize = player()->client().mediaPlayerContentBoxRect().pixelSnappedSize();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    [m_videoLayer web_disableAllActions];
    m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
#ifndef NDEBUG
    [m_videoInlineLayer setName:@"WebVideoContainerLayer"];
#endif
    [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    // If fullscreen is already active, parent directly under the fullscreen
    // layer; otherwise under the inline container (which keeps frames synced).
    if (m_videoFullscreenLayer) {
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else {
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
    }
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
645
// Undoes createAVPlayerLayer(): removes the KVO observation, detaches the
// player from the layer, and (on iOS) drops the inline container layer.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
    m_videoInlineLayer = nil;
#endif

    m_videoLayer = nil;
}
664
// A frame is available when the layer reports ready-for-display (layer
// rendering) or when we have already painted at least one frame ourselves.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
672
673 #if ENABLE(AVF_CAPTIONS)
// Maps a platform text-track kind to the AVFoundation media-characteristic
// array describing an out-of-band track.
// FIXME: Match these to correct types:
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    switch (kind) {
    case PlatformTextTrack::Caption:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    case PlatformTextTrack::Subtitle:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    default:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
691     
// Forwards an out-of-band track mode change to the base class.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
696     
// Pushes the client-requested mode (showing/hidden/disabled) of each
// out-of-band track source onto the matching platform text track. Tracks are
// matched by the unique ID embedded in the selection option's
// outOfBandIdentifier (see the SPI category near the top of this file).
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven by the client's track sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();

            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;

            // Translate the platform mode to the inband-track mode. Unknown
            // modes default to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
727 #endif
728
729
// Returns the canonical form of the given URL, as computed by
// +[NSURLProtocol canonicalRequestForRequest:]. Falls back to the plain
// conversion whenever a canonical request cannot be produced.
static NSURL *canonicalURL(const String& url)
{
    NSURL *nsURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return nsURL;

    RetainPtr<NSURLRequest> urlRequest = adoptNS([[NSURLRequest alloc] initWithURL:nsURL]);
    if (!urlRequest)
        return nsURL;

    NSURLRequest *canonicalized = [NSURLProtocol canonicalRequestForRequest:urlRequest.get()];
    if (!canonicalized)
        return nsURL;

    return [canonicalized URL];
}
746
747 #if PLATFORM(IOS)
// Converts a WebCore Cookie into the property-dictionary form understood by
// NSHTTPCookie. The optional secure/session attributes are only added when
// set, since NSHTTPCookie treats their absence as NO.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    RetainPtr<NSMutableDictionary> properties = adoptNS([[NSMutableDictionary alloc] init]);
    [properties.get() setObject:cookie.name forKey:NSHTTPCookieName];
    [properties.get() setObject:cookie.value forKey:NSHTTPCookieValue];
    [properties.get() setObject:cookie.domain forKey:NSHTTPCookieDomain];
    [properties.get() setObject:cookie.path forKey:NSHTTPCookiePath];
    // Cookie expiry is in milliseconds since the epoch; NSDate wants seconds.
    [properties.get() setObject:[NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)] forKey:NSHTTPCookieExpires];

    if (cookie.secure)
        [properties.get() setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties.get() setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
765 #endif
766
// Creates the AVURLAsset used for media loading, configuring reference
// restrictions, HTTP header fields, out-of-band caption sources, cookies and
// the resource loader delegate. No-op if an asset already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    // Defer observer callbacks until the asset is fully configured.
    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid the asset from mixing local and remote references across origins.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    // NOTE(review): this option key is passed as a string literal (SPI); the
    // spelling must match what AVFoundation expects.
    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

#if ENABLE(AVF_CAPTIONS)
    // Hand any page-provided (out-of-band) caption sources to AVFoundation as
    // alternate tracks so they participate in media selection.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Propagate the page's cookies so media requests carry the same
    // credentials as other subresource loads.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route resource loading (e.g. key requests) through our loader delegate.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
851
// Installs |item| as the player's current item. The swap must happen on the
// main thread, so off-main-thread callers bounce over via dispatch_async,
// keeping both the player and the item alive for the hop.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (!pthread_main_np()) {
        RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer.get();
        RetainPtr<AVPlayerItemType> protectedItem = item;
        dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
            [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
        });
        return;
    }

    [m_avPlayer replaceCurrentItemWithPlayerItem:item];
}
868
// Creates the AVPlayer, registers KVO observers for rate (and external
// playback state on iOS), creates the video layer when the client renders
// video, and attaches any already-created player item. No-op if one exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    // Defer observer callbacks until configuration is complete.
    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
    updateDisableExternalPlayback();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // We drive audio/caption track selection ourselves; stop AVFoundation
    // from applying its own selection criteria.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
901
// Creates the AVPlayerItem for the current asset, registers notification and
// KVO observers, configures legible (caption) output, seeds automatic media
// selection, and attaches the item to the player if one already exists.
// No-op if an item already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    // Defer observer callbacks until configuration is complete.
    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Observe every cached item property; NSKeyValueObservingOptionPrior also
    // reports the "will change" half of each notification.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if PLATFORM(IOS)
    AtomicString value;
    if (player()->doesHaveAttribute("data-youtube-id", &value))
        [m_avPlayerItem.get() setDataYouTubeID: value];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    // Have WebVTT cues delivered to us instead of letting AVFoundation render
    // them; we only want source and ruleset styling resolved.
    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];

    // Seed each media selection group with AVFoundation's automatic choice.
    [m_avPlayerItem.get() selectMediaOptionAutomaticallyInMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
    [m_avPlayerItem.get() selectMediaOptionAutomaticallyInMediaSelectionGroup:safeMediaSelectionGroupForAudibleMedia()];
    [m_avPlayerItem.get() selectMediaOptionAutomaticallyInMediaSelectionGroup:safeMediaSelectionGroupForVisualMedia()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setPlayerItem(m_avPlayerItem.get());
#endif

    setDelayCallbacks(false);
}
955
// Asynchronously loads the asset's "playable" key, at most once per asset;
// when the value is known, AssetPlayabilityKnown is posted on the main thread.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    // The completion handler runs on an arbitrary queue; the weak pointer
    // guards against this object being destroyed before it fires.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
972
// Starts asynchronous loading of the asset-level metadata keys and, once the
// tracks are known, the per-track keys. A dispatch group joins all of the
// loads; when it drains, the observer's metadataLoaded is invoked on the
// main thread.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
    dispatch_group_enter(metadataLoadingGroup);
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Each track load enters the group and leaves in its completion
            // handler; the final leave below balances the initial enter.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup);
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup);
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup);
        });
    }];

    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });

        // Balances dispatch_group_create() above. NOTE(review): assumes this
        // file is built without ARC-managed dispatch objects — confirm.
        dispatch_release(metadataLoadingGroup);
    });
}
1004
// Maps the cached AVPlayerItem state onto the cross-platform ItemStatus,
// checked in order from most severe to most ready.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1023
// Exposes the underlying AVPlayer to clients through the PlatformMedia union.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia media;
    media.type = PlatformMedia::AVFoundationMediaPlayerType;
    media.media.avfMediaPlayer = m_avPlayer.get();
    return media;
}
1032
// Vends the layer used for inline rendering, but only after a client has
// explicitly asked us to create one. On iOS this is the dedicated inline
// layer; elsewhere the video layer itself.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;
#if PLATFORM(IOS)
    return m_videoInlineLayer.get();
#else
    return m_videoLayer.get();
#endif
}
1041
1042 #if PLATFORM(IOS)
// Reparents the video layer into (or out of) the fullscreen container layer.
// All layer surgery happens inside a CATransaction with implicit animations
// disabled so the move is not animated.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];

    if (m_videoFullscreenLayer && m_videoLayer) {
        // Entering fullscreen: size to the fullscreen frame and reparent.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoLayer removeFromSuperlayer];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else if (m_videoInlineLayer && m_videoLayer) {
        // Leaving fullscreen: match the inline layer's bounds and reparent.
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else if (m_videoLayer)
        [m_videoLayer removeFromSuperlayer];

    [CATransaction commit];

    // Captions are rendered above the fullscreen video layer.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }
#if ENABLE(IOS_AIRPLAY)
    updateDisableExternalPlayback();
#endif
}
1074
// Updates the fullscreen frame. The resize runs with implicit animations
// deliberately re-enabled (by clearing the layer's style) so the layer
// animates to the new frame; actions are disabled again afterwards.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer) {
        [m_videoLayer setStyle:nil]; // This enables actions, i.e. implicit animations.
        [CATransaction begin];
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];
        [CATransaction commit];
        [m_videoLayer web_disableAllActions];
    }
    syncTextTrackBounds();
}
1090
// Maps the cross-platform gravity value onto the corresponding AVPlayerLayer
// gravity string and applies it to the video layer, remembering the choice.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *layerGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        layerGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        layerGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        layerGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:layerGravity];
}
1109
// Returns the most recent timed metadata, or nil if none has arrived yet
// (RetainPtr::get() already yields nil for a null pointer).
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData.get();
}
1116
// Returns the player item's access log in its extended textual form, or the
// empty string when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *itemLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[itemLog extendedLogData] encoding:[itemLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1127
// Returns the player item's error log in its extended textual form, or the
// empty string when no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *itemLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[itemLog extendedLogData] encoding:[itemLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1138 #endif
1139
// Shows or hides the video layer without triggering implicit animations.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1148     
// Starts playback by applying the client-requested rate to the AVPlayer.
// Callbacks are delayed because the rate change fans out KVO notifications.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    float rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1160
// Pauses playback by setting the player's rate to 0. Callbacks are delayed
// because the rate change fans out KVO notifications.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1172
// Returns the media duration, preferring the player item's value when it is
// ready (some assets never report a duration themselves). Indefinite CMTimes
// (e.g. live streams) map to positive infinity; non-numeric ones are invalid.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1197
// Returns the item's current playback position, clamped to be non-negative;
// zero when the time is unavailable or not yet numeric.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(itemTime), MediaTime::zeroTime());
}
1209
// Seeks the player item to |time| within the given tolerances. The completion
// handler hops back to the main thread and reports whether the seek finished
// (an in-flight seek can be superseded, in which case finished is NO).
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partially-built metadata cues would straddle the seek; drop them.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    // The weak pointer guards against this object being destroyed before the
    // completion handler fires.
    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1236
// Sets the player's volume. Intentionally a no-op on iOS, where this code
// does not adjust AVPlayer volume (presumably deferring to system volume —
// NOTE(review): confirm intent).
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
    return;
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1249
// This implementation only logs the request; UNUSED_PARAM keeps release
// builds (where LOG compiles away) free of unused-parameter warnings.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1259
// Re-applies the client-requested rate to the AVPlayer, caching the value so
// rate() can answer without querying AVFoundation.
void MediaPlayerPrivateAVFoundationObjC::updateRate()
{
    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1267
// Reports the cached playback rate; meaningless (0) before metadata arrives.
float MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1275
// Converts the cached loaded (buffered) CMTimeRanges into PlatformTimeRanges,
// skipping invalid or empty entries. Empty when no player item exists.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto ranges = PlatformTimeRanges::create();

    if (!m_avPlayerItem)
        return ranges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        ranges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return ranges;
}
1290
// Returns the earliest start time across all valid seekable ranges, or zero
// when no valid range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    bool foundValidRange = false;
    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, toMediaTime(range.start));
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1310
// Returns the latest end time across all valid seekable ranges, lazily
// refreshing the cached ranges from the player item when the cache is empty.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1328
// Returns the furthest end time across all valid buffered (loaded) ranges.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED <= 1080
    // AVFoundation on Mountain Lion will occasionally not send a KVO notification
    // when loadedTimeRanges changes when there is no video output. In that case
    // update the cached value explicitly.
    if (!hasLayerRenderer() && !hasContextRenderer())
        m_cachedLoadedRanges = [m_avPlayerItem loadedTimeRanges];
#endif

    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    MediaTime maxTimeLoaded;
    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        // Skip ranges AVFoundation reports as invalid or empty.
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        MediaTime endOfRange = toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeLoaded < endOfRange)
            maxTimeLoaded = endOfRange;
    }

    return maxTimeLoaded;   
}
1355
// Returns the total sample-data size across all tracks, computed lazily and
// memoized in m_cachedTotalBytes (presumably declared mutable, since it is
// written from this const method — NOTE(review): confirm in the header).
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (m_cachedTotalBytes)
        return m_cachedTotalBytes;

    for (AVPlayerItemTrack *thisTrack in m_cachedTracks.get())
        m_cachedTotalBytes += [[thisTrack assetTrack] totalSampleDataLength];

    return m_cachedTotalBytes;
}
1369
// Replaces the current AVAsset reference with |asset|.
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
1374
// Derives the cross-platform asset status from the load status of each
// metadata key: any key still loading => Loading, any failure => Failed, any
// cancellation => Cancelled; otherwise Playable or merely Loaded depending on
// the asset's "playable" value.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1403
// Returns the NSError code recorded for the asset's "playable" key, or 0
// when there is no asset or no error (messaging a nil NSError returns 0).
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return [error code];
}
1413
// Paints the current video frame into |context|, preferring the video-output
// path when it has a frame available and falling back to the image generator.
// ObjC exceptions thrown by AVFoundation are caught by the BLOCK macros.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1434
// Best-effort software paint path; delegates to paintCurrentFrameInContext()
// only when we are not already rendering to a layer and a context renderer
// (image generator or video output) already exists.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if we already have an image generator or video output available.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1450
// Paints the frame at the current time into |context| using the image
// generator. CGContextDrawImage draws flipped relative to WebCore's
// coordinate system, so the context is flipped vertically around the
// destination rect before drawing.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, rect.width(), rect.height()), image.get());
}
1466
// Returns the set of MIME types AVFoundation reports as playable, built once
// on first use from -[AVURLAsset audiovisualMIMETypes] (lowercased).
// Fix: the function's closing brace was missing as rendered; restored.
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add([mimeType lowercaseString]);

    return cache;
}
1479
1480     return cache;
1481
1482
// Synchronously generates a CGImage for |time|, capped at |rect|'s size, and
// converts it to the device RGB color space. Creates the AVAssetImageGenerator
// on demand.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Timescale of 600 is the conventional common multiple of typical video
    // frame rates.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1504
// Populates |supportedTypes| with every MIME type AVFoundation can play.
// Fix: the function's closing brace was missing as rendered; restored.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}
1508
1509
1510 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// FairPlay Streaming ("com.apple.fps" / "com.apple.fps.1_0") is the only key
// system this media engine supports.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps") || equalIgnoringCase(keySystem, "com.apple.fps.1_0");
}
1517 #endif
1518
// Reports whether this engine can (probably/maybe) play content described by
// 'parameters': MIME type, codecs string, key system, and media-source flag.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // Media Source Extensions content is serviced by a different private player.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1555
// Returns whether this engine can handle 'keySystem', optionally constrained to
// 'mimeType'. An empty key system is never supported.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // A recognized key system is supported when no container type was given,
        // or when the given container type is one we can play.
        if (keySystemIsSupported(keySystem))
            return mimeType.isEmpty() || mimeTypeCache().contains(mimeType);
        return false;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    return false;
}
1574
1575 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Called when AVFoundation asks us to service a resource-loading request.
// Returns true when loading will be handled (possibly asynchronously), false to
// let AVFoundation fail the request itself.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // "skd" URLs are FairPlay key requests; surface them to the page as a key-needed event.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Little-endian length prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        // NOTE(review): exactly one of the branches below supplies the condition for
        // the shared early return, depending on which EME version is enabled.
#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Park the request until the key arrives via addKey() / the CDM session.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }
#endif

    // All other schemes are serviced asynchronously by a WebCoreAVFResourceLoader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1611
// Forwards an NSURLAuthenticationChallenge from AVFoundation to the MediaPlayer
// client; returns whether a response will be provided asynchronously.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    UNUSED_PARAM(nsChallenge);
    // FIXME: <rdar://problem/15799844>
    return false;
#else
    return player()->shouldWaitForResponseToAuthenticationChallenge(AuthenticationChallenge(nsChallenge));
#endif
}
1624
// Called when AVFoundation cancels an in-flight resource-loading request; stops
// the WebCoreAVFResourceLoader servicing it, if any. (The previous 'scheme'
// local was computed but never used.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1634
// Called when a loading request finishes; drops our bookkeeping entry for it.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1639 #endif
1640
// The engine is usable only when both AVFoundation and CoreMedia soft-link successfully.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    if (!AVFoundationLibrary())
        return false;
    return CoreMediaLibrary();
}
1645
// Maps a requested media time to a time the engine can actually present.
// Currently an identity mapping in both branches; see the FIXME below.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1654
// Applies the current aspect-ratio policy to the video layer's gravity inside a
// no-animation CATransaction.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    NSString* videoGravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:videoGravity];
    [CATransaction commit];
}
1673
// Returns the first track in 'tracks' whose isEnabled flag is set, or nil when none is.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger enabledIndex = [tracks indexOfObjectPassingTest:^(id obj, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(obj) isEnabled];
    }];
    return enabledIndex == NSNotFound ? nil : [tracks objectAtIndex:enabledIndex];
}
1683
// Re-derives hasVideo/hasAudio/hasClosedCaptions (and related cached state)
// whenever the asset's or player item's track collection changes.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so we can fire
    // characteristicsChanged() below if it ends up different.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch any characteristic changes into a single notification.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // The presentation size must account for the track's preferredTransform (e.g. rotation).
        presentationSizeDidChange(firstEnabledVideoTrack ? IntSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : IntSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option counts as a track even when no
        // AVPlayerItemTrack is exposed for it.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Fall back to legacy closed-caption tracks only when the selection groups
    // did not provide captions.
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}
1784
1785 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of type 'trackType' against the existing wrappers
// in 'oldItems', creating wrappers for newly appeared tracks with 'itemFactory',
// updating 'oldItems' in place, and notifying 'player' of removals and additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the existing wrappers into surviving and removed ones.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify only after 'oldItems' already reflects the new state.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1829
1830 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Same diffing as the overload above, but driven by a MediaSelectionGroup's
// options rather than AVPlayerItemTracks; used when selection groups are available.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions();

    // Only add selection options which do not have an associated persistent track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        // -track is not declared on every OS version; probe before messaging.
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition the existing wrappers into surviving and removed ones.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);
    
    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1890 #endif
1891
// Rebuilds the audio track list from the cached AVPlayerItemTracks (and, where
// available, the audible media-selection group), notifying the MediaPlayer of
// added and removed tracks.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the audible selection-group wrapper the first time one is available.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
1917
// Rebuilds the video track list from the cached AVPlayerItemTracks (and, where
// available, the visual media-selection group), notifying the MediaPlayer of
// added and removed tracks.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection-group wrapper the first time one is available.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh cached properties on the *video* tracks. The previous code iterated
    // m_audioTracks here — a copy/paste slip from updateAudioTracks().
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
1943
// On iOS, while in fullscreen, caption cues must be rendered by WebCore into a
// layer we composite ourselves; everywhere else the platform renders them.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        return true;
#endif
    return false;
}
1952
// Keeps the caption-representation layer's frame in sync with the video's
// on-screen rect while in iOS fullscreen. No-op on other platforms.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;
    
    // Match the visible video rect when a video layer exists; otherwise fill the fullscreen frame.
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
1963
// Installs (or removes) the layer that renders caption cues, parenting it under
// the iOS fullscreen layer. No-op on other platforms.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: just refresh its bounds.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
1987 #endif // ENABLE(VIDEO_TRACK)
1988
1989 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider that taps this player item's audio.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider)
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    return m_provider.get();
}
1996 #endif
1997
// Pushes the cached presentation size to the cross-platform base class as the
// movie's natural size.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    setNaturalSize(roundedIntSize(m_cachedPresentationSize));
}
2005     
// True when the URL the asset resolved to shares a scheme/host/port with the
// originally requested URL, i.e. loading did not cross origins.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const
{
    // Until the resolved URL has been loaded we cannot make any claim.
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;

    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin->isSameSchemeHostPort(&requestedOrigin.get());
}
2015
2016 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates the AVPlayerItemVideoOutput used to pull frames for painting, choosing
// pixel-buffer attributes according to the build configuration.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
    // Let AVFoundation choose the format; VideoToolbox converts to 32BGRA later.
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_422YpCbCr8) };
#endif
#else
    // Without VideoToolbox, request 32BGRA directly so frames can be wrapped in CGImages.
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // Deliver "media data will change" callbacks on the shared pull queue.
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2043
// Detaches the AVPlayerItemVideoOutput from the player item (if any) and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = nullptr;
}
2055
// Pulls the pixel buffer for the player item's current time from the video
// output, converting it to 32BGRA via VideoToolbox when that path is enabled.
// Returns null when no new frame is available.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Check the CVPixelBufferCreate result; previously 'outputBuffer' was used
    // uninitialized when creation failed. On failure, fall through with the
    // unconverted buffer rather than touching garbage.
    CVPixelBufferRef outputBuffer = nullptr;
    if (CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer) == kCVReturnSuccess) {
        VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
        buffer = adoptCF(outputBuffer);
    }
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2096
// Returns whether a video frame could be painted right now, creating the video
// output on demand.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A frame we previously copied out still counts as available.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
2110
// CGDataProvider callback: locks the pixel buffer for reading and hands CG its base address.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(buffer);
}
2117
// CGDataProvider callback: balances the lock taken in CVPixelBufferGetBytePointerCallback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
}
2123
// CGDataProvider callback: releases the retain taken when the provider was created.
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CFRelease(static_cast<CVPixelBufferRef>(info));
}
2129
// Wraps a 32BGRA pixel buffer in a CGImage without copying pixel data; the buffer
// is retained for the provider's lifetime and locked only while CG reads from it.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // BGRA in memory is ARGB when read as little-endian 32-bit words.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2147
// Refreshes m_lastImage from the newest frame in the video output, keeping the
// previous image when no newer frame exists.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2158
// Paints the current video frame into 'context', honoring the track's
// preferredTransform, using the AVPlayerItemVideoOutput path.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& outputRect)
{
    // Block (up to one second) for the first frame so painting does not silently no-op.
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(*context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Draw in the track's coordinate space: concat the transform onto the context
    // and map the destination rect through its inverse.
    FloatRect transformedOutputRect = videoTransform.inverse().mapRect(outputRect);

    context->concatCTM(videoTransform);
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2189
// Returns the most recent video frame as a platform image, or null if no frame
// has been produced yet.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2195
// Blocks the calling thread until the video output reports that new media data
// is coming (signaled from outputMediaDataWillChange()), or one second elapses.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // A non-zero result means the wait timed out rather than being signaled.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2209
// Delegate callback (delivered on the pull queue): wakes any thread blocked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2214 #endif
2215
2216 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1: builds a FairPlay streaming key request for the key URI carried in the
// initData, reports it to the page via keyMessage(), and parks the loading
// request under a freshly minted session ID. Errors are reported via keyError().
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // The loading request for this key must have been parked by shouldWaitForLoadingOfResource().
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2258
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    // EME v1: deliver the key/license obtained by the page to the pending
    // resource loading request for this session, completing that load.
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    // The request is satisfied; drop it so the session cannot be completed twice.
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    // initData is unused by this key path.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
2279
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    // EME v1: abandon an outstanding key request session.
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // HashMap::remove() returns whether the entry existed, so a separate
    // contains() lookup is unnecessary (avoids hashing the key twice).
    if (!m_sessionIDToRequestMap.remove(sessionID))
        return MediaPlayer::InvalidPlayerState;

    return MediaPlayer::NoError;
}
2291 #endif
2292
2293 #if ENABLE(ENCRYPTED_MEDIA_V2)
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    // Transfers ownership of the pending loading request for keyURI to the
    // caller, removing it from the map (null if no such request is pending).
    return m_keyURIToRequestMap.take(keyURI);
}
2298
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    // EME v2: create a CDM session backed by this player, or null for an
    // unsupported key system.
    if (!keySystemIsSupported(keySystem))
        return nullptr;

    return std::make_unique<CDMSessionAVFoundationObjC>(this);
}
2306 #endif
2307
2308 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
    // Fallback text-track discovery for platforms without legible output support:
    // walk the cached player item tracks for legacy closed-caption tracks, create
    // a private track object for each new one, and report tracks that vanished.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove() does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: earlier removals
            // make the two vectors diverge, so i - 1 is only valid here.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                // Track still present; keep it.
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2343 #endif
2344
2345 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    // Media selection groups are only usable once the asset has finished loading
    // the availableMediaCharacteristicsWithMediaSelectionOptions key.
    return m_avAsset && [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2356
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    // The legible (caption/subtitle) selection group, or nil until the asset's
    // media selection data has loaded.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2364
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    // The audible selection group, or nil until the asset's media selection data
    // has loaded.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2372
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    // The visual selection group, or nil until the asset's media selection data
    // has loaded.
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2380
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    // Reconcile WebCore's text track list with the asset's legible media
    // selection options: create tracks for new options and collect options that
    // vanished so processNewAndRemovedTextTracks() can remove them.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove() does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are not backed by selection options.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            // This option already has a track; keep it.
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2436
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    // Lazily create the single in-band metadata (data cue) track and register it
    // with the MediaPlayer client. No-op once the track exists.
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2446
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    // Forward incoming cue data to the currently selected text track, if any.
    if (m_currentTextTrack)
        m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2454
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    // Discard any partially delivered cue state on the active track.
    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2464 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2465
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    // Switch the active caption/subtitle track, updating both the legacy
    // closed-caption display flag and the item's legible media selection.
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselecting: clear the legible selection and disable caption display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2494
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Determine (and cache in the mutable m_languageOfPrimaryAudioTrack) the
    // language of the primary audio track, preferring the currently selected
    // audible media selection option over raw asset tracks.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single primary language; cache the
        // empty answer so we do not repeat the lookup.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2536
2537 #if ENABLE(IOS_AIRPLAY) && PLATFORM(IOS)
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    // External playback (AirPlay/TV-out) state lives on the AVPlayer; without a
    // player we cannot be playing wirelessly.
    if (!m_avPlayer)
        return false;

    bool isWireless = [m_avPlayer.get() isExternalPlaybackActive];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isWireless));
    return isWireless;
}
2547
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    // Map the platform helper's external-device type onto MediaPlayer's enum.
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // All enum values are handled above.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;
}
2565
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    // The external device's display name comes from the platform helper; it is
    // only meaningful once an AVPlayer exists.
    if (!m_avPlayer)
        return emptyString();

    String targetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, targetName.utf8().data());

    return targetName;
}
2576
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    // Refresh the cached (mutable) allows-external-playback flag from the player
    // when one exists; otherwise answer from the cache.
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;
    
    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2587
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    // Record the desired state even when no AVPlayer exists yet; the cached flag
    // carries it until a player is available to apply it to.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;
    
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
}
2597
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    // Mirror fullscreen state onto the player: external playback while an
    // external screen is active is only used when a fullscreen layer is set.
    if (!m_avPlayer)
        return;

    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
}
2605 #endif
2606
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    // KVO callback: cache the AVPlayerItem status and re-evaluate player state.
    m_cachedItemStatus = status;

    updateStates();
}
2613
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    // Prior-value KVO notification: a status change is in flight; state updates
    // are deferred until the matching DidChange arrives.
    m_pendingStatusChanges++;
}
2618
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    // KVO callback: cache the value and update state only once all in-flight
    // WillChange notifications are balanced.
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2627
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    // Prior-value KVO notification; see playbackLikelyToKeepUpWillChange().
    m_pendingStatusChanges++;
}
2632
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    // KVO callback: cache the value and update state only once all in-flight
    // WillChange notifications are balanced.
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2641
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    // Prior-value KVO notification; see playbackLikelyToKeepUpWillChange().
    m_pendingStatusChanges++;
}
2646
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    // KVO callback: cache the value and update state only once all in-flight
    // WillChange notifications are balanced.
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2655
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    // KVO callback: cache the item's seekable ranges and notify the base class.
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2663
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    // KVO callback: cache the item's loaded (buffered) ranges and notify the base class.
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2671
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    // KVO callback for the layer's readyForDisplay. A displayable first frame
    // implies a video track may exist even if tracks haven't reported one yet.
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2679
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    // KVO callback for an AVPlayerItemTrack's "enabled"; recompute track state.
    tracksChanged();
    updateStates();
}
2685
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Control whether the player buffers data by attaching/detaching the item.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::shouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;
    
    if (!m_avPlayer)
        return;

    // Detaching the item from the player halts buffering; reattaching resumes it.
    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
2699
2700 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the corresponding DataCue type
// string; returns emptyAtom for unrecognized key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData is null-checked first — presumably a
    // weak-linked constant unavailable on older OS versions; confirm against SDK.
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
2722 #endif
2723
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // KVO callback for the item's timedMetadata: cache the new metadata array
    // and, when DATACUE_VALUE is enabled, turn each AVMetadataItem into a data
    // cue on the in-band metadata text track.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    if (seeking())
        return;

    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        // A null update closes any open-ended cues. Guard against m_metadataTrack
        // being null: a null update can arrive before any metadata created the track.
        if (m_metadataTrack)
            m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    if (!m_metadataTrack)
        processMetadataTrack();

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        // Items without a valid duration become open-ended cues, closed by a
        // later metadata update.
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
2765
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // KVO callback for the item's "tracks": refresh m_cachedTracks (filtering
    // out streaming tracks already represented by media selection groups), keep
    // the per-track "enabled" observers balanced, and recompute dependent state.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The cached byte total depends on the track set; reset for lazy recompute.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
2803
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    // KVO callback: cache the flag and recompute track-dependent state.
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
2811
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    // KVO callback: cache the item's presentation size and propagate the change.
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
2819
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    // KVO callback: cache the new duration and invalidate derived caches.
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
2826
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    // KVO callback: cache the player rate and propagate the change.
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
2834     
2835 #if ENABLE(IOS_AIRPLAY)
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    // KVO callback for the player's externalPlaybackActive; notify the base class.
    playbackTargetIsWirelessChanged();
}
2840 #endif
2841
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    // KVO callback: cache whether the item supports fast-forward playback.
    m_cachedCanPlayFastForward = newValue;
}
2846
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    // KVO callback: cache whether the item supports fast-reverse playback.
    m_cachedCanPlayFastReverse = newValue;
}
2851
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    // Prefer the URL AVFoundation resolved for the asset; fall back to the base
    // class's answer until the "resolvedURL" key has finished loading.
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return MediaPlayerPrivateAVFoundation::resolvedURL();

    return URL([m_avAsset resolvedURL]);
}
2859
// Asset keys loaded asynchronously before the asset is considered usable.
// The array is created once and intentionally never released.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys;
    if (!keys) {
        keys = [[NSArray alloc] initWithObjects:@"duration",
                    @"naturalSize",
                    @"preferredTransform",
                    @"preferredVolume",
                    @"preferredRate",
                    @"playable",
                    @"resolvedURL",
                    @"tracks",
                    @"availableMediaCharacteristicsWithMediaSelectionOptions",
                   nil];
    }
    return keys;
}
2877
// AVPlayerItem key paths observed via KVO by WebCoreAVFMovieObserver.
// The array is created once and intentionally never released.
NSArray* itemKVOProperties()
{
    static NSArray* keys;
    if (!keys) {
        keys = [[NSArray alloc] initWithObjects:@"presentationSize",
                @"status",
                @"asset",
                @"tracks",
                @"seekableTimeRanges",
                @"loadedTimeRanges",
                @"playbackLikelyToKeepUp",
                @"playbackBufferFull",
                @"playbackBufferEmpty",
                @"duration",
                @"hasEnabledAudio",
                @"timedMetadata",
                @"canPlayFastForward",
                @"canPlayFastReverse",
                nil];
    }
    return keys;
}
2900
// AVAssetTrack keys loaded asynchronously per track.
// The array is created once and intentionally never released.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
2906
2907 } // namespace WebCore
2908
2909 @implementation WebCoreAVFMovieObserver
2910
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    // The callback is a raw C++ pointer; -disconnect must be called before the
    // player private object is destroyed.
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}
2919
- (void)disconnect
{
    // Sever the link to the C++ player and cancel pending performSelector
    // requests so none can fire against a dangling pointer.
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}
2925
- (void)metadataLoaded
{
    // Forward asset metadata completion to the C++ player on the main thread.
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}
2933
- (void)didEnd:(NSNotification *)unusedNotification
{
    // AVPlayerItemDidPlayToEndTime-style notification handler; forward to the
    // C++ player on the main thread.
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}
2941
2942 - (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
2943 {
2944     UNUSED_PARAM(object);
2945     id newValue = [change valueForKey:NSKeyValueChangeNewKey];
2946
2947     LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - keyPath = %s", self, [keyPath UTF8String]);
2948
2949     if (!m_callback)
2950         return;
2951
2952     bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];
2953
2954     WTF::Function<void ()> function;
2955
2956     if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
2957         if ([keyPath isEqualToString:@"readyForDisplay"])
2958             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
2959     }
2960
2961     if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
2962         if ([keyPath isEqualToString:@"enabled"])
2963             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
2964     }
2965
2966     if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
2967         if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
2968             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
2969         else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
2970             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
2971         else if ([keyPath isEqualToString:@"playbackBufferFull"])
2972             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
2973     }
2974
2975     if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
2976         // A value changed for an AVPlayerItem
2977         if ([keyPath isEqualToString:@"status"])
2978             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
2979         else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
2980             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
2981         else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
2982             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
2983         else if ([keyPath isEqualToString:@"playbackBufferFull"])
2984             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
2985         else if ([keyPath isEqualToString:@"asset"])
2986             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
2987         else if ([keyPath isEqualToString:@"loadedTimeRanges"])
2988             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
2989         else if ([keyPath isEqualToString:@"seekableTimeRanges"])
2990             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
2991         else if ([keyPath isEqualToString:@"tracks"])
2992             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
2993         else if ([keyPath isEqualToString:@"hasEnabledAudio"])
2994             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
2995         else if ([keyPath isEqualToString:@"presentationSize"])
2996             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
2997         else if ([keyPath isEqualToString:@"duration"])
2998             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
2999         else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
3000             MediaTime now;
3001             CMTime itemTime = [(AVPlayerItemType *)object currentTime];
3002             if (CMTIME_IS_NUMERIC(itemTime))
3003                 now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
3004             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
3005         } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
3006             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
3007         else if ([keyPath isEqualToString:@"canPlayFastForward"])
3008             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
3009     }
3010
3011     if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
3012         // A value changed for an AVPlayer.
3013         if ([keyPath isEqualToString:@"rate"])
3014             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
3015 #if ENABLE(IOS_AIRPLAY)
3016         else if ([keyPath isEqualToString:@"externalPlaybackActive"])
3017             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
3018 #endif
3019     }
3020     
3021     if (function.isNull())
3022         return;
3023
3024     auto weakThis = m_callback->createWeakPtr();
3025     m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
3026         // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
3027         // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
3028         if (!weakThis)
3029             return;
3030         function();
3031     }));
3032 }
3033
3034 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    // This delegate callback arrives on a background queue. Retain self and the
    // incoming arrays so they survive the hop to the main thread, where m_callback
    // is re-checked before use (it may have been cleared in the meantime).
    RetainPtr<WebCoreAVFMovieObserver> protectedSelf = self;
    RetainPtr<NSArray> retainedStrings = strings;
    RetainPtr<NSArray> retainedSamples = nativeSamples;
    callOnMainThread([protectedSelf, retainedStrings, retainedSamples, itemTime] {
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        if (!player)
            return;
        player->processCue(retainedStrings.get(), retainedSamples.get(), toMediaTime(itemTime));
    });
}
3053
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;

    // Keep self alive until the main-thread block runs; m_callback is re-checked
    // there because the player may be destroyed before the block executes.
    RetainPtr<WebCoreAVFMovieObserver> protectedSelf = self;
    callOnMainThread([protectedSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback)
            player->flushCues();
    });
}
3067 #endif
3068
3069 @end
3070
3071 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;

    m_callback = callback;
    return self;
}

// AVAssetResourceLoader calls this on a background queue. We answer YES (we will
// load the resource) and bounce the real decision to the main thread; if the
// player has gone away by then, the request is failed so AVFoundation can move on.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> retainedRequest = loadingRequest;
    callOnMainThread([protectedSelf, retainedRequest] {
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        // Short-circuit: a destroyed player or a declined request both end the load.
        if (!player || !player->shouldWaitForLoadingOfResource(retainedRequest.get()))
            [retainedRequest finishLoadingWithError:nil];
    });

    return YES;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Server-trust evaluation is not handled here; returning NO lets AVFoundation
    // apply its default handling for that authentication method.
    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> retainedChallenge = challenge;
    callOnMainThread([protectedSelf, retainedChallenge] {
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        // Cancel the challenge when the player is gone or declines to respond.
        if (!player || !player->shouldWaitForResponseToAuthenticationChallenge(retainedChallenge.get()))
            [[retainedChallenge sender] cancelAuthenticationChallenge:retainedChallenge.get()];
    });

    return YES;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> retainedRequest = loadingRequest;
    callOnMainThread([protectedSelf, retainedRequest] {
        if (MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback)
            player->didCancelLoadingRequest(retainedRequest.get());
    });
}

// Clears or replaces the back-pointer to the player; called when the player is torn down.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
3150 #endif
3151
3152 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@implementation WebCoreAVFPullDelegate

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    if ((self = [super init]))
        m_callback = callback;
    return self;
}

// Updates (or clears) the back-pointer to the player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

// AVPlayerItemOutputPullDelegate: new video frames are available from the output.
- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;
    m_callback->outputMediaDataWillChange(output);
}

// AVPlayerItemOutputPullDelegate: the output's media data was flushed. Nothing to do.
- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
}
@end
3179 #endif
3180
3181 #endif