Layout Test http/tests/media/track-in-band-hls-metadata.html is flaky
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVTrackPrivateAVFObjCImpl.h"
32 #import "AudioSourceProviderAVFObjC.h"
33 #import "AudioTrackPrivateAVFObjC.h"
34 #import "AuthenticationChallenge.h"
35 #import "BlockExceptions.h"
36 #import "CDMSessionAVFoundationObjC.h"
37 #import "Cookie.h"
38 #import "ExceptionCodePlaceholder.h"
39 #import "FloatConversion.h"
40 #import "FloatConversion.h"
41 #import "GraphicsContext.h"
42 #import "GraphicsContextCG.h"
43 #import "InbandMetadataTextTrackPrivateAVF.h"
44 #import "InbandTextTrackPrivateAVFObjC.h"
45 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
46 #import "OutOfBandTextTrackPrivateAVF.h"
47 #import "URL.h"
48 #import "Logging.h"
49 #import "MediaSelectionGroupAVFObjC.h"
50 #import "MediaTimeAVFoundation.h"
51 #import "PlatformTimeRanges.h"
52 #import "SecurityOrigin.h"
53 #import "SerializedPlatformRepresentationMac.h"
54 #import "SoftLinking.h"
55 #import "TextTrackRepresentation.h"
56 #import "UUID.h"
57 #import "VideoTrackPrivateAVFObjC.h"
58 #import "WebCoreAVFResourceLoader.h"
59 #import "WebCoreCALayerExtras.h"
60 #import "WebCoreSystemInterface.h"
61 #import <objc/runtime.h>
62 #import <runtime/DataView.h>
63 #import <runtime/JSCInlines.h>
64 #import <runtime/TypedArrayInlines.h>
65 #import <runtime/Uint16Array.h>
66 #import <runtime/Uint32Array.h>
67 #import <runtime/Uint8Array.h>
68 #import <wtf/CurrentTime.h>
69 #import <wtf/Functional.h>
70 #import <wtf/ListHashSet.h>
71 #import <wtf/NeverDestroyed.h>
72 #import <wtf/text/CString.h>
73 #import <wtf/text/StringBuilder.h>
74
75 #if ENABLE(AVF_CAPTIONS)
76 #include "TextTrack.h"
77 #endif
78
79 #import <AVFoundation/AVFoundation.h>
80 #if PLATFORM(IOS)
81 #import "WAKAppKitStubs.h"
82 #import <CoreImage/CoreImage.h>
83 #else
84 #import <Foundation/NSGeometry.h>
85 #import <QuartzCore/CoreImage.h>
86 #endif
87 #import <CoreMedia/CoreMedia.h>
88
89 #if USE(VIDEOTOOLBOX)
90 #import <CoreVideo/CoreVideo.h>
91 #import <VideoToolbox/VideoToolbox.h>
92 #endif
93
namespace std {
// WTF::HashSet iterators do not expose the standard iterator typedefs, so
// provide a minimal iterator_traits specialization — presumably needed so
// std algorithms can be used over MediaSelectionGroupAVFObjC option sets
// (see MediaSelectionGroupAVFObjC.h); confirm against callers.
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
99
@interface WebVideoContainerLayer : CALayer
@end

@implementation WebVideoContainerLayer

// Keep every sublayer (e.g. the inline AVPlayerLayer) sized to fill this
// container whenever the container's bounds change.
- (void)setBounds:(CGRect)bounds
{
    [super setBounds:bounds];
    for (CALayer* sublayer in self.sublayers)
        sublayer.frame = bounds;
}
@end
112
113 #if ENABLE(AVF_CAPTIONS)
114 // Note: This must be defined before our SOFT_LINK macros:
115 @class AVMediaSelectionOption;
116 @interface AVMediaSelectionOption (OutOfBandExtensions)
117 @property (nonatomic, readonly) NSString* outOfBandSource;
118 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
119 @end
120 #endif
121
122 #if PLATFORM(IOS)
123 @class AVPlayerItem;
124 @interface AVPlayerItem (WebKitExtensions)
125 @property (nonatomic, copy) NSString* dataYouTubeID;
126 @end
127 #endif
128
129 @interface AVURLAsset (WebKitExtensions)
130 @property (nonatomic, readonly) NSURL *resolvedURL;
131 @end
132
133 typedef AVPlayer AVPlayerType;
134 typedef AVPlayerItem AVPlayerItemType;
135 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
136 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
137 typedef AVMetadataItem AVMetadataItemType;
138 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
139 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
140
141 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
142 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
143 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
144 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
145
146 #if USE(VIDEOTOOLBOX)
147 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
148 #endif
149
150 SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
151 SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
152 SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
153 SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))
154
155 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
156 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
157 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
158 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
159 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
160 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
161 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
162 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
163
164 #if USE(VIDEOTOOLBOX)
165 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
166 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
167 #endif
168
169 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
170 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
171 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
172 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
173 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
174 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
175 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
176
177 SOFT_LINK_CLASS(CoreImage, CIContext)
178 SOFT_LINK_CLASS(CoreImage, CIImage)
179
180 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
182 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
183 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
184 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
185 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
186 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
187 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
188 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
189 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
190 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
191 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
192 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
193 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
194
195 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
196
197 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
198
199 #define AVPlayer getAVPlayerClass()
200 #define AVPlayerItem getAVPlayerItemClass()
201 #define AVPlayerLayer getAVPlayerLayerClass()
202 #define AVURLAsset getAVURLAssetClass()
203 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
204 #define AVMetadataItem getAVMetadataItemClass()
205
206 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
207 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
208 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
209 #define AVMediaTypeVideo getAVMediaTypeVideo()
210 #define AVMediaTypeAudio getAVMediaTypeAudio()
211 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
212 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
213 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
214 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
215 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
216 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
217 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
218 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
219 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
220 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
221
222 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
223 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
224 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
225
226 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
227 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
228 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
229
230 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
231 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
232 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
233 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
234
235 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
236 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
237 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
238 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
239 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
240 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
241 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
242 #endif
243
244 #if ENABLE(AVF_CAPTIONS)
245 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
246 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
247 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
248 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
249 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
250 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
254 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
255
256 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
257 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
258 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
259 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
260 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
261 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
262 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
263 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
264 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
265 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
266 #endif
267
268 #if ENABLE(DATACUE_VALUE)
269 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
270 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
271 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
272 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
273 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
274
275 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
276 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
277 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
278 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
279 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
280 #endif
281
282 #if PLATFORM(IOS)
283 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
284
285 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
286 #endif
287
288 #define kCMTimeZero getkCMTimeZero()
289
290 using namespace WebCore;
291
292 enum MediaPlayerAVFoundationObservationContext {
293     MediaPlayerAVFoundationObservationContextPlayerItem,
294     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
295     MediaPlayerAVFoundationObservationContextPlayer,
296     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
297 };
298
299 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
300 @interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
301 #else
302 @interface WebCoreAVFMovieObserver : NSObject
303 #endif
304 {
305     MediaPlayerPrivateAVFoundationObjC* m_callback;
306     int m_delayCallbacks;
307 }
308 -(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
309 -(void)disconnect;
310 -(void)metadataLoaded;
311 -(void)didEnd:(NSNotification *)notification;
312 -(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
313 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
314 - (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
315 - (void)outputSequenceWasFlushed:(id)output;
316 #endif
317 @end
318
319 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
320 @interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
321     MediaPlayerPrivateAVFoundationObjC* m_callback;
322 }
323 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
324 - (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
325 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
326 @end
327 #endif
328
329 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
330 @interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
331     MediaPlayerPrivateAVFoundationObjC *m_callback;
332     dispatch_semaphore_t m_semaphore;
333 }
334 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
335 - (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
336 - (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
337 - (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
338 @end
339 #endif
340
341 namespace WebCore {
342
343 static NSArray *assetMetadataKeyNames();
344 static NSArray *itemKVOProperties();
345 static NSArray* assetTrackMetadataKeyNames();
346
#if !LOG_DISABLED
// Printable representation of a bool for LOG() output.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
353
#if ENABLE(ENCRYPTED_MEDIA_V2)
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;

// Global map from a MediaPlayer to its AVFoundation-private implementation.
// Entries are added in the constructor and removed in the destructor;
// presumably used by CDM session code to locate the private player — confirm
// against CDMSessionAVFoundationObjC.
// Note: removed the stray semicolon that followed the function body.
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
}
#endif
362
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily creates the single serial dispatch queue shared by all
// WebCoreAVFLoaderDelegate resource-loading callbacks.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t creationGuard;
    dispatch_once(&creationGuard, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
#endif
374
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates the single serial dispatch queue on which
// WebCoreAVFPullDelegate receives AVPlayerItemOutput callbacks.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullQueue;
    static dispatch_once_t creationGuard;
    dispatch_once(&creationGuard, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
#endif
386
// Factory used by the media-engine registrar to create the
// AVFoundation-backed player implementation.
// Note: restored the opening brace that was missing between the signature
// and the function body.
PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
{
    return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
}
391
// Registers this engine with the MediaPlayer machinery, but only when the
// AVFoundation framework could actually be soft-linked.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar(create, getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
397
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    // Observer object that receives KVO and notification callbacks from
    // AVFoundation and forwards them to this player.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Delegate for pull-style video output callbacks; the semaphore is
    // created lazily elsewhere (released in the destructor if non-null).
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(IOS_AIRPLAY)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register in the global map so encrypted-media code can find this
    // private player from its MediaPlayer; removed in the destructor.
    playerToPrivateMap().set(player, this);
#endif
}
434
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Balance the registration made in the constructor.
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the loader delegate first so no resource-loading callbacks can
    // reach this object while it is being torn down.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    // The semaphore is a raw dispatch object (not managed by a smart
    // pointer here), so release it explicitly if it was ever created.
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    // cancelLoad() removes all remaining KVO observers and notification
    // registrations before the observer object goes away.
    cancelLoad();
}
459
// Cancels any in-flight loading and fully detaches this player from its
// AVFoundation objects: observers are removed, the asset/item/player are
// released, and all cached state is reset. Safe to call repeatedly; also
// called from the destructor.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before releasing it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Remove every KVO registration made on the player item; the key list
    // must match the one used when the observers were added.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    // Likewise for the player itself: periodic time observer plus KVO.
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(IOS_AIRPLAY)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track was observed for "enabled" changes; unregister before
    // dropping the collection.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setPlayerItem(nullptr);
#endif

    setIgnoreLoadStateChanges(false);
}
522
// True once createVideoLayer() has been asked to create the AVPlayerLayer.
// The layer itself may not exist yet: actual creation is deferred to a
// main-thread task (see createVideoLayer()).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
527
// A context (software-paint) renderer exists when either the pull-style
// video output or the image generator has been created.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
    bool hasRenderer = m_imageGenerator;
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    hasRenderer = hasRenderer || m_videoOutput;
#endif
    return hasRenderer;
}
536
// Creates whichever software rendering path this build supports: an
// AVPlayerItemVideoOutput when available, otherwise an AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
545
// Lazily creates the AVAssetImageGenerator used for software painting.
// No-op until an asset exists or if the generator was already created.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance on both sides so snapshots come from exactly the
    // requested time, not a nearby keyframe.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
562
// Tears down both software rendering paths. destroyImageGenerator() is a
// no-op when no generator exists, so calling both is always safe.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
570
// Releases the image generator created by createImageGenerator(), if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    // Use nullptr (not 0) for consistency with the other smart-pointer
    // resets in this file (e.g. cancelLoad()).
    m_imageGenerator = nullptr;
}
580
// Schedules creation of the hardware video layer. The work is bounced to the
// main thread; a weak pointer guards against this object being destroyed
// before the queued task runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check the preconditions: state may have changed between
        // scheduling and execution of this main-thread task.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

#if USE(VIDEOTOOLBOX) && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
        if (!m_videoOutput)
            createVideoOutput();
#endif

        // Let the client know rendering switched to the layer path.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
606
// Creates the AVPlayerLayer used for hardware-composited video and attaches
// it to the right container: the fullscreen layer or the inline container
// layer on iOS, stand-alone on Mac.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([allocAVPlayerLayerInstance() init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readyForDisplay so hasAvailableVideoFrame() can report when the
    // layer has its first frame; balanced in destroyVideoLayer().
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    IntSize defaultSize = player()->client().mediaPlayerContentBoxRect().pixelSnappedSize();
    // Fixed the log message, which previously named createVideoLayer().
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    [m_videoLayer web_disableAllActions];
    m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
#ifndef NDEBUG
    [m_videoInlineLayer setName:@"WebVideoContainerLayer"];
#endif
    [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    if (m_videoFullscreenLayer) {
        // Already fullscreen: size the video layer to the fullscreen frame
        // and parent it under the fullscreen layer.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else {
        // Inline: the container layer keeps the video layer sized to its
        // bounds (see WebVideoContainerLayer's setBounds:).
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
    }
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
641
// Tears down the AVPlayerLayer created by createAVPlayerLayer().
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    // Balance the readyForDisplay KVO registration and detach the layer from
    // the player before dropping our reference.
    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
    m_videoInlineLayer = nil;
#endif

    m_videoLayer = nil;
}
660
// Reports whether a video frame is ready to show: the layer's own readiness
// when rendering to a layer, otherwise whether we have painted a frame.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
668
669 #if ENABLE(AVF_CAPTIONS)
// Maps a platform text-track kind to the AVFoundation media characteristics
// used to describe an out-of-band track.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        // Captions, subtitles, and every other kind currently share the same
        // characteristic (matching the original if-chain's fallback).
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
687     
// Entry point invoked when an out-of-band track's mode changes; simply
// forwards to trackModeChanged() (declared on a base class — confirm which).
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
692     
// Pushes the current mode (showing / hidden / disabled) of each out-of-band
// text track down to its private track object, matching each track's
// AVMediaSelectionOption to its source by the unique-id string stored in the
// option's outOfBandIdentifier.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks carry a backing AVMediaSelectionOption.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            // The identifier was set from the source's uniqueId when the
            // option was created, so compare against the same string form.
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Translate the platform-neutral mode to the inband-track enum;
            // unknown modes fall back to Hidden.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
723 #endif
724
725
// Returns the canonical form of |url| as decided by the NSURLProtocol
// machinery, falling back to the straight WebCore-to-NSURL conversion when
// canonicalization is not possible.
static NSURL *canonicalURL(const String& url)
{
    NSURL *convertedURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return convertedURL;

    RetainPtr<NSURLRequest> urlRequest = adoptNS([[NSURLRequest alloc] initWithURL:convertedURL]);
    if (!urlRequest)
        return convertedURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:urlRequest.get()];
    return canonicalRequest ? [canonicalRequest URL] : convertedURL;
}
742
743 #if PLATFORM(IOS)
// Converts a WebCore Cookie into the NSHTTPCookie representation AVFoundation expects.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    // Build the always-present fields first; cookie.expires is in milliseconds.
    NSDictionary *requiredProperties = @{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    };
    RetainPtr<NSMutableDictionary> properties = adoptNS([requiredProperties mutableCopy]);

    // Optional boolean attributes are only added when set.
    if (cookie.secure)
        [properties setObject:@YES forKey:NSHTTPCookieSecure];
    if (cookie.session)
        [properties setObject:@YES forKey:NSHTTPCookieDiscard];

    return [NSHTTPCookie cookieWithProperties:properties.get()];
}
761 #endif
762
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    // Builds the AVURLAsset for |url|, collecting every creation option (reference
    // restrictions, HTTP headers, out-of-band captions, cookies, ...) first.
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Never allow a local file to be referenced by a remote resource or vice versa.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    // Forward the page's Referer and User-Agent so media loads look like the page's own.
    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    // The key constant is soft-linked, so it may be null on older systems.
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band caption track; the uniqueId stored in
    // AVOutOfBandAlternateTrackIdentifierKey is what synchronizeTextTrackState()
    // later uses to correlate tracks.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the page's cookies to AVFoundation so media requests carry them.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([allocAVURLAssetInstance() initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    // A fresh asset needs a fresh playability check (see checkPlayability()).
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
847
// Installs |item| as the AVPlayer's current item. AVPlayer requires this call on
// the main thread, so off-main-thread callers bounce through the main queue while
// keeping the player and item alive via RetainPtr captures.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (!pthread_main_np()) {
        RetainPtr<AVPlayerType> retainedPlayer = m_avPlayer.get();
        RetainPtr<AVPlayerItemType> retainedItem = item;
        dispatch_async(dispatch_get_main_queue(), [retainedPlayer, retainedItem] {
            [retainedPlayer replaceCurrentItemWithPlayerItem:retainedItem.get()];
        });
        return;
    }

    [m_avPlayer replaceCurrentItemWithPlayerItem:item];
}
864
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    // Lazily creates the AVPlayer, wires up KVO observation, and attaches any
    // already-created player item and (for video) the rendering layer.
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([allocAVPlayerInstance() init]);
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
    updateDisableExternalPlayback();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore drives media selection itself; turn off AVFoundation's automatic choice.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    // If the item was created before the player, attach it now.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
897
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    // Lazily creates the AVPlayerItem for the current asset, registers KVO and
    // notification observers, and configures legible (caption) output.
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([allocAVPlayerItemInstance() initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // NSKeyValueObservingOptionPrior delivers will-change notifications in addition
    // to did-change ones for every mirrored item property.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if PLATFORM(IOS)
    AtomicString value;
    if (player()->doesHaveAttribute("data-youtube-id", &value))
        [m_avPlayerItem.get() setDataYouTubeID: value];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    // Request WebVTT native samples so WebCore can render captions itself.
    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([allocAVPlayerItemLegibleOutputInstance() initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];

    // Seed each selection group with its automatic choice; WebCore adjusts later.
    // (Previously a second, redundant #if block with the identical condition.)
    [m_avPlayerItem.get() selectMediaOptionAutomaticallyInMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
    [m_avPlayerItem.get() selectMediaOptionAutomaticallyInMediaSelectionGroup:safeMediaSelectionGroupForAudibleMedia()];
    [m_avPlayerItem.get() selectMediaOptionAutomaticallyInMediaSelectionGroup:safeMediaSelectionGroupForVisualMedia()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setPlayerItem(m_avPlayerItem.get());
#endif

    setDelayCallbacks(false);
}
951
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Asynchronously loads the asset's "playable" key exactly once per asset;
    // the result is delivered as an AssetPlayabilityKnown notification.
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        // The handler may run on any queue; hop to the main thread and bail if
        // this player has been destroyed in the meantime.
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
968
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    // A dispatch group counts the asset-level load plus one per-track load so that
    // metadataLoaded is only announced after every asynchronous request completes.
    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
    dispatch_group_enter(metadataLoadingGroup);
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Once the asset's tracks are known, load each track's metadata keys;
            // every track enters the group and leaves it from its own handler.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup);
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup);
                    }];
                }
            }
            // Balance the enter issued before the asset load began.
            dispatch_group_leave(metadataLoadingGroup);
        });
    }];

    // Fires once the group is balanced, i.e. all asset and track keys have loaded.
    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });

        dispatch_release(metadataLoadingGroup);
    });
}
1000
// Maps the cached AVPlayerItem state onto the cross-platform item status, checked
// in priority order: hard status first, then buffering state, then ready.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1019
// Exposes the underlying AVPlayer to clients that need the platform object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia result;
    result.type = PlatformMedia::AVFoundationMediaPlayerType;
    result.media.avfMediaPlayer = m_avPlayer.get();
    return result;
}
1028
// Returns the layer used for inline rendering, or null until a layer has been requested.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;
#if PLATFORM(IOS)
    return m_videoInlineLayer.get();
#else
    return m_videoLayer.get();
#endif
}
1037
1038 #if PLATFORM(IOS)
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    // Reparents the video layer into the fullscreen layer (or back inline) inside a
    // single CATransaction so the move happens without implicit animations.
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];

    if (m_videoFullscreenLayer && m_videoLayer) {
        // Entering fullscreen: size to the fullscreen frame and move under the new parent.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoLayer removeFromSuperlayer];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else if (m_videoInlineLayer && m_videoLayer) {
        // Leaving fullscreen: restore the inline bounds and parent.
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else if (m_videoLayer)
        [m_videoLayer removeFromSuperlayer];

    [CATransaction commit];

    // Captions rendered via the text track representation ride along into fullscreen.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }
#if ENABLE(IOS_AIRPLAY)
    updateDisableExternalPlayback();
#endif
}
1070
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    // Remember the frame even without a fullscreen layer so a later
    // setVideoFullscreenLayer() can size the video layer correctly.
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer) {
        [m_videoLayer setStyle:nil]; // This enables actions, i.e. implicit animations.
        [CATransaction begin];
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];
        [CATransaction commit];
        // Re-disable implicit animations once the (animated) resize is committed.
        [m_videoLayer web_disableAllActions];
    }
    syncTextTrackBounds();
}
1086
// Applies the cross-platform gravity setting to the video layer by translating it
// into the corresponding AVFoundation layer-gravity constant.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:videoGravity];
}
1105
// Returns the most recently cached timed metadata, or nil when none has arrived.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1112
// Returns the player item's extended access log as text, or the empty string when
// no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> text = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return text.get();
}
1123
// Returns the player item's extended error log as text, or the empty string when
// no item exists yet.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> text = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return text.get();
}
1134 #endif
1135
// Shows or hides the video layer without triggering implicit CA animations.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer) {
        BOOL shouldHide = !isVisible;
        [m_videoLayer.get() setHidden:shouldHide];
    }
    [CATransaction commit];
}
1144     
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    // Starts playback by setting the AVPlayer's rate to the page-requested rate.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    // Query the requested rate once so the cached value and the rate handed to
    // AVPlayer can never disagree.
    double rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1156
// Pauses playback; a rate of zero is AVFoundation's "paused" state.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1168
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    // Live streams report an indefinite duration.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %s", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1193
// Returns the current playhead position, clamped to zero; zero when no metadata
// or player item is available yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime playerItemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerItemTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(playerItemTime), MediaTime::zeroTime());
}
1205
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partially accumulated in-band metadata cues must not survive a seek.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        // The completion handler may fire on any queue; hop to the main thread and
        // bail if this player was destroyed in the meantime.
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1232
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    // On iOS the hardware/system volume applies; per-player volume is not settable.
    UNUSED_PARAM(volume);
    return;
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1245
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // Caption visibility is driven through the text track machinery elsewhere; this
    // override only logs. UNUSED_PARAM keeps release builds (where LOG compiles
    // away) free of unused-parameter warnings.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1255
// Caches the requested rate and pushes it to the AVPlayer, suppressing re-entrant
// callbacks while the rate change propagates.
void MediaPlayerPrivateAVFoundationObjC::setRateDouble(double rate)
{
    setDelayCallbacks(true);
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1264
// Reports the cached playback rate; zero until metadata has arrived.
double MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1272
// Converts the cached loadedTimeRanges into a PlatformTimeRanges, skipping
// invalid or empty CMTimeRanges.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto ranges = PlatformTimeRanges::create();

    if (!m_avPlayerItem)
        return ranges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        ranges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return ranges;
}
1287
// Returns the earliest start time among the valid, non-empty seekable ranges, or
// zero when no valid range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    bool foundValidRange = false;
    MediaTime earliestStart = MediaTime::positiveInfiniteTime();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, toMediaTime(range.start));
    }
    return foundValidRange ? earliestStart : MediaTime::zeroTime();
}
1307
// Returns the latest end time among the valid, non-empty seekable ranges. The
// cached ranges are fetched lazily if no KVO update has populated them yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latestEnd;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1325
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED <= 1080
    // AVFoundation on Mountain Lion will occasionally not send a KVO notification
    // when loadedTimeRanges changes when there is no video output. In that case
    // update the cached value explicitly.
    if (!hasLayerRenderer() && !hasContextRenderer())
        m_cachedLoadedRanges = [m_avPlayerItem loadedTimeRanges];
#endif

    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    // Find the latest end time among the valid, non-empty loaded ranges.
    MediaTime maxTimeLoaded;
    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        MediaTime endOfRange = toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeLoaded < endOfRange)
            maxTimeLoaded = endOfRange;
    }

    return maxTimeLoaded;   
}
1352
// Returns the total sample-data size across all tracks, summing once and then
// serving the cached total on subsequent calls.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1366
// Adopts an externally created asset; RetainPtr assignment retains it.
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
1371
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    // Summarizes the load status of every metadata key into a single value; the
    // most pessimistic state across the keys wins.
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // All keys loaded; distinguish merely-loaded assets from playable ones.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1400
// Returns the error code from loading the asset's "playable" key, or 0 when there
// is no asset or no error.
long MediaPlayerPrivateAVFoundationObjC::assetErrorCode() const
{
    if (!m_avAsset)
        return 0;

    NSError *error = nil;
    [m_avAsset statusOfValueForKey:@"playable" error:&error];
    return error ? [error code] : 0;
}
1410
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    // Draws the current video frame into |context|, preferring the video-output
    // path when it has a decoded frame ready.
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Fall back to the (slower) image-generator path when the video output has no frame.
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1431
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    // Best-effort paint entry point; only draws when a software path already exists.
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if we already have an image generator or video output available.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1447
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
{
    // Renders the frame at the current time via AVAssetImageGenerator.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    // CoreGraphics draws with a flipped y-axis relative to WebCore, so translate to
    // the rect's bottom edge and flip vertically before drawing.
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    // No manual reset of |image| needed: RetainPtr releases it at scope exit.
}
1463
// Lazily builds (once) the set of lower-cased MIME types AVFoundation can play.
// Restored the closing brace that was missing from this definition.
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add([mimeType lowercaseString]);

    return cache;
}
1479
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    // Synchronously produces a CGImage of the frame at |time|, sized for |rect|,
    // converted into the device RGB color space for drawing.
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // The 600 timescale is the conventional AVFoundation video timescale.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1501
// Reports every MIME type AVFoundation advertises as playable.
// Restored the closing brace that was missing from this definition.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}
1506
1507 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// FairPlay Streaming ("com.apple.fps", and its "1_0" variant) is the only key
// system this engine supports.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps") || equalIgnoringCase(keySystem, "com.apple.fps.1_0");
}
1514 #endif
1515
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
    // Answers canPlayType() for this engine: key system, container and codec checks.
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // Media Source playback is handled by a different engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Ask AVFoundation about the full "type; codecs=..." string. (Fixed a stray
    // doubled semicolon on this return statement.)
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1552
// Returns true when the given key system (and, when non-empty, the container
// MIME type) can be handled by this engine. Always false when EME is disabled.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // An empty key system is never supported.
    if (keySystem.isEmpty())
        return false;

    if (!keySystemIsSupported(keySystem))
        return false;

    // When a MIME type is provided it must be one we can play.
    if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
        return false;

    return true;
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
    return false;
#endif
}
1571
1572 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Called by the AVAssetResourceLoader delegate when AVFoundation cannot load a
// resource itself. "skd" (FairPlay key) URLs are routed to the EME key-request
// machinery; everything else is loaded through WebCore's loader. Returning
// true tells AVFoundation we will satisfy the request asynchronously.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        // The size prefix is the byte length of the UTF-16 keyURI, written little-endian.
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        initDataView->set<uint32_t>(0, keyURISize, true);

        // Copy the keyURI as UTF-16 code units starting at byte offset 4.
        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): "keyURI.length() / sizeof(unsigned char)" is just
        // keyURI.length(); the divisor looks like a leftover — confirm the
        // intended element count.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        // Hand the initData to the page. If no "needkey" listener consumes it,
        // decline the request so AVFoundation can fail the load.
#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Keep the request alive until a key arrives (see generateKeyRequest()).
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }
#endif

    // Non-key resources: satisfy the request through WebCore's network stack.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1608
// Forwards an authentication challenge from AVFoundation to the media player
// client; true means the client will answer the challenge asynchronously.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    // FIXME: <rdar://problem/15799844>
    UNUSED_PARAM(nsChallenge);
    return false;
#else
    // Wrap the platform challenge and let the client decide.
    AuthenticationChallenge wrappedChallenge(nsChallenge);
    return player()->shouldWaitForResponseToAuthenticationChallenge(wrappedChallenge);
#endif
}
1621
// AVFoundation cancelled a resource loading request: stop any WebCore-driven
// load still in flight for it. The map entry itself is removed later in
// didStopLoadingRequest().
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // (Dropped an unused local that fetched the request URL's scheme.)
    WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest);

    if (resourceLoader)
        resourceLoader->stopLoading();
}
1631
// The load for this request has finished or been cancelled; drop our
// reference so the WebCoreAVFResourceLoader can be destroyed.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1636 #endif
1637
// This engine is usable only when both soft-linked frameworks load.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && CoreMediaLibrary();
}
1642
// Intended to snap a presentation time to an exact media sample time; per the
// FIXME below it is currently an identity mapping in both branches.
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1651
// Applies the current aspect-ratio policy to the AVPlayerLayer's gravity.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    // Update the gravity inside a transaction with implicit animations
    // disabled so the change takes effect without animating.
    NSString* videoGravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:videoGravity];
    [CATransaction commit];
}
1670
// Returns the first track in the array whose -isEnabled is YES, or nil when
// no track qualifies.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger enabledIndex = [tracks indexOfObjectPassingTest:^(id obj, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(obj) isEnabled];
    }];
    return enabledIndex == NSNotFound ? nil : [tracks objectAtIndex:enabledIndex];
}
1680
// Re-derives hasVideo/hasAudio/hasClosedCaptions (and the track lists) after
// AVFoundation reports a change in the tracks collection. Characteristics
// notifications are batched for the duration of this method.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary-audio language so we can fire
    // characteristicsChanged() only when it actually changes.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Report the natural size with the preferred transform applied (handles rotated video).
        presentationSizeDidChange(firstEnabledVideoTrack ? IntSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : IntSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify every enabled track by its media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // A selected media-selection option also counts as having audio/video.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media selection group (modern caption path) when present.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

    // Fall back to legacy CEA-608/708 closed-caption tracks when no legible
    // output support is available.
#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    // Flush any characteristics notifications batched above.
    setDelayCharacteristicsChangedNotification(false);
}
1781
1782 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of the given media type against the existing
// WebCore-side track objects (oldItems), creating wrappers for new tracks via
// itemFactory, updating oldItems in place, and notifying the MediaPlayer of
// each removal and addition.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current platform tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Platform tracks currently wrapped by the WebCore-side items.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the existing items into surviving and removed, then append
    // freshly created wrappers for the added platform tracks.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player after oldItems is consistent, removals first.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1826
1827 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Overload of the diffing helper for media selection groups: reconciles the
// group's current selection options against the existing WebCore-side items,
// creating wrappers via itemFactory and notifying the MediaPlayer of each
// removal and addition.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions();

    // Only add selection options which do not have an associated persistent track
    // (those are handled by the AVPlayerItemTrack-based overload).
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        // -track is SPI, so probe for it before calling.
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    // Selection options currently wrapped by the WebCore-side items.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    // Partition existing items into surviving and removed, then append
    // wrappers for the newly added options.
    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Notify the player after oldItems is consistent, removals first.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);
    
    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1887 #endif
1888
// Reconciles m_audioTracks against the player item's current audio tracks and
// (when available) the audible media selection group, then refreshes each
// track's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the audible media selection group wrapper.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
1914
// Reconciles m_videoTracks against the player item's current video tracks and
// (when available) the visual media selection group, then refreshes each
// track's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual media selection group wrapper.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Refresh the video tracks' cached properties. (This previously iterated
    // m_audioTracks — a copy/paste error from updateAudioTracks() that left
    // video track properties stale.)
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
1940
// A platform-rendered text track representation is only needed while the
// video is presented in its own fullscreen layer (iOS only).
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    return m_videoFullscreenLayer ? true : false;
#else
    return false;
#endif
}
1949
// Keeps the caption layer's frame aligned with the visible video area while
// in fullscreen (iOS only; no-op elsewhere).
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;

    // Track the video layer's rect when one exists; otherwise fall back to
    // the fullscreen frame.
    CGRect captionFrame;
    if (m_videoLayer)
        captionFrame = [m_videoLayer videoRect];
    else
        captionFrame = CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:captionFrame];
#endif
}
1960
// Installs (or removes, when representation is null) the platform layer used
// to render captions while in fullscreen (iOS only).
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    // Same layer as before: only its bounds may need updating.
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    // Detach the previous caption layer before swapping in the new one.
    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Attach the new caption layer only while a fullscreen layer exists.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
1984 #endif // ENABLE(VIDEO_TRACK)
1985
1986 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider bound to the current player item.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (!m_provider)
        m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    return m_provider.get();
}
1993 #endif
1994
// Publishes the cached presentation size as the element's natural size;
// without an asset there is nothing meaningful to report.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    setNaturalSize(roundedIntSize(m_cachedPresentationSize));
}
2002     
// True when the media did not cross origins while loading: the origin of the
// URL we requested matches the origin the asset finally resolved to (e.g.
// after redirects). Answers false conservatively until the resolved URL is known.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return false;
    
    Ref<SecurityOrigin> resolvedOrigin(SecurityOrigin::create(resolvedURL()));
    Ref<SecurityOrigin> requestedOrigin(SecurityOrigin::createFromString(assetURL()));
    return resolvedOrigin.get().isSameSchemeHostPort(&requestedOrigin.get());
}
2012
2013 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates the AVPlayerItemVideoOutput used to pull pixel buffers for painting
// and attaches it to the current player item. No-op without a player item or
// when the output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

    // Pick pixel-buffer attributes by build configuration: with VideoToolbox
    // on 10.9+ any native format is accepted (converted later in
    // createPixelBuffer()); older VideoToolbox builds request 4:2:2, and
    // non-VideoToolbox builds request BGRA directly.
#if USE(VIDEOTOOLBOX)
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
    NSDictionary* attributes = nil;
#else
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_422YpCbCr8) };
#endif
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([allocAVPlayerItemVideoOutputInstance() initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    // The delegate is notified on the shared pull queue; see
    // outputMediaDataWillChange().
    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2040
// Detaches and releases the AVPlayerItemVideoOutput created by
// createVideoOutput(). Safe to call when no output exists.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    // Clear with nullptr rather than the literal 0, matching the style used
    // elsewhere in this file.
    m_videoOutput = nullptr;
}
2052
// Pulls the pixel buffer for the player item's current time from the video
// output, converting it to 32BGRA via VideoToolbox when that path is enabled.
// Returns null when no new buffer is available for this time.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    // copyPixelBufferForItemTime: can return the same buffer repeatedly; only
    // pull when the output reports a new one.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Convert whatever format the output delivered into 32BGRA for painting.
    // NOTE(review): the CVPixelBufferCreate result code is not checked;
    // outputBuffer is assumed valid on return — confirm this cannot fail here.
    CVPixelBufferRef outputBuffer;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2093
// True when a frame can be painted right now: either a previously captured
// image exists, or the (lazily created) video output has a new pixel buffer
// for the current item time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2107
// CGDataProvider direct-access callback: lock the pixel buffer's base address
// for CPU reads. Balanced by CVPixelBufferReleaseBytePointerCallback.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(buffer);
}
2114
// CGDataProvider callback: release the read lock taken in
// CVPixelBufferGetBytePointerCallback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
}
2120
// CGDataProvider teardown callback: drop the retain taken when the provider
// was created in createImageFromPixelBuffer().
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CFRelease(buffer);
}
2126
// Wraps a 32BGRA pixel buffer in a CGImage without copying the pixels: a
// direct-access CGDataProvider reads straight from the buffer, which stays
// retained (and lock-managed) via the callbacks above until the image dies.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // BGRA in memory == little-endian 32-bit with alpha first.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2144
// Refreshes m_lastImage from the video output when a new pixel buffer exists.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (pixelBuffer)
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2155
// Paints the current frame, pulled through the AVPlayerItemVideoOutput, into
// the graphics context, honoring the video track's preferred transform
// (e.g. rotation). Blocks briefly for a frame if none is available yet.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& outputRect)
{
    if (m_videoOutput && !m_lastImage && !videoOutputHasAvailableFrame())
        waitForVideoOutputMediaDataWillChange();

    updateLastImage();

    // Nothing to paint if no frame has ever been captured.
    if (!m_lastImage)
        return;

    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    // Draw in the track's coordinate space: map the destination rect through
    // the inverse transform, then concatenate the transform onto the context.
    GraphicsContextStateSaver stateSaver(*context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    FloatRect transformedOutputRect = videoTransform.inverse().mapRect(outputRect);

    context->concatCTM(videoTransform);
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2186
// Returns the CGImage for the current time, refreshing it from the video
// output first; may be null if no frame has ever been captured.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2192
// Blocks the calling thread until the video output reports new media data
// (signalled by outputMediaDataWillChange()) or a one-second timeout elapses.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    // Lazily create the semaphore shared with outputMediaDataWillChange().
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // A non-zero result means the wait timed out rather than being signalled.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2206
// Video-output delegate callback (on the pull queue): wake any thread parked
// in waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2211 #endif
2212
2213 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1: builds a FairPlay streaming key request for the key URI carried in
// initData (which must match a pending "skd" loading request captured in
// shouldWaitForLoadingOfResource()) and delivers it to the page via keyMessage().
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // Unpack keyURI / keyID / app certificate from the caller-provided initData.
    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // There must be a pending AVFoundation loading request for this key URI.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    // Ask AVFoundation to produce the SPC for the app certificate and asset ID.
    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    // Surface a failure to the page as a key error rather than an exception.
    if (!keyRequest) {
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    // Deliver the key request bytes to the page for licensing.
    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2255
// Delivers a decryption key for the given session: the key bytes are handed to the
// AVAssetResourceLoadingRequest that was stashed by generateKeyRequest(), completing
// that in-flight resource load. initDataPtr/initDataLength are unused on this path.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // The session must have a pending resource loading request registered by generateKeyRequest().
    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    // Respond to the pending AVFoundation load with the key data and finish it.
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
2276
// Abandons the pending key request for sessionID, dropping its queued
// AVAssetResourceLoadingRequest without responding to it.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (m_sessionIDToRequestMap.contains(sessionID)) {
        m_sessionIDToRequestMap.remove(sessionID);
        return MediaPlayer::NoError;
    }

    // No such session: nothing to cancel.
    return MediaPlayer::InvalidPlayerState;
}
2288 #endif
2289
2290 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending resource loading request for keyURI, transferring
// ownership to the caller (a null RetainPtr if no request is pending for that URI).
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2295
// Creates a CDM session backed by this player, or null when keySystem is unsupported.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    return keySystemIsSupported(keySystem) ? std::make_unique<CDMSessionAVFoundationObjC>(this) : nullptr;
}
2303 #endif
2304
2305 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Synchronizes m_textTracks with the legacy (pre-AVPlayerItemLegibleOutput) closed
// caption tracks in m_cachedTracks: existing tracks still present are kept, new ones
// appended, and any that disappeared are reported via processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove() does not disturb indices still to be visited.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once an entry has been
            // removed from removedTextTracks the two vectors no longer share indices,
            // so indexing m_textTracks here would inspect the wrong track. This also
            // matches the sibling loop in processMediaSelectionOptions().
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2340 #endif
2341
2342 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Returns true once the asset's availableMediaCharacteristicsWithMediaSelectionOptions
// key has finished loading, i.e. the media selection groups are safe to query.
bool MediaPlayerPrivateAVFoundationObjC::hasLoadedMediaSelectionGroups()
{
    if (!m_avAsset)
        return false;

    return [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded;
}
2353
// Returns the asset's legible (caption/subtitle) selection group, or nil when the
// selection-options key has not yet loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible] : nil;
}
2361
// Returns the asset's audible selection group, or nil when the selection-options key
// has not yet loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible] : nil;
}
2369
// Returns the asset's visual selection group, or nil when the selection-options key
// has not yet loaded.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    return hasLoadedMediaSelectionGroups() ? [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual] : nil;
}
2377
// Synchronizes m_textTracks with the options of the asset's legible media selection
// group: matching tracks are kept, new options become new tracks, and tracks whose
// option vanished are reported removed via processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove() does not disturb indices still to be visited.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are handled by processLegacyClosedCaptionsTracks().
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            // An existing track corresponds to this option, so the track survives.
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2433
// Lazily creates the single in-band metadata (data-cue) text track used for HLS
// timed metadata and registers it with the MediaPlayer. Idempotent.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    // Dispatch type identifying Apple HTTP Live Streaming timed metadata.
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2443
// Forwards cue data from the legible output to the currently selected text track.
// attributedStrings/nativeSamples are toll-free bridged NSArray -> CFArrayRef.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    if (!m_currentTextTrack)
        return;

    m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2451
// Discards any partially accumulated cue state on the current text track,
// e.g. when playback position jumps and pending cues are no longer valid.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTextTrack)
        return;
    
    m_currentTextTrack->resetCueValues();
}
2461 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2462
// Switches the active text track, updating AVFoundation's selected legible media
// option (and, for legacy tracks, the closed-caption display flag) to match.
// Passing null deselects any legible option and disables closed-caption display.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Use nil (not 0) for the Objective-C pointer, matching the other
        // selectMediaOption: call sites in this file.
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2491
// Returns the language of the primary audio track, caching the result in the
// mutable member m_languageOfPrimaryAudioTrack (this method is const). Prefers the
// currently selected audible media-selection option; falls back to the language of
// a sole audio track; otherwise returns the empty string.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // A non-null cached value means we already computed the answer.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2533
2534 #if ENABLE(IOS_AIRPLAY) && PLATFORM(IOS)
// Reports whether playback is currently routed to an external (wireless) target.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool externalPlaybackActive = [m_avPlayer.get() isExternalPlaybackActive];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(externalPlaybackActive));
    return externalPlaybackActive;
}
2544
// Maps WebKitSystemInterface's external-device type for the current AVPlayer onto
// the MediaPlayer wireless-target enum.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // The switch above covers every enumerator; reaching here means a new type was added.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;
}
2562
// Returns the user-visible name of the external playback device, or the empty
// string when no player exists.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String targetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, targetName.utf8().data());

    return targetName;
}
2573
// Returns whether external (wireless) video playback is disallowed. Refreshes the
// mutable cache m_allowsWirelessVideoPlayback from the player when one exists;
// otherwise answers from the last cached value.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;
    
    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2584
// Records the wireless-video-disabled preference and pushes it to the AVPlayer if
// one exists; the cached value is applied later when a player is created.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;
    
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
}
2594
// Keeps external-screen playback enabled only while a fullscreen video layer is
// attached (m_videoFullscreenLayer non-nil).
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

    [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
}
2602 #endif
2603
// KVO handler for AVPlayerItem "status": cache the raw status value and recompute
// the ready/network states.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2610
// Prior (will-change) KVO notification for "playbackLikelyToKeepUp": note that a
// status change is in flight so updateStates() is deferred until it lands.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    ++m_pendingStatusChanges;
}
2615
// Did-change KVO notification for "playbackLikelyToKeepUp": cache the value and,
// once no other status change is pending, recompute states.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    // Every did-change must be paired with an earlier will-change.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2624
// Prior (will-change) KVO notification for "playbackBufferEmpty": defer
// updateStates() until the matching did-change arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    ++m_pendingStatusChanges;
}
2629
// Did-change KVO notification for "playbackBufferEmpty": cache the value and,
// once no other status change is pending, recompute states.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    // Every did-change must be paired with an earlier will-change.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2638
// Prior (will-change) KVO notification for "playbackBufferFull": defer
// updateStates() until the matching did-change arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    ++m_pendingStatusChanges;
}
2643
// Did-change KVO notification for "playbackBufferFull": cache the value and,
// once no other status change is pending, recompute states.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    // Every did-change must be paired with an earlier will-change.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2652
// KVO handler for "seekableTimeRanges": cache the NSArray and propagate the change.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2660
// KVO handler for "loadedTimeRanges": cache the NSArray and propagate the change.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2668
// KVO handler for the player layer's "readyForDisplay": when the first frame
// becomes displayable before any video track was reported, re-run track discovery.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2676
// KVO handler for an AVPlayerItemTrack's "enabled" flag; the new value itself is
// unused because tracksChanged() re-queries all tracks.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2682
// Starts or stops data buffering by attaching or detaching the AVPlayerItem from
// the AVPlayer; detaching the item is what actually makes AVFoundation stop loading.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Log message corrected to name this function (was "shouldBufferData").
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;
    
    if (!m_avPlayer)
        return;

    setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
2696
2697 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the data-cue "type" atom exposed to
// platform-independent track code; returns emptyAtom for unrecognized key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData is null-checked before use — presumably because it
    // is soft-linked and may be unavailable on older OS versions (TODO: confirm).
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
2719 #endif
2720
// Handles a KVO update of the player item's "timedMetadata": converts each
// AVMetadataItem into a data cue on the metadata track. mediaTime is the item's
// current time when the batch arrived (the observer clamps it to >= 0).
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    // AVFoundation delivers NSNull when the metadata transitions to "none".
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    // Metadata arriving mid-seek would be stamped with a stale/incorrect time, so drop it.
    if (seeking())
        return;

    if (!m_metadataTrack)
        processMetadataTrack();

    // No current metadata: close out any still-open cues at the current time.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        // Items without a valid duration become open-ended cues, closed later by
        // updatePendingCueEndTimes().
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
2762
// KVO handler for the player item's "tracks": filters the incoming track list,
// re-registers the "enabled" observer on each kept track, and notifies the base class.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Stop observing the previous track list before replacing it.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    // Keep tracks that are part of the asset; for streaming-only tracks, keep them
    // only if they are not covered by a media selection group (those are surfaced
    // through the selection-option code paths instead).
    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        AVAssetTrack* assetTrack = [obj assetTrack];

        if ([assetTracks containsObject:assetTrack])
            return YES;

        // Track is a streaming track. Omit if it belongs to a valid AVMediaSelectionGroup.
        if (!hasLoadedMediaSelectionGroups())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicAudible] && safeMediaSelectionGroupForAudibleMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicVisual] && safeMediaSelectionGroupForVisualMedia())
            return NO;

        if ([assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible] && safeMediaSelectionGroupForLegibleMedia())
            return NO;

        return YES;
    }]];

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // Invalidate the cached byte count; it must be recomputed for the new track set.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
2800
// KVO handler for "hasEnabledAudio": cache the flag and propagate the change.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
2808
// KVO handler for "presentationSize": cache the size and propagate the change.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
2816
// KVO handler for "duration": cache the new duration and drop the base class's
// cached value so it is re-read on demand.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
2823
// KVO handler for the AVPlayer's "rate": cache the rate and propagate the change.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
2831     
2832 #if ENABLE(IOS_AIRPLAY)
// KVO handler for the AVPlayer's "externalPlaybackActive": forward to the base class.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
2837 #endif
2838
// KVO handler for "canPlayFastForward": cache only; queried lazily elsewhere.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}
2843
// KVO handler for "canPlayFastReverse": cache only; queried lazily elsewhere.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
2848
// Returns the asset's post-redirect URL once AVFoundation has finished loading the
// "resolvedURL" key; until then, falls back to the base class implementation.
URL MediaPlayerPrivateAVFoundationObjC::resolvedURL() const
{
    if (!m_avAsset || [m_avAsset statusOfValueForKey:@"resolvedURL" error:nullptr] != AVKeyValueStatusLoaded)
        return MediaPlayerPrivateAVFoundation::resolvedURL();

    return URL([m_avAsset resolvedURL]);
}
2856
// AVAsset keys to load asynchronously before the asset is considered usable.
// The array is created once and intentionally never released (process lifetime).
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys;
    if (!keys) {
        keys = [[NSArray alloc] initWithObjects:@"duration",
                    @"naturalSize",
                    @"preferredTransform",
                    @"preferredVolume",
                    @"preferredRate",
                    @"playable",
                    @"resolvedURL",
                    @"tracks",
                    @"availableMediaCharacteristicsWithMediaSelectionOptions",
                   nil];
    }
    return keys;
}
2874
// AVPlayerItem key paths observed via KVO by WebCoreAVFMovieObserver.
// The array is created once and intentionally never released (process lifetime).
NSArray* itemKVOProperties()
{
    static NSArray* keys;
    if (!keys) {
        keys = [[NSArray alloc] initWithObjects:@"presentationSize",
                @"status",
                @"asset",
                @"tracks",
                @"seekableTimeRanges",
                @"loadedTimeRanges",
                @"playbackLikelyToKeepUp",
                @"playbackBufferFull",
                @"playbackBufferEmpty",
                @"duration",
                @"hasEnabledAudio",
                @"timedMetadata",
                @"canPlayFastForward",
                @"canPlayFastReverse",
                nil];
    }
    return keys;
}
2897
// AVAssetTrack keys to load asynchronously for each track.
// The array is created once and intentionally never released (process lifetime).
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
2903
2904 } // namespace WebCore
2905
2906 @implementation WebCoreAVFMovieObserver
2907
// Designated initializer: stores the (unowned) back-pointer to the C++ player,
// which is cleared later via -disconnect before the player is destroyed.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
2916
// Severs the link back to the C++ player. Called before the player is destroyed so
// queued performSelector requests and later notifications cannot reach a dead object.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}
2922
// Invoked when asynchronous asset key loading completes; forwards an
// AssetMetadataLoaded notification to the player on the main thread.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}
2930
// NSNotification handler for AVPlayerItemDidPlayToEndTime; forwards an
// ItemDidPlayToEndTime notification to the player on the main thread.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}
2938
2939 - (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
2940 {
2941     UNUSED_PARAM(object);
2942     id newValue = [change valueForKey:NSKeyValueChangeNewKey];
2943
2944     LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - keyPath = %s", self, [keyPath UTF8String]);
2945
2946     if (!m_callback)
2947         return;
2948
2949     bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];
2950
2951     WTF::Function<void ()> function;
2952
2953     if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
2954         if ([keyPath isEqualToString:@"readyForDisplay"])
2955             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
2956     }
2957
2958     if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
2959         if ([keyPath isEqualToString:@"enabled"])
2960             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
2961     }
2962
2963     if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
2964         if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
2965             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
2966         else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
2967             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
2968         else if ([keyPath isEqualToString:@"playbackBufferFull"])
2969             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
2970     }
2971
2972     if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
2973         // A value changed for an AVPlayerItem
2974         if ([keyPath isEqualToString:@"status"])
2975             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
2976         else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
2977             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
2978         else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
2979             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
2980         else if ([keyPath isEqualToString:@"playbackBufferFull"])
2981             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
2982         else if ([keyPath isEqualToString:@"asset"])
2983             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
2984         else if ([keyPath isEqualToString:@"loadedTimeRanges"])
2985             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
2986         else if ([keyPath isEqualToString:@"seekableTimeRanges"])
2987             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
2988         else if ([keyPath isEqualToString:@"tracks"])
2989             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
2990         else if ([keyPath isEqualToString:@"hasEnabledAudio"])
2991             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
2992         else if ([keyPath isEqualToString:@"presentationSize"])
2993             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
2994         else if ([keyPath isEqualToString:@"duration"])
2995             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
2996         else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
2997             MediaTime now;
2998             CMTime itemTime = [(AVPlayerItemType *)object currentTime];
2999             if (CMTIME_IS_NUMERIC(itemTime))
3000                 now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
3001             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
3002         } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
3003             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
3004         else if ([keyPath isEqualToString:@"canPlayFastForward"])
3005             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
3006     }
3007
3008     if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
3009         // A value changed for an AVPlayer.
3010         if ([keyPath isEqualToString:@"rate"])
3011             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
3012 #if ENABLE(IOS_AIRPLAY)
3013         else if ([keyPath isEqualToString:@"externalPlaybackActive"])
3014             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
3015 #endif
3016     }
3017     
3018     if (function.isNull())
3019         return;
3020
3021     auto weakThis = m_callback->createWeakPtr();
3022     m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
3023         // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
3024         // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
3025         if (!weakThis)
3026             return;
3027         function();
3028     }));
3029 }
3030
3031 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);

    // Bail out if the player private has already been disconnected.
    if (!m_callback)
        return;

    // This delegate callback can arrive on a non-main queue. Retain self, the
    // attributed strings, and the native sample buffers so they survive the hop
    // to the main thread, and re-check m_callback there because the player may
    // be torn down before the lambda runs.
    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    RetainPtr<NSArray> strongSamples = nativeSamples;
    callOnMainThread([strongSelf, strongStrings, strongSamples, itemTime] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        callback->processCue(strongStrings.get(), strongSamples.get(), toMediaTime(itemTime));
    });
}
3050
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    // Nothing to do once the player private has been disconnected.
    if (!m_callback)
        return;

    // Keep ourselves alive across the hop to the main thread; the callback
    // pointer is re-read there because it can be cleared before the lambda runs.
    RetainPtr<WebCoreAVFMovieObserver> protectedSelf = self;
    callOnMainThread([protectedSelf] {
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        if (!player)
            return;
        player->flushCues();
    });
}
3064 #endif
3065
3066 @end
3067
3068 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

// Stores a raw back-pointer to the player private. The player clears it via
// -setCallback: before it is destroyed, so every asynchronous block below
// re-reads m_callback on the main thread before using it.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (self)
        m_callback = callback;
    return self;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Tell AVFoundation we will service the request, then decide on the main
    // thread (where the player private lives). If the player is gone, or it
    // declines to service the request, finish the request with no error.
    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> protectedRequest = loadingRequest;
    callOnMainThread([protectedSelf, protectedRequest] {
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        if (!player || !player->shouldWaitForLoadingOfResource(protectedRequest.get()))
            [protectedRequest finishLoadingWithError:nil];
    });

    return YES;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Let AVFoundation evaluate server-trust challenges itself.
    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    // Answer on the main thread; cancel the challenge if the player has gone
    // away or chooses not to respond to it.
    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> protectedChallenge = challenge;
    callOnMainThread([protectedSelf, protectedChallenge] {
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        if (!player || !player->shouldWaitForResponseToAuthenticationChallenge(protectedChallenge.get()))
            [[protectedChallenge sender] cancelAuthenticationChallenge:protectedChallenge.get()];
    });

    return YES;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    // Forward the cancellation to the player private on the main thread if it
    // is still attached; otherwise there is nothing to clean up.
    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> protectedRequest = loadingRequest;
    callOnMainThread([protectedSelf, protectedRequest] {
        if (MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback)
            player->didCancelLoadingRequest(protectedRequest.get());
    });
}

- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
3147 #endif
3148
3149 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@implementation WebCoreAVFPullDelegate

// Stores a raw back-pointer to the player private; it is cleared with
// -setCallback: before the player goes away.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;
    m_callback->outputMediaDataWillChange(output);
}

- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
    // Intentionally empty: a flushed output sequence requires no action here.
}
@end
3176 #endif
3177
3178 #endif