MediaPlayerPrivateAVFoundation::hasAudio() returns false even when there is an audible track
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVTrackPrivateAVFObjCImpl.h"
32 #import "AudioSourceProviderAVFObjC.h"
33 #import "AudioTrackPrivateAVFObjC.h"
34 #import "AuthenticationChallenge.h"
35 #import "BlockExceptions.h"
36 #import "CDMSessionAVFoundationObjC.h"
37 #import "Cookie.h"
38 #import "ExceptionCodePlaceholder.h"
39 #import "FloatConversion.h"
40 #import "FloatConversion.h"
41 #import "GraphicsContext.h"
42 #import "GraphicsContextCG.h"
43 #import "InbandMetadataTextTrackPrivateAVF.h"
44 #import "InbandTextTrackPrivateAVFObjC.h"
45 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
46 #import "OutOfBandTextTrackPrivateAVF.h"
47 #import "URL.h"
48 #import "Logging.h"
49 #import "MediaSelectionGroupAVFObjC.h"
50 #import "MediaTimeAVFoundation.h"
51 #import "PlatformTimeRanges.h"
52 #import "SecurityOrigin.h"
53 #import "SerializedPlatformRepresentationMac.h"
54 #import "SoftLinking.h"
55 #import "TextTrackRepresentation.h"
56 #import "UUID.h"
57 #import "VideoTrackPrivateAVFObjC.h"
58 #import "WebCoreAVFResourceLoader.h"
59 #import "WebCoreCALayerExtras.h"
60 #import "WebCoreSystemInterface.h"
61 #import <objc/runtime.h>
62 #import <runtime/DataView.h>
63 #import <runtime/JSCInlines.h>
64 #import <runtime/TypedArrayInlines.h>
65 #import <runtime/Uint16Array.h>
66 #import <runtime/Uint32Array.h>
67 #import <runtime/Uint8Array.h>
68 #import <wtf/CurrentTime.h>
69 #import <wtf/Functional.h>
70 #import <wtf/ListHashSet.h>
71 #import <wtf/NeverDestroyed.h>
72 #import <wtf/text/CString.h>
73 #import <wtf/text/StringBuilder.h>
74
75 #if ENABLE(AVF_CAPTIONS)
76 #include "TextTrack.h"
77 #endif
78
79 #import <Foundation/NSGeometry.h>
80 #import <AVFoundation/AVFoundation.h>
81 #if PLATFORM(IOS)
82 #import <CoreImage/CoreImage.h>
83 #else
84 #import <QuartzCore/CoreImage.h>
85 #endif
86 #import <CoreMedia/CoreMedia.h>
87
88 #if USE(VIDEOTOOLBOX)
89 #import <CoreVideo/CoreVideo.h>
90 #import <VideoToolbox/VideoToolbox.h>
91 #endif
92
// Workaround: std algorithms require iterator_traits for the iterator types
// they are handed, and WebCore's HashSet iterator does not provide the nested
// typedefs. Supply a minimal specialization here.
// NOTE(review): only value_type is defined — presumably that is all the call
// sites in this file need; confirm if std usage grows.
namespace std {
template <> struct iterator_traits<HashSet<RefPtr<WebCore::MediaSelectionOptionAVFObjC>>::iterator> {
    typedef RefPtr<WebCore::MediaSelectionOptionAVFObjC> value_type;
};
}
98
// Container layer that keeps every sublayer's frame pinned to its own bounds,
// so a hosted AVPlayerLayer always fills the container.
@interface WebVideoContainerLayer : CALayer
@end

@implementation WebVideoContainerLayer

- (void)setBounds:(CGRect)bounds
{
    [super setBounds:bounds];
    // Propagate the new geometry to every hosted layer.
    for (CALayer *sublayer in [self sublayers])
        sublayer.frame = bounds;
}
@end
111
#if ENABLE(AVF_CAPTIONS)
// Note: This must be defined before our SOFT_LINK macros:
// SPI category: properties WebKit reads to identify out-of-band text tracks
// supplied to AVFoundation.
@class AVMediaSelectionOption;
@interface AVMediaSelectionOption (OutOfBandExtensions)
@property (nonatomic, readonly) NSString* outOfBandSource;
@property (nonatomic, readonly) NSString* outOfBandIdentifier;
@end
#endif
120
#if PLATFORM(IOS)
// SPI category: per-item YouTube identifier property available on iOS.
@class AVPlayerItem;
@interface AVPlayerItem (WebKitExtensions)
@property (nonatomic, copy) NSString* dataYouTubeID;
@end
#endif
127
// SPI category: exposes the URL AVFoundation actually resolved for the asset
// (may differ from the requested URL after redirects/canonicalization).
@interface AVURLAsset (WebKitExtensions)
@property (nonatomic, readonly) NSURL *resolvedURL;
@end
131
132 typedef AVPlayer AVPlayerType;
133 typedef AVPlayerItem AVPlayerItemType;
134 typedef AVPlayerItemLegibleOutput AVPlayerItemLegibleOutputType;
135 typedef AVPlayerItemVideoOutput AVPlayerItemVideoOutputType;
136 typedef AVMetadataItem AVMetadataItemType;
137 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
138 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
139
140 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
141 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
142 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
143 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
144
145 #if USE(VIDEOTOOLBOX)
146 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
147 #endif
148
149 SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
150 SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
151 SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
152 SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))
153
154 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
155 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
156 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
157 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
158 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
159 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
160 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
161 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
162
163 #if USE(VIDEOTOOLBOX)
164 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
165 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
166 #endif
167
168 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
169 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
170 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
171 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
172 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
173 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
174 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
175
176 SOFT_LINK_CLASS(CoreImage, CIContext)
177 SOFT_LINK_CLASS(CoreImage, CIImage)
178
179 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
180 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
181 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
182 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
183 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
184 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
185 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
186 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
187 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
188 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
189 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
190 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
191 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
192 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
193
194 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
195
196 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
197
198 #define AVPlayer getAVPlayerClass()
199 #define AVPlayerItem getAVPlayerItemClass()
200 #define AVPlayerLayer getAVPlayerLayerClass()
201 #define AVURLAsset getAVURLAssetClass()
202 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
203 #define AVMetadataItem getAVMetadataItemClass()
204
205 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
206 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
207 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
208 #define AVMediaTypeVideo getAVMediaTypeVideo()
209 #define AVMediaTypeAudio getAVMediaTypeAudio()
210 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
211 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
212 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
213 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
214 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
215 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
216 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
217 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
218 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
219 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
220
221 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
222 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
223 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
224
225 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
226 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
227 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
228
229 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
230 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
231 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
232 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
233
234 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
235 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
236 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
237 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
238 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
239 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
240 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
241 #endif
242
243 #if ENABLE(AVF_CAPTIONS)
244 SOFT_LINK_POINTER(AVFoundation, AVURLAssetHTTPCookiesKey, NSString*)
245 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
246 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
247 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
248 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
249 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
250 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
251 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
252 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
253 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
254
255 #define AVURLAssetHTTPCookiesKey getAVURLAssetHTTPCookiesKey()
256 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
257 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
258 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
259 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
260 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
261 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
262 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
263 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
264 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
265 #endif
266
267 #if ENABLE(DATACUE_VALUE)
268 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
269 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
270 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
271 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
272 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
273
274 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
275 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
276 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
277 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
278 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
279 #endif
280
281 #if PLATFORM(IOS)
282 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
283
284 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
285 #endif
286
287 #define kCMTimeZero getkCMTimeZero()
288
289 using namespace WebCore;
290
// KVO context values: observeValueForKeyPath: uses these to dispatch each
// change notification to the handler for the kind of object being observed
// (player item, item track, player, or AVPlayerLayer).
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
297
// Bridges AVFoundation callbacks (KVO, the did-play-to-end notification and,
// when supported, legible-output cue delivery) onto the owning
// MediaPlayerPrivateAVFoundationObjC. -disconnect severs the back-pointer
// when the player is torn down.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback;
    int m_delayCallbacks; // NOTE(review): presumably a nesting counter for deferring callbacks — confirm in the implementation.
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath parameter is untyped (implicitly id); consider (NSString *).
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
317
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Delegate that forwards AVAssetResourceLoader requests to the player so
// media data can be vended by WebKit's loader machinery.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Cleared via -setCallback: during teardown.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
327
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Delegate for AVPlayerItemVideoOutput pull notifications; informs the player
// when new video frames become available or the output sequence is flushed.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Cleared via -setCallback: during teardown.
    dispatch_semaphore_t m_semaphore; // NOTE(review): presumably used to wait for frame availability — confirm in the implementation.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
339
340 namespace WebCore {
341
// Forward declarations of file-local helpers defined later in this file.
static NSArray *assetMetadataKeyNames();
static NSArray *itemKVOProperties();
static NSArray* assetTrackMetadataKeyNames();
345
#if !LOG_DISABLED
// Maps a boolean to the literal "true"/"false" for log output.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
352
#if ENABLE(ENCRYPTED_MEDIA_V2)
// Maps a MediaPlayer to its ObjC private implementation so CDM session code
// can locate the player; entries are added in the constructor and removed in
// the destructor. Uses NeverDestroyed (already imported in this file) instead
// of the deprecated static-local macro, and drops the stray ';' that followed
// the function body.
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
static PlayerToPrivateMapType& playerToPrivateMap()
{
    static NeverDestroyed<PlayerToPrivateMapType> map;
    return map;
}
#endif
361
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily-created serial queue on which AVAssetResourceLoader delegate
// callbacks are delivered for all players.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
#endif
373
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily-created serial queue on which AVPlayerItemVideoOutput pull-delegate
// callbacks are delivered for all players.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
#endif
385
// Factory handed to MediaPlayer engine registration.
// Fix: the opening brace of the function body was missing.
PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
{
    return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
}
390
// Registers this engine with MediaPlayer, but only when the AVFoundation
// frameworks could be soft-linked at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar(create, getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
396
// Constructs the player private: creates the ObjC observer/delegate helper
// objects that forward AVFoundation callbacks back to this object and zeroes
// all cached-KVO state.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    // Receives KVO/notification/legible-output callbacks for this player.
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(IOS_AIRPLAY)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register so CDM session code can map the MediaPlayer back to this
    // object; the destructor removes the entry.
    playerToPrivateMap().set(player, this);
#endif
}
433
// Destructor: detaches all ObjC delegates first so late callbacks become
// no-ops, cancels outstanding resource loads, then tears down the layer and
// the remaining AVFoundation state via cancelLoad().
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Sever the loader delegate's back-pointer before the asset drops it.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    // Cancel any in-flight resource-loading requests.
    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Likewise detach the video-output pull delegate before releasing its semaphore.
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
458
// Aborts any in-progress load and tears down all AVFoundation state (asset,
// player item, player, outputs, KVO observations and cached values), leaving
// the object ready for a fresh load or destruction.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Remove the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every KVO observation that was added for the item.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    // Remove the periodic time observer and player KVO before releasing the player.
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(IOS_AIRPLAY)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = MediaTime::zeroTime();

    // Each cached track had an "enabled" observation added; remove them all.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setPlayerItem(nullptr);
#endif

    setIgnoreLoadStateChanges(false);
}
521
// True once createVideoLayer() has been asked to create the AVPlayerLayer
// (even if the asynchronous creation has not completed yet).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
526
// True when a renderer usable for drawing into a GraphicsContext exists:
// either the video output (when supported) or the image generator.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
535
// Creates the renderer used when drawing into a GraphicsContext: prefer an
// AVPlayerItemVideoOutput when available, otherwise fall back to an
// AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
544
// Lazily creates the AVAssetImageGenerator used for software painting.
// No-op when there is no asset yet or a generator already exists.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance: images must correspond to the exact requested time.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
561
// Tears down both possible context renderers: the video output (when
// supported) and the image generator.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
569
// Releases the AVAssetImageGenerator created by createImageGenerator(), if any.
// Fixes: clear the RetainPtr with nil (matching the rest of this file, e.g.
// m_avAsset = nil in cancelLoad()) instead of 0, and remove the doubled space
// in the log message.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying %p", this, m_imageGenerator.get());

    m_imageGenerator = nil;
}
579
// Asynchronously creates the AVPlayerLayer-based renderer. The work is
// deferred to the main thread; m_haveBeenAskedToCreateLayer guards against
// scheduling the creation more than once.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        // Bail out if the player was destroyed before this task ran.
        if (!weakThis)
            return;

        // Re-check: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

        // Inform the client that rendering switched to the layer path.
        player()->client().mediaPlayerRenderingModeChanged(player());
    });
}
600
// Creates the AVPlayerLayer attached to m_avPlayer, registers for
// readyForDisplay KVO, and (on iOS) parents it in either the fullscreen layer
// or an inline WebVideoContainerLayer that keeps it sized to its bounds.
// Fix: the LOG message named the wrong function ("createVideoLayer").
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    IntSize defaultSize = player()->client().mediaPlayerContentBoxRect().pixelSnappedSize();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    [m_videoLayer web_disableAllActions];
    m_videoInlineLayer = adoptNS([[WebVideoContainerLayer alloc] init]);
    [m_videoInlineLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    if (m_videoFullscreenLayer) {
        // Fullscreen: size to the fullscreen frame and parent under the fullscreen layer.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else {
        // Inline: the container layer keeps the video layer pinned to its bounds.
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
        [m_videoLayer setFrame:m_videoInlineLayer.get().bounds];
    }
#else
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
#endif
}
632
// Detaches and releases the AVPlayerLayer: removes the readyForDisplay
// observer, unhooks the player, and (on iOS) removes the layer from the
// fullscreen layer and drops the inline container.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
    m_videoInlineLayer = nil;
#endif

    m_videoLayer = nil;
}
651
// Whether a displayable frame exists: for layer rendering we rely on the
// cached readyForDisplay KVO state, otherwise on whether painting has
// already drawn a frame.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
659
660 #if ENABLE(AVF_CAPTIONS)
// Maps a platform text-track kind to the AVFoundation media-characteristic
// array used when registering out-of-band tracks. Unknown kinds fall back to
// the spoken-dialog characteristic, as do Caption and Subtitle.
// FIXME: Match these to correct types:
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    switch (kind) {
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
678     
// Forwards out-of-band track mode changes to the shared trackModeChanged()
// handling.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
683     
// Pushes the mode (hidden/disabled/showing) chosen by the page for each
// out-of-band text track down to the matching platform track. Tracks are
// matched by comparing the AVMediaSelectionOption's out-of-band identifier
// against the source's unique ID.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven from page-supplied sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Map the platform-neutral mode to the inband track's enum.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
714 #endif
715
716
// Runs the URL through NSURLProtocol canonicalization so lookups use the
// same form the loader will request. Falls back to the parsed URL when the
// input is empty or any canonicalization step fails.
static NSURL *canonicalURL(const String& url)
{
    NSURL *parsedURL = URL(ParsedURLString, url);
    if (url.isEmpty())
        return parsedURL;

    RetainPtr<NSURLRequest> originalRequest = adoptNS([[NSURLRequest alloc] initWithURL:parsedURL]);
    if (!originalRequest)
        return parsedURL;

    NSURLRequest *canonicalRequest = [NSURLProtocol canonicalRequestForRequest:originalRequest.get()];
    return canonicalRequest ? [canonicalRequest URL] : parsedURL;
}
733
#if PLATFORM(IOS)
// Converts a WebCore Cookie into the NSHTTPCookie form AVFoundation expects.
// The expires field is in milliseconds since the epoch, hence the /1000.
static NSHTTPCookie* toNSHTTPCookie(const Cookie& cookie)
{
    NSMutableDictionary *properties = [NSMutableDictionary dictionaryWithDictionary:@{
        NSHTTPCookieName: cookie.name,
        NSHTTPCookieValue: cookie.value,
        NSHTTPCookieDomain: cookie.domain,
        NSHTTPCookiePath: cookie.path,
        NSHTTPCookieExpires: [NSDate dateWithTimeIntervalSince1970:(cookie.expires / 1000)],
    }];
    if (cookie.secure)
        properties[NSHTTPCookieSecure] = @YES;
    if (cookie.session)
        properties[NSHTTPCookieDiscard] = @YES;

    return [NSHTTPCookie cookieWithProperties:properties];
}
#endif
753
// Creates the AVURLAsset for |url|, configuring loading options (reference
// restrictions, HTTP headers, out-of-band captions, cookies) before handing
// the URL to AVFoundation. No-op if an asset already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    // Suppress observer callbacks until the asset is fully configured.
    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Disallow asset references that cross the local/remote boundary.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    // Forward the page's Referer and User-Agent so media requests look like
    // other subresource loads.
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    // NOTE(review): "AVURLAssetHTTPHeaderFieldsKey" is a string-literal (SPI)
    // key rather than an exported constant — confirm it is still honored.
    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    // Only set the client bundle identifier when the key symbol is available
    // (it is weak-linked on some OS versions).
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

#if ENABLE(AVF_CAPTIONS)
    // Describe out-of-band caption tracks so AVFoundation exposes them as
    // media-selection options; each track is identified by its uniqueId.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

#if PLATFORM(IOS)
    // Hand the page's cookies to AVFoundation, which performs its own loads.
    Vector<Cookie> cookies;
    if (player()->getRawCookies(URL(ParsedURLString, url), cookies)) {
        RetainPtr<NSMutableArray> nsCookies = adoptNS([[NSMutableArray alloc] initWithCapacity:cookies.size()]);
        for (auto& cookie : cookies)
            [nsCookies addObject:toNSHTTPCookie(cookie)];

        [options setObject:nsCookies.get() forKey:AVURLAssetHTTPCookiesKey];
    }
#endif

    NSURL *cocoaURL = canonicalURL(url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route key/resource loading requests through our loader delegate.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    // A fresh asset's playability has not been queried yet; see checkPlayability().
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
838
// Installs |item| as the player's current item. AVPlayer item replacement is
// performed on the main thread; off-main-thread callers are bounced over with
// the player and item kept alive by RetainPtr captures.
void MediaPlayerPrivateAVFoundationObjC::setAVPlayerItem(AVPlayerItemType *item)
{
    if (!m_avPlayer)
        return;

    if (!pthread_main_np()) {
        RetainPtr<AVPlayerType> protectedPlayer = m_avPlayer.get();
        RetainPtr<AVPlayerItemType> protectedItem = item;
        dispatch_async(dispatch_get_main_queue(), [protectedPlayer, protectedItem] {
            [protectedPlayer replaceCurrentItemWithPlayerItem:protectedItem.get()];
        });
        return;
    }

    [m_avPlayer replaceCurrentItemWithPlayerItem:item];
}
855
// Creates the AVPlayer, registers KVO observers, configures external playback
// and media-selection behavior, and attaches any existing player item.
// No-op if the player already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    // Observe "rate" so rate changes made by AVFoundation itself are noticed.
    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
    updateDisableExternalPlayback();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebKit drives track selection itself (see createAVPlayerItem()), so turn
    // off AVFoundation's automatic selection.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:NO];
#endif

#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

    // Only video elements need a rendering layer.
    if (player()->client().mediaPlayerIsVideo())
        createAVPlayerLayer();

    // Attach an item created before the player existed.
    if (m_avPlayerItem)
        setAVPlayerItem(m_avPlayerItem.get());

    setDelayCallbacks(false);
}
888
// Creates the AVPlayerItem for the current asset, registers end-of-playback
// and KVO observers, sets up the legible (caption) output, and seeds default
// media selections. No-op if the item already exists.
//
// Cleanups vs. previous version: merged the two adjacent #if regions guarded
// by the identical condition, fixed the stray leading space before an #endif,
// and fixed the over-indented selectMediaOption... statements. No behavior
// change.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data.
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // NSKeyValueObservingOptionPrior delivers will-change notifications too.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    if (m_avPlayer)
        setAVPlayerItem(m_avPlayerItem.get());

#if PLATFORM(IOS)
    AtomicString value;
    if (player()->doesHaveAttribute("data-youtube-id", &value))
        [m_avPlayerItem.get() setDataYouTubeID: value];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Route WebVTT cue data through a legible output so WebKit, not
    // AVFoundation, renders captions.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];

    // Seed the default selection in each selection group (automatic selection
    // is disabled on the player itself).
    [m_avPlayerItem selectMediaOptionAutomaticallyInMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
    [m_avPlayerItem selectMediaOptionAutomaticallyInMediaSelectionGroup:safeMediaSelectionGroupForAudibleMedia()];
    [m_avPlayerItem selectMediaOptionAutomaticallyInMediaSelectionGroup:safeMediaSelectionGroupForVisualMedia()];
#endif

#if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
    if (m_provider)
        m_provider->setPlayerItem(m_avPlayerItem.get());
#endif

    setDelayCallbacks(false);
}
942
// Asynchronously loads the asset's "playable" key, once per asset (the flag
// is reset in createAVAssetForURL()). The completion handler may run on an
// arbitrary queue, so the notification is bounced to the main thread, and the
// weak pointer is re-checked in case this player was destroyed meanwhile.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
959
// Kicks off asynchronous loading of the asset-level metadata keys, then of the
// per-track keys for every track. A dispatch group counts the outstanding
// loads; when it drains, the observer's metadataLoaded is invoked on the main
// thread. The group is created/released manually (pre-ARC-managed GCD objects).
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
    // Balance for this enter is the leave at the end of the main-thread lambda.
    dispatch_group_enter(metadataLoadingGroup);
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only fan out to track loading if the player still exists and the
            // "tracks" key actually loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    // One enter per track, left in that track's completion handler.
                    dispatch_group_enter(metadataLoadingGroup);
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup);
                    }];
                }
            }
            dispatch_group_leave(metadataLoadingGroup);
        });
    }];

    // Runs once all enters have been balanced, i.e. all metadata has loaded.
    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });

        dispatch_release(metadataLoadingGroup);
    });
}
991
// Maps the cached AVPlayerItem state onto the cross-platform ItemStatus enum.
// Buffering flags are consulted in priority order: likely-to-keep-up, then
// buffer-full, then buffer-empty; otherwise the item is simply ready.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
1010
// Exposes the underlying AVPlayer to callers that need the native object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia media;
    media.type = PlatformMedia::AVFoundationMediaPlayerType;
    media.media.avfMediaPlayer = m_avPlayer.get();
    return media;
}
1019
// Returns the layer WebKit should composite for this player, or null if layer
// creation has never been requested. iOS composites the inline layer; other
// platforms composite the video layer directly.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;

#if PLATFORM(IOS)
    return m_videoInlineLayer.get();
#else
    return m_videoLayer.get();
#endif
}
1028
1029 #if PLATFORM(IOS)
// Reparents the video layer into (or out of) the fullscreen host layer. All
// layer surgery happens inside a CATransaction with implicit animations
// disabled so the move is not animated. The text-track representation layer
// follows the video layer into fullscreen.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    m_videoFullscreenLayer = videoFullscreenLayer;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];

    if (m_videoFullscreenLayer && m_videoLayer) {
        // Entering fullscreen: size to the fullscreen frame and reparent.
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoLayer removeFromSuperlayer];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else if (m_videoInlineLayer && m_videoLayer) {
        // Leaving fullscreen: snap back into the inline layer's bounds.
        [m_videoLayer setFrame:[m_videoInlineLayer bounds]];
        [m_videoLayer removeFromSuperlayer];
        [m_videoInlineLayer insertSublayer:m_videoLayer.get() atIndex:0];
    } else if (m_videoLayer)
        // No host layer at all: detach.
        [m_videoLayer removeFromSuperlayer];

    [CATransaction commit];

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }
#if ENABLE(IOS_AIRPLAY)
    updateDisableExternalPlayback();
#endif
}
1061
// Records the fullscreen frame and resizes the video layer to it. The frame
// change is deliberately animated: clearing the layer style re-enables
// implicit animations for the duration of the transaction, and
// web_disableAllActions turns them back off afterwards.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer) {
        [m_videoLayer setStyle:nil]; // This enables actions, i.e. implicit animations.
        [CATransaction begin];
        [m_videoLayer setFrame:CGRectMake(0, 0, frame.width(), frame.height())];
        [CATransaction commit];
        [m_videoLayer web_disableAllActions];
    }
    // Keep the caption layer aligned with the resized video layer.
    syncTextTrackBounds();
}
1077
// Translates WebKit's video gravity into the matching AVLayerVideoGravity
// string and applies it to the video layer. Unknown values assert and fall
// back to aspect-fit, matching the previous behavior.
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:videoGravity];
}
1096
// Returns the most recently cached timed metadata, or nil when none exists.
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
1103
// Returns the player item's access log rendered as a string, or the empty
// string when there is no item yet.
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *itemLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[itemLog extendedLogData] encoding:[itemLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1114
// Returns the player item's error log rendered as a string, or the empty
// string when there is no item yet.
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *itemLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[itemLog extendedLogData] encoding:[itemLog extendedLogDataStringEncoding]]);

    return logText.get();
}
1125 #endif
1126
// Shows or hides the video layer without triggering implicit animations.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer setHidden:!isVisible];
    [CATransaction commit];
}
1135     
// Starts playback by setting the AVPlayer rate to the requested rate.
// Fix: requestedRate() was previously called twice — once for the cache and
// once for the player — so a rate change between the two calls could leave
// m_cachedRate out of sync with the rate AVPlayer was actually given. Query
// it once and use that value for both.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    float rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1147
// Pauses playback by driving the AVPlayer rate to zero and caching that rate.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1159
// Returns the media duration, preferring the player item's value over the
// asset's. Indefinite CMTime (e.g. live streams) maps to +infinity; anything
// non-numeric maps to the invalid time.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaTime::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return toMediaTime(cmDuration);

    if (CMTIME_IS_INDEFINITE(cmDuration))
        return MediaTime::positiveInfiniteTime();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %.0f", this, toString(MediaTime::invalidTime()).utf8().data());
    return MediaTime::invalidTime();
}
1184
// Returns the current playback position, clamped to be non-negative; zero when
// no item exists or the item reports a non-numeric time.
MediaTime MediaPlayerPrivateAVFoundationObjC::currentMediaTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return MediaTime::zeroTime();

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return MediaTime::zeroTime();

    return std::max(toMediaTime(itemTime), MediaTime::zeroTime());
}
1196
// Seeks the player item to |time| within the given tolerances. The completion
// handler may fire on an arbitrary queue, so completion is forwarded to the
// main thread, guarded by a weak pointer in case this player was destroyed
// before the seek finished.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(const MediaTime& time, const MediaTime& negativeTolerance, const MediaTime& positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Drop any partially-built metadata cues; they belong to the old position.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime cmTime = toCMTime(time);
    CMTime cmBefore = toCMTime(negativeTolerance);
    CMTime cmAfter = toCMTime(positiveTolerance);

    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            // |finished| is NO when the seek was interrupted by another seek.
            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1223
// Applies the requested volume to the AVPlayer. On iOS volume is controlled
// by the system, so this is deliberately a no-op there.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
#if PLATFORM(IOS)
    UNUSED_PARAM(volume);
#else
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
#endif
}
1236
// Intentionally only logs: caption visibility on this media engine is driven
// through the media-selection / legible-output machinery rather than a player
// flag, so there is nothing to toggle here.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1246
// Pushes the currently requested rate to the AVPlayer.
// Fix: requestedRate() was previously called twice — once for the cache and
// once for the player — so a rate change between the two calls could leave
// m_cachedRate out of sync with what AVPlayer received. Query it once.
void MediaPlayerPrivateAVFoundationObjC::updateRate()
{
    setDelayCallbacks(true);
    float rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1254
// Reports the last rate we handed to (or observed from) the AVPlayer; zero
// until metadata is available.
float MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
1262
// Converts the cached loadedTimeRanges into PlatformTimeRanges, skipping any
// invalid or empty CMTimeRange. Empty result when there is no player item.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto ranges = PlatformTimeRanges::create();

    if (!m_avPlayerItem)
        return ranges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        ranges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return ranges;
}
1277
// Returns the earliest seekable position across all cached seekable ranges,
// or zero when no valid range exists.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return MediaTime::zeroTime();

    MediaTime earliest = MediaTime::positiveInfiniteTime();
    bool foundRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundRange = true;
        earliest = std::min(earliest, toMediaTime(range.start));
    }
    return foundRange ? earliest : MediaTime::zeroTime();
}
1297
// Returns the latest seekable position across all seekable ranges. The cached
// ranges are lazily (re)fetched from the player item when absent.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    MediaTime latest;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latest = std::max(latest, toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return latest;
}
1315
// Returns the end of the furthest-loaded time range, or zero when nothing has
// been buffered yet.
MediaTime MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED <= 1080
    // AVFoundation on Mountain Lion will occasionally not send a KVO notification
    // when loadedTimeRanges changes when there is no video output. In that case
    // update the cached value explicitly.
    if (!hasLayerRenderer() && !hasContextRenderer())
        m_cachedLoadedRanges = [m_avPlayerItem loadedTimeRanges];
#endif

    if (!m_cachedLoadedRanges)
        return MediaTime::zeroTime();

    // Track the maximum end time over all valid, non-empty loaded ranges.
    MediaTime maxTimeLoaded;
    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        MediaTime endOfRange = toMediaTime(CMTimeRangeGetEnd(timeRange));
        if (maxTimeLoaded < endOfRange)
            maxTimeLoaded = endOfRange;
    }

    return maxTimeLoaded;   
}
1342
// Returns the sum of all tracks' sample data lengths, computed once and then
// served from the mutable cache member on subsequent calls.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *itemTrack in m_cachedTracks.get())
            m_cachedTotalBytes += [[itemTrack assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1356
// Adopts |asset| as the current AVAsset, replacing any existing one.
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
1361
// Aggregates the load status of every metadata key into a single AssetStatus:
// loading/failed/cancelled if any key is in that state, otherwise playable or
// merely loaded depending on the asset's "playable" value.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // All keys loaded; distinguish playable assets from merely-loaded ones.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1390
// Paints the current video frame into |context|, preferring the video output
// path (when a decoded frame is available) over the slower image-generator
// path. Records that a frame has been drawn.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1411
// Best-effort software paint. Skipped entirely when painting is disabled,
// when frames are already being composited through a layer, or when no
// context renderer (image generator / video output) exists yet.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1427
// Paints the frame for the current time via AVAssetImageGenerator. The context
// is flipped vertically because CGContextDrawImage draws with a bottom-left
// origin while GraphicsContext is top-left.
// Fix: removed the redundant `image = 0;` — the RetainPtr releases the image
// when it goes out of scope at the end of the if-block anyway.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(%p)", this);

    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (image) {
        GraphicsContextStateSaver stateSaver(*context);
        context->translate(rect.x(), rect.y() + rect.height());
        context->scale(FloatSize(1.0f, -1.0f));
        context->setImageInterpolationQuality(InterpolationLow);
        IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
        CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    }
}
1443
// Returns the set of MIME types AVFoundation can play, queried once from
// AVURLAsset and cached for the process lifetime.
// Fix: restored the function's missing closing brace — as previously written
// the definition was unterminated and ran into the next function.
static HashSet<String> mimeTypeCache()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add(mimeType);

    return cache;
}
1458
1459
// Synchronously grabs a frame for |time| via the image generator (creating it
// on first use) and converts it to the device RGB color space for drawing.
// The 600 timescale is the conventional CMTime timescale for video.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Cap the generated image at the paint rect's size to avoid oversized decodes.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1481
// Copies the cached set of playable MIME types into |supportedTypes|.
// Fix: restored the function's missing closing brace — as previously written
// the definition was unterminated.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}
1485
1486
1487 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// True for the FairPlay Streaming key systems this engine can decrypt.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps") || equalIgnoringCase(keySystem, "com.apple.fps.1_0");
}
1494 #endif
1495
// Implements canPlayType() for this engine: rejects unsupported key systems
// and media-source content, answers from the MIME-type cache, and defers the
// "probably" vs. "maybe" distinction to AVFoundation when codecs are given.
// Fix: removed a stray doubled semicolon on the final return statement.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // Media-source playback is handled by a different engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1532
// Reports whether this engine can handle the given EME key system, optionally
// constrained to a particular container MIME type. Always false when neither
// ENCRYPTED_MEDIA flavor is compiled in.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (!keySystem.isEmpty()) {
        // Unknown key system: reject outright.
        if (!keySystemIsSupported(keySystem))
            return false;

        // A mime type was supplied but is not one we can play.
        if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
            return false;

        return true;
    }
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
#endif
    // Empty key system (or feature disabled) is never "supported".
    return false;
}
1551
1552 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Loader-delegate callback: AVFoundation asks whether WebKit will satisfy
// |avRequest|. Returns true when we take responsibility — either by routing a
// FairPlay key request through the EME "keyNeeded" machinery, or by loading
// the resource ourselves via WebCoreAVFResourceLoader.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // "skd" is the FairPlay Streaming key-delivery scheme.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Length prefix is written little-endian (third argument == true).
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): the division by sizeof(unsigned char) (== 1) looks like
        // it was intended as a byte-to-element conversion; as written it copies
        // keyURI.length() UTF-16 units — confirm against setRange()'s contract.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so generateKeyRequest()/takeRequestForKeyURI()
        // can answer it once the key arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }
#endif

    // All other resources are fetched through WebKit's own networking stack.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1588
// Forwards an AVFoundation authentication challenge to the MediaPlayer client
// so WebKit's normal credential handling can respond to it. Returns true when
// AVFoundation should wait for that response.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    UNUSED_PARAM(nsChallenge);
    // FIXME: <rdar://problem/15799844>
    return false;
#else
    // Wrap the NSURLAuthenticationChallenge in WebCore's platform-neutral type.
    AuthenticationChallenge challenge(nsChallenge);

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
#endif
}
1601
// AVFoundation cancelled |avRequest|; stop any in-flight load we started for
// it. The map entry itself is cleaned up in didStopLoadingRequest().
// (The previous version computed an unused `scheme` local; removed.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1611
// Drops the bookkeeping entry for a resource-loading request whose loader has
// finished or been stopped.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1616 #endif
1617
// This engine is usable only when both the AVFoundation and CoreMedia
// frameworks can be soft-linked at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && CoreMediaLibrary();
}
1622
// Intended to snap |timeValue| to the media timescale; currently an identity
// mapping in both branches (see the FIXME below).
MediaTime MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(const MediaTime& timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1631
// Applies the current aspect-ratio policy to the AVPlayerLayer, suppressing
// implicit CA animations so the change takes effect immediately.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

#if PLATFORM(IOS)
    // Do not attempt to change the video gravity while in full screen mode.
    // See setVideoFullscreenGravity().
    if (m_videoFullscreenLayer)
        return;
#endif

    NSString* videoGravity = AVLayerVideoGravityResize;
    if (shouldMaintainAspectRatio())
        videoGravity = AVLayerVideoGravityResizeAspect;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:videoGravity];
    [CATransaction commit];
}
1650
// Returns the first track in |tracks| whose isEnabled is YES, or nil when
// none is enabled.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    NSUInteger firstEnabledIndex = [tracks indexOfObjectPassingTest:^(id track, NSUInteger, BOOL*) {
        return [static_cast<AVAssetTrack*>(track) isEnabled];
    }];
    return firstEnabledIndex == NSNotFound ? nil : [tracks objectAtIndex:firstEnabledIndex];
}
1660
// Re-derives the cached hasVideo/hasAudio/hasClosedCaptions characteristics
// whenever AVFoundation's track collection changes, updates the VIDEO_TRACK
// wrappers, and fires characteristicsChanged() if the primary audio language
// moved. Characteristic notifications are batched for the duration.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary-audio language so we can detect a change
    // after the track lists are rebuilt.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // Natural size must account for the track's preferred transform (e.g. rotation).
        presentationSizeDidChange(firstEnabledVideoTrack ? IntSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : IntSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify every enabled player-item track by its media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Audio/video may be present only as a selected media-selection option
        // rather than as an enabled track; count those too.
        hasAudio |= (m_audibleGroup && m_audibleGroup->selectedOption());
        hasVideo |= (m_visualGroup && m_visualGroup->selectedOption());
#endif
#endif

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Caption availability comes from the legible media-selection group when
    // the platform provides one.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    // Flush any characteristic change accumulated above.
    setDelayCharacteristicsChangedNotification(false);
}
1761
1762 #if ENABLE(VIDEO_TRACK)
// Generic diffing helper: given the current AVPlayerItemTracks filtered to
// |trackType| and the previously known wrapper objects in |oldItems|, works
// out which platform tracks appeared/disappeared, rewrites |oldItems| in
// place, and notifies |player| of removals then additions.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current platform tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    // Platform tracks we already have wrappers for.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack())
            [oldTracks addObject:oldItem->playerItemTrack()];
    }

    // Find the added & removed AVPlayerItemTracks:
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition existing wrappers into surviving vs. removed.
    for (auto& oldItem : oldItems) {
        if (oldItem->playerItemTrack() && [removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly appeared platform track.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify only after |oldItems| is consistent; removals first.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1806
1807 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Overload of the diffing helper for media-selection groups: compares the
// group's current selection options against the wrappers in |oldItems|,
// rewrites |oldItems| in place, and notifies |player| of removals then
// additions. Options that are backed by a persistent AVAssetTrack are skipped
// because the track-based overload already accounts for them.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(MediaSelectionGroupAVFObjC* group, Vector<RefT>& oldItems, RefT (*itemFactory)(MediaSelectionOptionAVFObjC&), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    group->updateOptions();

    // Only add selection options which do not have an associated persistent track.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> newSelectionOptions;
    for (auto& option : group->options()) {
        if (!option)
            continue;
        AVMediaSelectionOptionType* avOption = option->avMediaSelectionOption();
        if (!avOption)
            continue;
        // -track is not declared on all OS versions; probe before calling.
        if (![avOption respondsToSelector:@selector(track)] || ![avOption performSelector:@selector(track)])
            newSelectionOptions.add(option);
    }

    // Selection options we already have wrappers for.
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> oldSelectionOptions;
    for (auto& oldItem : oldItems) {
        if (MediaSelectionOptionAVFObjC *option = oldItem->mediaSelectionOption())
            oldSelectionOptions.add(option);
    }

    // Find the added & removed AVMediaSelectionOptions:
    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> removedSelectionOptions;
    for (auto& oldOption : oldSelectionOptions) {
        if (!newSelectionOptions.contains(oldOption))
            removedSelectionOptions.add(oldOption);
    }

    ListHashSet<RefPtr<MediaSelectionOptionAVFObjC>> addedSelectionOptions;
    for (auto& newOption : newSelectionOptions) {
        if (!oldSelectionOptions.contains(newOption))
            addedSelectionOptions.add(newOption);
    }

    typedef Vector<RefT> ItemVector;
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition existing wrappers into surviving vs. removed.
    for (auto& oldItem : oldItems) {
        if (oldItem->mediaSelectionOption() && removedSelectionOptions.contains(oldItem->mediaSelectionOption()))
            removedItems.append(oldItem);
        else
            replacementItems.append(oldItem);
    }

    // Wrap each newly appeared selection option.
    for (auto& option : addedSelectionOptions)
        addedItems.append(itemFactory(*option.get()));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);
    
    // Notify only after |oldItems| is consistent; removals first.
    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);
    
    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1867 #endif
1868
// Rebuilds m_audioTracks from the current player-item tracks (and, where
// available, the audible media-selection group), notifying the MediaPlayer of
// additions/removals, then refreshes each wrapper's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the audible selection-group wrapper; some audio renditions
    // appear only as media-selection options, not as player-item tracks.
    if (!m_audibleGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForAudibleMedia())
            m_audibleGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_audibleGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_audibleGroup.get(), m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
#endif

    for (auto& track : m_audioTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
1894
// Rebuilds m_videoTracks from the current player-item tracks (and, where
// available, the visual media-selection group), notifying the MediaPlayer of
// additions/removals, then refreshes each wrapper's cached properties.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Lazily create the visual selection-group wrapper; some video renditions
    // appear only as media-selection options, not as player-item tracks.
    if (!m_visualGroup) {
        if (AVMediaSelectionGroupType *group = safeMediaSelectionGroupForVisualMedia())
            m_visualGroup = MediaSelectionGroupAVFObjC::create(m_avPlayerItem.get(), group);
    }

    if (m_visualGroup)
        determineChangedTracksFromNewTracksAndOldItems(m_visualGroup.get(), m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
#endif

    // Bug fix: this loop previously iterated m_audioTracks (copy/paste from
    // updateAudioTracks()), so video track properties were never refreshed.
    for (auto& track : m_videoTracks)
        track->resetPropertiesFromTrack();

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
1920
// A rendered (bitmap) caption representation is only needed on iOS while the
// media is in the fullscreen layer, where WebKit cannot composite HTML cues.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        return true;
#endif
    return false;
}
1929
// iOS only: keeps the caption-representation layer aligned with the video
// content. Uses the video layer's actual videoRect when present, otherwise
// falls back to the whole fullscreen frame.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;
    
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
1940
// iOS only: installs (or removes) the platform layer that renders text-track
// cues, parenting it under the fullscreen layer and sizing it to the video.
// No-op when the same representation layer is set again, beyond re-syncing
// its bounds.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    // Detach the previous representation before swapping it out.
    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
1964 #endif // ENABLE(VIDEO_TRACK)
1965
1966 #if ENABLE(WEB_AUDIO) && USE(MEDIATOOLBOX)
// Lazily creates the Web Audio source provider for the current player item
// and returns it; subsequent calls reuse the same provider.
AudioSourceProvider* MediaPlayerPrivateAVFoundationObjC::audioSourceProvider()
{
    if (m_provider)
        return m_provider.get();

    m_provider = AudioSourceProviderAVFObjC::create(m_avPlayerItem.get());
    return m_provider.get();
}
1973 #endif
1974
// Pushes the cached presentation size through as the player's natural size,
// once an asset exists to derive it from.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(roundedIntSize(m_cachedPresentationSize));
}
1982     
// True when the URL we requested and the URL AVFoundation actually resolved
// belong to the same security origin (same scheme, host, and port).
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset)
        return false;

    RefPtr<SecurityOrigin> originOfResolvedURL = SecurityOrigin::create(URL([m_avAsset resolvedURL]));
    RefPtr<SecurityOrigin> originOfRequestedURL = SecurityOrigin::createFromString(assetURL());
    return originOfResolvedURL->isSameSchemeHostPort(originOfRequestedURL.get());
}
1992
1993 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates and attaches an AVPlayerItemVideoOutput used to pull pixel buffers
// for software painting, then blocks (with a timeout) until the output can
// vend a frame so the first paint has data. No-op without a player item or
// when an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_422YpCbCr8) };
#else
    // Same literal style as the VIDEOTOOLBOX branch (was the legacy
    // dictionaryWithObjectsAndKeys: form).
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([[getAVPlayerItemVideoOutputClass() alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    waitForVideoOutputMediaDataWillChange();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
2018
// Detaches the video output from the player item (if still attached) and
// releases it. Safe to call when no output exists.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = 0;
}
2030
// Pulls the pixel buffer for the player item's current time from the video
// output, creating the output on demand. Returns null when no new buffer is
// available for that time. With VIDEOTOOLBOX, the 4:2:2 buffer is converted
// to 32BGRA through a lazily created VTPixelTransferSession.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    // No fresh frame for this timestamp; caller keeps its previous image.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // NOTE(review): the CVPixelBufferCreate result is not checked; on
    // allocation failure outputBuffer would be uninitialized — confirm
    // whether a failure path is needed here.
    CVPixelBufferRef outputBuffer;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
2071
// Reports whether a frame can be painted right now: either a frame was
// already captured (m_lastImage) or the video output has a new pixel buffer
// for the current item time. Creates the output on demand.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (!m_lastImage && !m_videoOutput)
        createVideoOutput();

    return m_lastImage || [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
2085
// CGDataProvider direct-access callback: locks the pixel buffer (read-only)
// and hands CG its base address. Balanced by the release callback below.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(pixelBuffer);
}
2092
// CGDataProvider callback: unlocks the base address locked by
// CVPixelBufferGetBytePointerCallback when CG is done with the bytes.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
2098
// CGDataProvider teardown callback: releases the pixel buffer retained when
// the provider was created (see createImageFromPixelBuffer).
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CFRelease(pixelBuffer);
}
2104
// Wraps a 32BGRA pixel buffer in a CGImage without copying the pixels: a
// direct-access CGDataProvider reads straight from the buffer, which stays
// alive (via the CFRetain below) until the provider's release callback runs.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // BGRA in memory == 32-bit little-endian with alpha first.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
2122
// Refreshes m_lastImage from the video output, keeping the previous image
// when no newer pixel buffer is available.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (pixelBuffer)
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
2133
// Software paint path using the AVPlayerItemVideoOutput: draws the most
// recent frame into |context| within |outputRect|, honoring the video
// track's preferred transform (e.g. rotation).
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& outputRect)
{
    updateLastImage();

    if (!m_lastImage)
        return;

    // The transform comes from the first enabled visual track; without one
    // there is nothing meaningful to draw.
    AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
    if (!firstEnabledVideoTrack)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(%p)", this);

    GraphicsContextStateSaver stateSaver(*context);
    FloatRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    AffineTransform videoTransform = [firstEnabledVideoTrack preferredTransform];
    // Map the destination rect back into pre-transform coordinates, then draw
    // under the track transform so the result lands in outputRect.
    FloatRect transformedOutputRect = videoTransform.inverse().mapRect(outputRect);

    context->concatCTM(videoTransform);
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, transformedOutputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();

}
2161
// Returns the CGImage for the current playback time, refreshing the cached
// frame first. May be null when no frame has ever been captured.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
2167
// Blocks the calling thread (up to 1 s) until the video output signals that
// media data will change, via outputMediaDataWillChange() posting the
// semaphore. Used right after the output is created so a frame is available
// for the first paint.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // Non-zero means the wait timed out rather than being signaled.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
2181
// Video-output delegate callback: releases the thread blocked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
2186 #endif
2187
2188 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1: turns the initData (key URI + certificate) into a FairPlay
// streaming key request via the pending AVAssetResourceLoadingRequest, sends
// the request bytes to the page through keyMessage(), and files the loading
// request under a freshly minted session ID for addKey()/cancelKeyRequest().
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // The key URI must match a request previously captured in
    // shouldWaitForLoadingOfResource().
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface the underlying platform error to the page; messaging a nil
        // underlyingError yields code 0.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2230
// EME v1: delivers the decryption key for |sessionID| by completing the
// stored AVAssetResourceLoadingRequest with the key bytes, then notifies the
// page via keyAdded(). initData is unused on this path.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // Session must have been created by generateKeyRequest().
    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    // Hand the key to AVFoundation and finish the loading request; the
    // session is one-shot, so drop it from the map.
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
2251
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    // Only sessions belonging to a supported key system can be cancelled.
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // Removing the map entry releases the pending AVAssetResourceLoadingRequest.
    if (m_sessionIDToRequestMap.contains(sessionID)) {
        m_sessionIDToRequestMap.remove(sessionID);
        return MediaPlayer::NoError;
    }

    // An unknown session ID means there is nothing to cancel.
    return MediaPlayer::InvalidPlayerState;
}
2263 #endif
2264
2265 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Transfers ownership of the pending AVAssetResourceLoadingRequest associated with
// keyURI to the caller, removing it from m_keyURIToRequestMap. Returns a null
// RetainPtr when no request is pending for that URI.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2270
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    // Create a CDM session only for key systems this media engine can handle.
    if (keySystemIsSupported(keySystem))
        return std::make_unique<CDMSessionAVFoundationObjC>(this);

    return nullptr;
}
2278 #endif
2279
2280 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Reconciles the current legacy (CEA-608-style) closed-caption tracks reported by the
// AVPlayerItem with our m_textTracks list: existing tracks that still match an
// AVPlayerItemTrack are kept, tracks no longer present are removed, and new
// closed-caption tracks are created.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every existing text track was removed; each one we can match
    // against a current player item track is taken back off this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once an element has been
            // removed from removedTextTracks (or a new track appended to m_textTracks
            // in an earlier outer iteration) the two vectors no longer line up, so
            // using the same index into m_textTracks compared the wrong track. This
            // matches the indexing used by processMediaSelectionOptions().
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    // Report tracks that appeared or disappeared to the player client.
    processNewAndRemovedTextTracks(removedTextTracks);
}
2315 #endif
2316
2317 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    // Selection groups are only valid once the asset has loaded its media-selection
    // metadata; answer nil until then (and when there is no asset at all).
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
}
2328
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForAudibleMedia()
{
    // Same guard as the legible variant: require a loaded asset before querying groups.
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
}
2339
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForVisualMedia()
{
    // Same guard as the legible variant: require a loaded asset before querying groups.
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicVisual];
}
2350
// Reconciles m_textTracks with the playable options of the asset's legible media
// selection group: matching tracks are kept, vanished ones removed, new options get
// a new InbandTextTrackPrivateAVFObjC (or OutOfBandTextTrackPrivateAVF). Legacy
// closed-caption tracks are handled separately and are skipped here.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every existing track was removed; matches below take tracks off this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    // Report tracks that appeared or disappeared to the player client.
    processNewAndRemovedTextTracks(removedTextTracks);
}
2406
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    // Lazily create the single in-band metadata text track the first time timed
    // metadata is seen; later calls are no-ops.
    if (!m_metadataTrack) {
        m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
        m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
        player()->addTextTrack(m_metadataTrack);
    }
}
2416
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, const MediaTime& time)
{
    // Cues are only meaningful while a text track is selected; forward them to it.
    if (m_currentTextTrack)
        m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2424
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    // Reset cue state on the selected text track, if any (e.g. after a seek).
    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2434 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2435
// Makes |track| the active text track (or deselects all when null), routing the
// selection to the correct AVFoundation mechanism: legacy CC display for
// LegacyClosedCaption tracks, otherwise the legible media selection group.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselecting: clear both the media-selection option and legacy CC display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2464
// Returns the BCP-ish language identifier of the primary audio track, caching the
// answer in m_languageOfPrimaryAudioTrack (a null String means "not yet computed";
// an empty String is a cached "unknown" answer).
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple ungrouped audio tracks: no single "primary" language.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2506
2507 #if ENABLE(IOS_AIRPLAY) && PLATFORM(IOS)
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    // Without a player there can be no external (AirPlay / TV-out) playback.
    if (!m_avPlayer)
        return false;

    bool isExternal = [m_avPlayer.get() isExternalPlaybackActive];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isExternal));
    return isExternal;
}
2517
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

    // Map the WebKitSystemInterface external-device type onto the MediaPlayer enum.
    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    }

    // The switch above is exhaustive; getting here means a new enumerator was added.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;
}
2535
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    // No player means no external device to name.
    if (!m_avPlayer)
        return emptyString();

    String deviceName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, deviceName.utf8().data());

    return deviceName;
}
2546
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    // With a live player, refresh the cached value from AVFoundation before answering;
    // otherwise fall back to the last cached state.
    if (m_avPlayer) {
        m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));
    }

    return !m_allowsWirelessVideoPlayback;
}
2557
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    // Remember the preference even when the player does not exist yet; the cached
    // value is consulted by wirelessVideoPlaybackDisabled().
    m_allowsWirelessVideoPlayback = !disabled;
    if (m_avPlayer)
        [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
}
2567
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    // Mirror fullscreen state into AVFoundation: only keep playback on the external
    // screen while a fullscreen video layer is attached.
    if (m_avPlayer)
        [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
}
2575 #endif
2576
// Main-thread KVO sink: caches the AVPlayerItem status and re-evaluates player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}

// The "willChange" half of each prior/post KVO pair below bumps a pending counter so
// that updateStates() runs only once the matching "didChange" arrives; the pairs are
// matched by NSKeyValueObservingOptionPrior notifications.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    // Every didChange must have been preceded by a willChange.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}

void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2625
// Main-thread KVO sinks: each caches the new value and notifies the shared
// MediaPlayerPrivateAVFoundation state machine.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    // A first frame arriving without known video means track info is stale; refresh it.
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    // The new value itself is unused; re-derive everything from the track list.
    tracksChanged();
    updateStates();
}
2655
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::shouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;

    // Detaching the AVPlayerItem from the player halts buffering; re-attaching it resumes.
    if (m_avPlayer)
        setAVPlayerItem(shouldBuffer ? m_avPlayerItem.get() : nil);
}
2669
2670 #if ENABLE(DATACUE_VALUE)
// Maps an AVMetadataKeySpace* constant to the DataCue "type" atom exposed to script.
// Returns emptyAtom for unrecognized key spaces.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    // AVMetadataKeySpaceISOUserData is checked for null first because the constant is
    // not available in every SDK this file builds against (weak-linked symbol).
    if (AVMetadataKeySpaceISOUserData && [avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
2692 #endif
2693
// Main-thread KVO sink for AVPlayerItem.timedMetadata. Caches the new metadata array
// (NSNull means "cleared") and, when DATACUE_VALUE is enabled, turns each
// AVMetadataItem into a data cue on the lazily-created metadata text track.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, const MediaTime& mediaTime)
{
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %s", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, toString(mediaTime).utf8().data());

#if ENABLE(DATACUE_VALUE)
    if (seeking())
        return;

    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        // FIX: metadata can be cleared before any metadata ever arrived, in which case
        // m_metadataTrack was never created (it is only created below, after this
        // early return) and dereferencing it here crashed.
        if (m_metadataTrack)
            m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    if (!m_metadataTrack)
        processMetadataTrack();

    // Set the duration of all incomplete cues before adding new ones.
    MediaTime earliestStartTime = MediaTime::positiveInfiniteTime();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        MediaTime start = toMediaTime(item.time);
        // Items without a valid duration stay open-ended until the next batch arrives.
        MediaTime end = MediaTime::positiveInfiniteTime();
        if (CMTIME_IS_VALID(item.duration))
            end = start + toMediaTime(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
2735
// Main-thread KVO sink for AVPlayerItem.tracks. Rebalances the "enabled" KVO
// observers across the old and new cached track lists, filters out streaming-only
// tracks, and re-derives player state.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    // Remove observers from the outgoing list before replacing it, so every addObserver
    // below stays balanced with exactly one removeObserver.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    NSArray *assetTracks = [m_avAsset tracks];

    // Tracks which are not present in the AVAsset are streaming tracks, and will instead be represented by
    // AVMediaSelectionOptions.
    m_cachedTracks = [tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id obj, NSUInteger, BOOL*) {
        return [assetTracks containsObject:[obj assetTrack]];
    }]];

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // Invalidate the cached byte total; it is recomputed lazily from the new tracks.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
2757
// Main-thread KVO sinks: each caches the observed value and kicks the state machine.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    // Cached here so audio presence can be answered for streaming tracks that are not
    // in the AVAsset track list — presumably consulted by hasAudio(); verify in header.
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}

void MediaPlayerPrivateAVFoundationObjC::durationDidChange(const MediaTime& duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}

void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
2788     
2789 #if ENABLE(IOS_AIRPLAY)
// Main-thread KVO sink for AVPlayer.externalPlaybackActive; forwards to the base class.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
2794 #endif
2795
// Main-thread KVO sinks: cache the trick-play capabilities; no state re-evaluation needed.
void MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange(bool newValue)
{
    m_cachedCanPlayFastForward = newValue;
}

void MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange(bool newValue)
{
    m_cachedCanPlayFastReverse = newValue;
}
2805
// AVAsset keys loaded asynchronously before the asset is considered ready.
// The array is allocated once and intentionally never released (process-lifetime static).
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys;
    if (!keys) {
        keys = [[NSArray alloc] initWithObjects:@"duration",
                    @"naturalSize",
                    @"preferredTransform",
                    @"preferredVolume",
                    @"preferredRate",
                    @"playable",
                    @"tracks",
                    @"availableMediaCharacteristicsWithMediaSelectionOptions",
                   nil];
    }
    return keys;
}
2822
// AVPlayerItem key paths observed via KVO by WebCoreAVFMovieObserver; each has a
// matching branch in -observeValueForKeyPath:ofObject:change:context:.
// The array is allocated once and intentionally never released (process-lifetime static).
NSArray* itemKVOProperties()
{
    static NSArray* keys;
    if (!keys) {
        keys = [[NSArray alloc] initWithObjects:@"presentationSize",
                @"status",
                @"asset",
                @"tracks",
                @"seekableTimeRanges",
                @"loadedTimeRanges",
                @"playbackLikelyToKeepUp",
                @"playbackBufferFull",
                @"playbackBufferEmpty",
                @"duration",
                @"hasEnabledAudio",
                @"timedMetadata",
                @"canPlayFastForward",
                @"canPlayFastReverse",
                nil];
    }
    return keys;
}
2845
// AVAssetTrack keys loaded asynchronously for each track.
// Allocated once and intentionally never released (process-lifetime static).
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
2851
2852 } // namespace WebCore
2853
2854 @implementation WebCoreAVFMovieObserver
2855
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    // Keep a raw back-pointer to the C++ player; it is cleared via -disconnect before
    // the player is destroyed.
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}
2864
- (void)disconnect
{
    // Cancel any queued performSelector callbacks and sever the back-pointer so that
    // in-flight notifications become no-ops.
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = nullptr;
}
2870
- (void)metadataLoaded
{
    // Forward to the C++ player unless -disconnect already ran.
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}
2878
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);

    // Forward end-of-playback to the C++ player unless -disconnect already ran.
    if (m_callback)
        m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}
2886
// Central KVO dispatcher. Observations arrive on arbitrary threads; each recognized
// key path is bound into a WTF::Function that is replayed on the main thread via
// scheduleMainThreadNotification, guarded by a WeakPtr to the player.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - keyPath = %s", self, [keyPath UTF8String]);

    if (!m_callback)
        return;

    // Present (and YES) only for the "prior" half of NSKeyValueObservingOptionPrior pairs.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

    WTF::Function<void ()> function;

    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // "Prior" notifications feed the pending-status-change counters (the *WillChange methods).
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            // NOTE(review): newValue here is the item's AVAsset, yet it is wrapped as
            // RetainPtr<NSArray> to match setAsset's declared parameter — confirm
            // against the header; the RetainPtr only manages lifetime, so this works.
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, toMediaTime([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time now; the bound call runs later on the main thread.
            MediaTime now;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(toMediaTime(itemTime), MediaTime::zeroTime());
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        } else if ([keyPath isEqualToString:@"canPlayFastReverse"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastReverseDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"canPlayFastForward"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::canPlayFastForwardDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(IOS_AIRPLAY)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }
    
    if (function.isNull())
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}
2978
2979 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    // Keep the observer and both arrays alive until the main-thread block has
    // run; AVFoundation may release its references as soon as we return.
    RetainPtr<WebCoreAVFMovieObserver> protectedSelf = self;
    RetainPtr<NSArray> protectedStrings = strings;
    RetainPtr<NSArray> protectedSamples = nativeSamples;
    callOnMainThread([protectedSelf, protectedStrings, protectedSamples, itemTime] {
        // Re-check the back-pointer on the main thread; it may have been
        // cleared while this block was queued.
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        if (!player)
            return;
        player->processCue(protectedStrings.get(), protectedSamples.get(), toMediaTime(itemTime));
    });
}
2998
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;

    // Retain the observer so the m_callback read inside the block stays valid.
    RetainPtr<WebCoreAVFMovieObserver> protectedSelf = self;
    callOnMainThread([protectedSelf] {
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        if (!player)
            return;
        player->flushCues();
    });
}
3012 #endif
3013
3014 @end
3015
3016 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

// Relays AVAssetResourceLoader delegate callbacks, which arrive on an
// AVFoundation background queue, to the owning MediaPlayerPrivateAVFoundationObjC
// on the main thread. m_callback is a raw back-pointer that the player clears
// through -setCallback: during teardown, so each main-thread block re-checks it
// before use.

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (self)
        m_callback = callback;
    return self;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Keep the delegate and the request alive until the main-thread block runs.
    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> request = loadingRequest;
    callOnMainThread([protectedSelf, request] {
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        // If the player is gone, or declines to service this request, finish
        // the request immediately with no error.
        if (!player || !player->shouldWaitForLoadingOfResource(request.get()))
            [request finishLoadingWithError:nil];
    });

    return YES;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Server-trust evaluation is left to AVFoundation's default handling.
    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> protectedChallenge = challenge;
    callOnMainThread([protectedSelf, protectedChallenge] {
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        // Cancel the challenge when the player is gone or declines to respond.
        if (!player || !player->shouldWaitForResponseToAuthenticationChallenge(protectedChallenge.get()))
            [[protectedChallenge sender] cancelAuthenticationChallenge:protectedChallenge.get()];
    });

    return YES;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> request = loadingRequest;
    callOnMainThread([protectedSelf, request] {
        if (MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback)
            player->didCancelLoadingRequest(request.get());
    });
}

- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
3095 #endif
3096
3097 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@implementation WebCoreAVFPullDelegate

// Receives AVPlayerItemVideoOutput pull notifications and relays them to the
// owning MediaPlayerPrivateAVFoundationObjC through the raw m_callback
// back-pointer, which the player clears via -setCallback: during teardown.

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;
    m_callback->outputMediaDataWillChange(output);
}

- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    // Intentionally empty: a flush requires no work from WebCore.
    UNUSED_PARAM(output);
}
@end
3124 #endif
3125
3126 #endif