Unreviewed GTK gardening.
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVTrackPrivateAVFObjCImpl.h"
32 #import "AudioTrackPrivateAVFObjC.h"
33 #import "AuthenticationChallenge.h"
34 #import "BlockExceptions.h"
35 #import "CDMSessionAVFoundationObjC.h"
36 #import "ExceptionCodePlaceholder.h"
37 #import "FloatConversion.h"
38 #import "FloatConversion.h"
39 #import "FrameView.h"
40 #import "GraphicsContext.h"
41 #import "GraphicsContextCG.h"
42 #import "InbandMetadataTextTrackPrivateAVF.h"
43 #import "InbandTextTrackPrivateAVFObjC.h"
44 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
45 #import "OutOfBandTextTrackPrivateAVF.h"
46 #import "URL.h"
47 #import "Logging.h"
48 #import "MediaTimeMac.h"
49 #import "PlatformTimeRanges.h"
50 #import "SecurityOrigin.h"
51 #import "SerializedPlatformRepresentationMac.h"
52 #import "SoftLinking.h"
53 #import "TextTrackRepresentation.h"
54 #import "UUID.h"
55 #import "VideoTrackPrivateAVFObjC.h"
56 #import "WebCoreAVFResourceLoader.h"
57 #import "WebCoreSystemInterface.h"
58 #import <objc/runtime.h>
59 #import <runtime/DataView.h>
60 #import <runtime/JSCInlines.h>
61 #import <runtime/TypedArrayInlines.h>
62 #import <runtime/Uint16Array.h>
63 #import <runtime/Uint32Array.h>
64 #import <runtime/Uint8Array.h>
65 #import <wtf/CurrentTime.h>
66 #import <wtf/Functional.h>
67 #import <wtf/NeverDestroyed.h>
68 #import <wtf/text/CString.h>
69 #import <wtf/text/StringBuilder.h>
70
71 #if ENABLE(AVF_CAPTIONS)
72 #include "TextTrack.h"
73 #endif
74
75 #import <Foundation/NSGeometry.h>
76 #import <AVFoundation/AVFoundation.h>
77 #if PLATFORM(IOS)
78 #import <CoreImage/CoreImage.h>
79 #else
80 #import <QuartzCore/CoreImage.h>
81 #endif
82 #import <CoreMedia/CoreMedia.h>
83
84 #if USE(VIDEOTOOLBOX)
85 #import <CoreVideo/CoreVideo.h>
86 #import <VideoToolbox/VideoToolbox.h>
87 #endif
88
89 #if ENABLE(AVF_CAPTIONS)
90 // Note: This must be defined before our SOFT_LINK macros:
91 @class AVMediaSelectionOption;
92 @interface AVMediaSelectionOption (OutOfBandExtensions)
93 @property (nonatomic, readonly) NSString* outOfBandSource;
94 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
95 @end
96 #endif
97
98 #if PLATFORM(IOS)
99 @class AVPlayerItem;
100 @interface AVPlayerItem (WebKitExtensions)
101 @property (nonatomic, copy) NSString* dataYouTubeID;
102 @end
103 #endif
104
105 typedef AVPlayerItem AVPlayerItemType;
106 typedef AVMetadataItem AVMetadataItemType;
107
108 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
109 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
110 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
111 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
112
113 #if USE(VIDEOTOOLBOX)
114 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
115 #endif
116
117 SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
118 SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
119 SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
120 SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))
121
122 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
123 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
124 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
125 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
126 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
127 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
128 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
129 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
130
131 #if USE(VIDEOTOOLBOX)
132 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
133 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
134 #endif
135
136 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
137 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
138 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
139 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
140 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
141 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
142 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
143
144 SOFT_LINK_CLASS(CoreImage, CIContext)
145 SOFT_LINK_CLASS(CoreImage, CIImage)
146
147 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
148 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
149 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
150 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
151 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
152 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
153 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
154 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
155 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
156 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
157 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
158 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
159 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
160 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
161
162 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
163
164 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
165
166 #define AVPlayer getAVPlayerClass()
167 #define AVPlayerItem getAVPlayerItemClass()
168 #define AVPlayerLayer getAVPlayerLayerClass()
169 #define AVURLAsset getAVURLAssetClass()
170 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
171 #define AVMetadataItem getAVMetadataItemClass()
172
173 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
174 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
175 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
176 #define AVMediaTypeVideo getAVMediaTypeVideo()
177 #define AVMediaTypeAudio getAVMediaTypeAudio()
178 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
179 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
180 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
181 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
182 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
183 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
184 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
185 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
186 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
187 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
188
189 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
190 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
191 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
192
193 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
194 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
195 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
196
197 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
198 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
199 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
200 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
201
202 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
203 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
204 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
205 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
206 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
207 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
208 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
209 #endif
210
211 #if ENABLE(AVF_CAPTIONS)
212 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
213 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
214 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
215 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
216 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
217 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
218 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
219 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
220 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
221
222 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
223 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
224 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
225 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
226 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
227 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
228 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
229 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
230 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
231 #endif
232
233 #if ENABLE(DATACUE_VALUE)
234 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
235 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
236 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
237 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
238 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
239
240 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
241 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
242 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
243 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
244 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
245 #endif
246
247 #if PLATFORM(IOS)
248 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
249
250 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
251 #endif
252
253 #define kCMTimeZero getkCMTimeZero()
254
255 using namespace WebCore;
256
// Context values passed when registering KVO observations so the observer
// callback can tell which kind of object (item, item track, player, or
// player layer) produced the change notification.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayerItemTrack,
    MediaPlayerAVFoundationObservationContextPlayer,
    MediaPlayerAVFoundationObservationContextAVPlayerLayer,
};
263
// Objective-C helper that forwards KVO change notifications, the
// did-play-to-end notification, and (when legible output is supported)
// caption cues to the C++ player. The callback pointer is not owned; it is
// cleared via -disconnect during teardown (see cancelLoad()).
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Not owned; cleared by -disconnect.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath has no explicit type (implicitly id); KVO delivers an NSString * here.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
283
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate that forwards resource loading requests to
// the C++ player. The callback pointer is not owned and is nulled with
// -setCallback: before the player is destroyed.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Not owned.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
293
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// AVPlayerItemOutput pull delegate: receives "media data will change" and
// "sequence was flushed" callbacks for the video output and forwards them to
// the C++ player. The callback pointer is not owned.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Not owned; cleared via -setCallback:.
    dispatch_semaphore_t m_semaphore; // presumably signaled when media data changes — TODO confirm against implementation
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
305
306 namespace WebCore {
307
308 static NSArray *assetMetadataKeyNames();
309 static NSArray *itemKVOProperties();
310 static NSArray* assetTrackMetadataKeyNames();
311
#if !LOG_DISABLED
// Printable representation of a boolean, for LOG() messages.
static const char *boolString(bool value)
{
    if (value)
        return "true";
    return "false";
}
#endif
318
#if ENABLE(ENCRYPTED_MEDIA_V2)
// Global map from a MediaPlayer to its AVFoundation-backed private
// implementation, so CDM session code can find the player instance.
// Entries are added in the constructor and removed in the destructor.
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
}
#endif
327
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the single serial queue on which AVAssetResourceLoader delegate
// callbacks are delivered; created lazily and shared by all players.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t createOnce;
    dispatch_once(&createOnce, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
#endif
339
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the single serial queue used for AVPlayerItemOutput pull-delegate
// callbacks; created lazily and shared by all players.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullQueue;
    static dispatch_once_t createOnce;
    dispatch_once(&createOnce, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
#endif
351
// Factory used by registerMediaEngine(); creates a new private player.
// NOTE: the opening brace was missing in the original text, which would not
// compile — restored here.
PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
{
    return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
}
356
// Registers this engine's factory and capability callbacks with MediaPlayer,
// but only when AVFoundation can actually be soft-linked at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar(create, getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
362
// Initializes cached playback state and the Objective-C delegate helpers.
// The AVPlayer / AVPlayerItem themselves are created lazily once a URL is
// loaded, not here.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedDuration(MediaPlayer::invalidTime())
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
#if ENABLE(IOS_AIRPLAY)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register in the global map so CDM session code can find this player.
    playerToPrivateMap().set(player, this);
#endif
}
399
// Clears delegate back-pointers first so in-flight callbacks on other queues
// become no-ops, then performs the full teardown via cancelLoad().
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    // Invalidate any outstanding resource loaders so they stop calling back.
    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif
    cancelLoad();
}
420
// Stops any in-progress load and releases every AVFoundation object, KVO
// registration, and cached property, returning the player to its initial
// state. Order matters: outputs are detached from the item before the item
// is released, and observers are removed before their targets go away.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before releasing it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Remove every KVO observation registered on the player item.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(IOS_AIRPLAY)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = 0;

    // Tracks were individually observed for "enabled"; unobserve before dropping.
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

    setIgnoreLoadStateChanges(false);
}
478
// A layer renderer exists whenever the AVPlayerLayer has been created.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return !!m_videoLayer;
}
483
// A context (software) renderer exists if either the video output or the
// image generator has been created.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return !!m_imageGenerator;
}
492
// Creates whichever software rendering path this build supports: an
// AVPlayerItemVideoOutput when available, otherwise an image generator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
501
// Lazily creates the AVAssetImageGenerator used for painting frames in
// software. No-op until an asset exists or once a generator has been made.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    // Exact-time frames (zero tolerance) with the track transform applied.
    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
518
// Destroys the context renderer(s) created by createContextVideoRenderer().
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
526
// Releases the image generator created by createImageGenerator(), if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (m_imageGenerator) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());
        m_imageGenerator = 0;
    }
}
536
// Creates the AVPlayerLayer used for accelerated rendering. The layer is
// created on the main thread; the weak pointer guards against this object
// being destroyed before the dispatched block runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_videoLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: state may have changed before this block was scheduled.
        if (!m_avPlayer || m_videoLayer)
            return;

        m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
        [m_videoLayer.get() setPlayer:m_avPlayer.get()];
        [m_videoLayer.get() setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
        [m_videoLayer.get() setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
        // Observe readiness so hasAvailableVideoFrame() can report layer state.
        [m_videoLayer.get() addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
        updateVideoLayerGravity();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
        // If fullscreen was entered before the layer existed, parent it now.
        if (m_videoFullscreenLayer) {
            [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
            [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
        }
#endif
        player()->mediaPlayerClient()->mediaPlayerRenderingModeChanged(player());
    });
}
569
// Detaches and releases the AVPlayerLayer, removing the KVO observation
// installed by createVideoLayer().
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    // Unparent from the fullscreen layer if it was inserted there.
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
#endif

    m_videoLayer = 0;
}
587
// When rendering into a layer, readiness comes from the layer's cached
// readyForDisplay state; otherwise a frame counts once one has been painted.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    bool renderingToLayer = currentRenderingMode() == MediaRenderingToLayer;
    return renderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
595
596 #if ENABLE(AVF_CAPTIONS)
// Maps a platform text-track kind to the AVFoundation media-characteristic
// array used when registering out-of-band tracks.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Caption:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    case PlatformTextTrack::Subtitle:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    default:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
614     
// Entry point called when an out-of-band track's mode changes; delegates to
// the base class so selection state can be resynchronized.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
619     
// Pushes the mode (hidden/disabled/showing) chosen for each out-of-band
// platform text track onto the matching private track object. Tracks are
// matched by the unique identifier stored in the AVMediaSelectionOption.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    
    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks are driven from platform track sources.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;
        
        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();
        
        for (auto& track : outOfBandTrackSources) {
            // The unique id was stored as the option's out-of-band identifier
            // when the asset was created (see createAVAssetForURL()).
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();
            
            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;
            
            // Default to Hidden if the platform mode is unrecognized.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;
            
            textTrack->setMode(mode);
            break;
        }
    }
}
650 #endif
651
// Builds the AVURLAsset for |url|: applies reference restrictions, HTTP
// header fields (Referer / User-Agent), optional iTunes query-component
// inheritance, the client bundle identifier, out-of-band text tracks, and
// (on iOS) the bound network interface, then attaches the loader delegate.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p)", this);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

#if ENABLE(AVF_CAPTIONS)
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        // Use adoptNS so the array is released when it goes out of scope; the
        // previous bare alloc/init had no matching release and leaked.
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:
                [NSDictionary dictionaryWithObjectsAndKeys:
                    reinterpret_cast<const NSString*>(label.get()), AVOutOfBandAlternateTrackDisplayNameKey,
                    reinterpret_cast<const NSString*>(language.get()), AVOutOfBandAlternateTrackExtendedLanguageTagKey,
                    [NSNumber numberWithBool: (trackSource->isDefault() ? YES : NO)], AVOutOfBandAlternateTrackIsDefaultKey,
                    reinterpret_cast<const NSString*>(uniqueID.get()), AVOutOfBandAlternateTrackIdentifierKey,
                    reinterpret_cast<const NSString*>(url.get()), AVOutOfBandAlternateTrackSourceKey,
                    mediaDescriptionForKind(trackSource->kind()), AVOutOfBandAlternateTrackMediaCharactersticsKey,
                    nil]];
        }

        [options.get() setObject: outOfBandTracks.get() forKey: AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

    NSURL *cocoaURL = URL(ParsedURLString, url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route custom resource loads through our delegate on its serial queue.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
726
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    // Lazily create the AVPlayer; subsequent calls are no-ops.
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    // Suppress notification callbacks while the player is being configured.
    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    // KVO-observe the playback rate so the observer can keep cached state in sync.
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(IOS_AIRPLAY)
    // Also track external (AirPlay) playback state; the observer must be registered
    // before updateDisableExternalPlayback() runs.
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
    updateDisableExternalPlayback();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:YES];
#endif

#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

    // Attach an already-created player item, if any.
    if (m_avPlayerItem)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

    setDelayCallbacks(false);
}
756
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    // Lazily create the AVPlayerItem; subsequent calls are no-ops.
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    // Suppress notification callbacks while the item is being configured.
    setDelayCallbacks(true);

    // Create the player item so we can load media data.
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // KVO-observe every item property we cache, including "prior" notifications.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    if (m_avPlayer)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

#if PLATFORM(IOS)
    AtomicString value;
    if (player()->doesHaveAttribute("data-youtube-id", &value))
        [m_avPlayerItem.get() setDataYouTubeID:value];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver legible (caption) samples slightly ahead of their display time.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:[NSArray array]]);
    // Suppress AVFoundation's own cue rendering; the delegate receives the cues instead.
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

    setDelayCallbacks(false);
}
798
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Ask AVFoundation about playability at most once per asset.
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    auto weakThis = createWeakPtr();

    NSArray *keys = [NSArray arrayWithObject:@"playable"];
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:keys completionHandler:^{
        // Bounce to the main thread; bail if the player has since been destroyed.
        callOnMainThread([weakThis] {
            if (!weakThis)
                return;
            weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
815
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    // A dispatch group joins the asynchronous asset-level metadata load with one
    // asynchronous per-track metadata load per track; metadataLoaded fires only
    // after every member of the group has completed.
    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
    dispatch_group_enter(metadataLoadingGroup);
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            // Only enumerate tracks if this object is still alive and the "tracks" key loaded.
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    // Each track load keeps the group open until its completion handler runs.
                    dispatch_group_enter(metadataLoadingGroup);
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup);
                    }];
                }
            }
            // Balances the dispatch_group_enter above for the asset-level load.
            dispatch_group_leave(metadataLoadingGroup);
        });
    }];

    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });

        // Balances dispatch_group_create; the group's lifetime is managed manually here.
        dispatch_release(metadataLoadingGroup);
    });
}
847
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    // Shorthand for the base class that declares the ItemStatus enumerators.
    typedef MediaPlayerPrivateAVFoundation Base;

    // Without a player item there is no status to report.
    if (!m_avPlayerItem)
        return Base::MediaPlayerAVPlayerItemStatusDoesNotExist;

    // Unknown and Failed take precedence over any cached buffering state.
    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return Base::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return Base::MediaPlayerAVPlayerItemStatusFailed;

    // Report the most specific cached buffering state, in priority order.
    if (m_cachedLikelyToKeepUp)
        return Base::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return Base::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return Base::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return Base::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
866
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    // Package the AVPlayer (possibly nil) in a PlatformMedia wrapper for the caller.
    PlatformMedia platformMedia;
    platformMedia.type = PlatformMedia::AVFoundationMediaPlayerType;
    platformMedia.media.avfMediaPlayer = m_avPlayer.get();
    return platformMedia;
}
875
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    // The video layer used for layer-backed rendering, or null before one is created.
    return m_videoLayer.get();
}
880
881 #if PLATFORM(IOS)
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    // Detach the video layer from the previous fullscreen container before switching.
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];

    m_videoFullscreenLayer = videoFullscreenLayer;

    if (m_videoFullscreenLayer && m_videoLayer) {
        CGRect frame = CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
        [m_videoLayer setFrame:frame];
        // Insert at index 0 so layers added afterwards (e.g. the text track
        // representation below) render on top of the video.
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    }

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }
#if ENABLE(IOS_AIRPLAY)
    updateDisableExternalPlayback();
#endif
}
906
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    // Remember the fullscreen frame; resizing only matters once a fullscreen layer exists.
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer) {
        CGRect layerFrame = CGRectMake(0, 0, frame.width(), frame.height());
        [m_videoLayer setFrame:layerFrame];
    }

    syncTextTrackBounds();
}
918
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    // Map WebCore's gravity to the corresponding AVFoundation layer gravity constant,
    // defaulting to aspect-fit for unexpected values (matching the original fallback).
    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:videoGravity];
}
937
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // Return the cached timed-metadata array, or nil when none has been stored.
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
944
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    // Serialize the player item's access log; empty when no item exists yet.
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *accessLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[accessLog extendedLogData] encoding:[accessLog extendedLogDataStringEncoding]]);
    return logText.get();
}
955
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    // Serialize the player item's error log; empty when no item exists yet.
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *errorLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[errorLog extendedLogData] encoding:[errorLog extendedLogDataStringEncoding]]);
    return logText.get();
}
966 #endif
967
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Toggle layer visibility inside a transaction with implicit animations disabled.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:(isVisible ? NO : YES)];
    [CATransaction commit];
}
976     
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    // Playing is modeled as setting the requested rate on the AVPlayer;
    // cache it so rate() can answer without querying AVFoundation.
    setDelayCallbacks(true);
    float rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
988
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    // Pausing is modeled as a playback rate of zero.
    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1000
float MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaPlayer::invalidTime();

    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    CMTime cmDuration;
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return narrowPrecisionToFloat(CMTimeGetSeconds(cmDuration));

    // An indefinite CMTime maps to an infinite duration.
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return std::numeric_limits<float>::infinity();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %.0f", this, MediaPlayer::invalidTime());
    return MediaPlayer::invalidTime();
}
1026
float MediaPlayerPrivateAVFoundationObjC::currentTime() const
{
    // Without metadata or a player item there is no meaningful playhead position.
    if (!metaDataAvailable() || !m_avPlayerItem)
        return 0;

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return 0;

    // Clamp negative values to zero.
    return std::max(narrowPrecisionToFloat(CMTimeGetSeconds(itemTime)), 0.0f);
}
1038
void MediaPlayerPrivateAVFoundationObjC::seekToTime(double time, double negativeTolerance, double positiveTolerance)
{
    // Seeking generates several event callbacks; delay them until the seek is dispatched.
    setDelayCallbacks(true);

    // Partial metadata cues cannot survive a seek.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    CMTime targetTime = CMTimeMakeWithSeconds(time, 600);
    CMTime toleranceBefore = CMTimeMakeWithSeconds(negativeTolerance, 600);
    CMTime toleranceAfter = CMTimeMakeWithSeconds(positiveTolerance, 600);

    auto weakThis = createWeakPtr();
    [m_avPlayerItem.get() seekToTime:targetTime toleranceBefore:toleranceBefore toleranceAfter:toleranceAfter completionHandler:^(BOOL finished) {
        // Report completion on the main thread, but only if this object still exists.
        callOnMainThread([weakThis, finished] {
            if (auto* strongThis = weakThis.get())
                strongThis->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1065
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
    // Forward the volume to the AVPlayer once media metadata is known.
    if (metaDataAvailable())
        [m_avPlayer.get() setVolume:volume];
}
1073
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // The parameter is only read by LOG below, which can be compiled out;
    // UNUSED_PARAM keeps such builds warning-free.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1083
void MediaPlayerPrivateAVFoundationObjC::updateRate()
{
    // Push the currently requested rate to the AVPlayer, caching it for rate().
    setDelayCallbacks(true);
    float rate = requestedRate();
    m_cachedRate = rate;
    [m_avPlayer.get() setRate:rate];
    setDelayCallbacks(false);
}
1091
float MediaPlayerPrivateAVFoundationObjC::rate() const
{
    // Report the cached rate rather than querying AVFoundation; zero before metadata arrives.
    return metaDataAvailable() ? m_cachedRate : 0;
}
1099
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto timeRanges = PlatformTimeRanges::create();

    // Without a player item there are no cached loaded ranges to convert.
    if (!m_avPlayerItem)
        return timeRanges;

    // Convert every valid, non-empty cached CMTimeRange into a PlatformTimeRanges entry.
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        timeRanges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return timeRanges;
}
1114
double MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return 0;

    // Find the earliest start among all valid, non-empty seekable ranges.
    bool foundRange = false;
    double earliestStart = std::numeric_limits<double>::infinity();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundRange = true;
        double start = CMTimeGetSeconds(range.start);
        if (start < earliestStart)
            earliestStart = start;
    }
    // Zero when no usable range exists.
    return foundRange ? earliestStart : 0;
}
1134
double MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Fetch the seekable ranges on demand if the cache is empty.
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    // The answer is the largest end among all valid, non-empty seekable ranges.
    double latestEnd = 0;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        double end = CMTimeGetSeconds(CMTimeRangeGetEnd(range));
        if (end > latestEnd)
            latestEnd = end;
    }
    return latestEnd;
}
1152
float MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED <= 1080
    // AVFoundation on Mountain Lion will occasionally not send a KVO notification
    // when loadedTimeRanges changes when there is no video output. In that case
    // update the cached value explicitly.
    if (!hasLayerRenderer() && !hasContextRenderer())
        m_cachedLoadedRanges = [m_avPlayerItem loadedTimeRanges];
#endif

    if (!m_cachedLoadedRanges)
        return 0;

    // The answer is the largest end among all valid, non-empty loaded ranges.
    float latestEnd = 0;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        float end = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(range)));
        if (end > latestEnd)
            latestEnd = end;
    }

    return latestEnd;
}
1179
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    // Sum the per-track sample data lengths once; reuse the cached total afterwards.
    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *track in m_cachedTracks.get())
            m_cachedTotalBytes += [[track assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1193
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    // Adopt an externally supplied asset; no observers or outputs are registered here.
    m_avAsset = asset;
}
1198
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    // Derive an aggregate status from the load state of every metadata key we request.
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading; // At least one key is not loaded yet.

        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // Every key loaded; distinguish a merely-loaded asset from a playable one.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1227
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

    // Prefer the video output path when a frame is available. Note: when
    // HAVE(AVFOUNDATION_VIDEO_OUTPUT) is off, the "else" disappears and the
    // image-generator call below becomes unconditional.
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
1248
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    // paint() is best effort: skip when metadata is missing, painting is disabled,
    // we are already rendering to a layer, or no image generator / video output exists.
    bool canPaint = metaDataAvailable() && !context->paintingDisabled()
        && currentRenderingMode() != MediaRenderingToLayer && hasContextRenderer();
    if (canPaint)
        paintCurrentFrameInContext(context, rect);
}
1264
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
{
    // Snapshot the current frame and draw it flipped into CG's bottom-left-origin space.
    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    // No explicit release needed: RetainPtr drops the image when it goes out of scope.
}
1278
static HashSet<String> mimeTypeCache()
{
    // Lazily build the set of MIME types AVFoundation reports as playable; the
    // list is queried once and reused for the lifetime of the process.
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add(mimeType);

    return cache;
}
1294
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    // Make sure an image generator exists before asking it for a frame.
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // Copy the generated image into the device RGB color space before handing it out.
    RetainPtr<CGImageRef> generatedImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    RetainPtr<CGImageRef> convertedImage = adoptCF(CGImageCreateCopyWithColorSpace(generatedImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return convertedImage;
}
1316
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    // Report every MIME type AVFoundation can play.
    supportedTypes = mimeTypeCache();
}
1321
1322 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
static bool keySystemIsSupported(const String& keySystem)
{
    // Only the Apple FairPlay Streaming key-system identifiers are recognized.
    return equalIgnoringCase(keySystem, "com.apple.fps") || equalIgnoringCase(keySystem, "com.apple.fps.1_0");
}
1329 #endif
1330
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // Media Source content is not handled by this engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // With codecs present, ask AVFoundation about the full extended MIME type.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1367
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // An empty key system is never supported; otherwise require a recognized key
    // system and, when a MIME type is given, one AVFoundation can play.
    if (keySystem.isEmpty())
        return false;

    if (!keySystemIsSupported(keySystem))
        return false;

    if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
        return false;

    return true;
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
    return false;
#endif
}
1386
1387 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // "skd" URLs carry key requests; route them to the EME key machinery instead
    // of the generic resource loader below.
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): the element count divides by sizeof(unsigned char) (== 1);
        // verify this matches the intended number of UTF-16 units to copy.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        // Exactly one keyNeeded() overload is compiled in; if it declines, do not wait.
#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so a later key response can be delivered to it.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }
#endif

    // All other requests are serviced by a WebCore resource loader kept alive in the map.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1423
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    UNUSED_PARAM(nsChallenge);
    // FIXME: <rdar://problem/15799844>
    return false;
#else
    // Wrap the platform challenge and let the MediaPlayer client decide whether to wait.
    AuthenticationChallenge challenge(nsChallenge);

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
#endif
}
1436
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // Stop the WebCore-side loader servicing this AVFoundation request, if one exists.
    // (The previously computed URL scheme was unused and has been removed.)
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1446
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    // Drop our reference to the loader servicing this request.
    m_resourceLoaderMap.remove(avRequest);
}
1451 #endif
1452
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    // This engine requires both the AVFoundation and CoreMedia frameworks to be loadable.
    return AVFoundationLibrary() && CoreMediaLibrary();
}
1457
float MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(float timeValue) const
{
    // Both paths currently return the input unchanged; the early return documents
    // that no conversion is attempted before metadata is available.
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1466
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    // Pick aspect-fit or stretch-to-fill, then apply it inside a transaction
    // with implicit animations disabled.
    NSString* gravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1478
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    // Return the first track whose "enabled" flag is set, or nil when none is.
    NSUInteger trackIndex = [tracks indexOfObjectPassingTest:^(id obj, NSUInteger, BOOL *) {
        return [static_cast<AVAssetTrack*>(obj) isEnabled];
    }];
    return trackIndex == NSNotFound ? nil : [tracks objectAtIndex:trackIndex];
}
1488
// Recomputes the cached media characteristics after AVFoundation reports a
// change in the track collection: hasVideo/hasAudio/hasClosedCaptions, the
// presentation size, the audio/video track lists, and the primary audio track
// language. Characteristic-changed notifications are delayed for the duration
// so observers see one coalesced update at the end.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary-audio language so a change can be detected below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // The presentation size is the video track's natural size with its preferred transform applied.
        presentationSizeDidChange(firstEnabledVideoTrack ? IntSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : IntSize());
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        // Classify every enabled track in the player item by its media type.
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

        // Always say we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // With media selection groups, captions come from the legible group's playable options.
    if (AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia()) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    // Only fire characteristicsChanged() if the primary audio language actually changed.
    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}
1583
1584 #if ENABLE(VIDEO_TRACK)
// Diffs the player item's current tracks of the given media type against the
// wrapper objects in oldItems. New platform tracks get wrappers created via
// itemFactory; disappeared tracks have their wrappers dropped. The MediaPlayer
// is notified of every removal and addition, and on return oldItems holds the
// surviving wrappers followed by the newly created ones.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Platform tracks of the requested media type that are currently present.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    // Platform tracks we already have wrappers for.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    typedef Vector<RefT> ItemVector;
    for (auto i = oldItems.begin(); i != oldItems.end(); ++i)
        [oldTracks addObject:(*i)->playerItemTrack()];

    // Set difference: tracks that disappeared since the last diff...
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    // ...and tracks that are new since the last diff.
    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    // Partition the old wrappers into removed and surviving ones.
    for (auto i = oldItems.begin(); i != oldItems.end(); ++i) {
        if ([removedTracks containsObject:(*i)->playerItemTrack()])
            removedItems.append(*i);
        else
            replacementItems.append(*i);
    }

    // Create wrappers for the newly appeared platform tracks.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify only after oldItems already reflects the new state.
    for (auto i = removedItems.begin(); i != removedItems.end(); ++i)
        (player->*removedFunction)(*i);

    for (auto i = addedItems.begin(); i != addedItems.end(); ++i)
        (player->*addedFunction)(*i);
}
1625
// Re-syncs m_audioTracks (and the MediaPlayer's audio track list) with the
// cached player item tracks of type AVMediaTypeAudio.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
1638
// Re-syncs m_videoTracks (and the MediaPlayer's video track list) with the
// cached player item tracks of type AVMediaTypeVideo.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
1651
// Captions must be rendered into a platform layer only while the video is in
// iOS fullscreen (a fullscreen layer exists); everywhere else in-page
// rendering suffices.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    return !!m_videoFullscreenLayer;
#else
    return false;
#endif
}
1660
// Keeps the text track representation layer aligned with the video content:
// match the video layer's videoRect when we have one, otherwise fill the
// fullscreen frame. iOS-only; a no-op elsewhere and when either layer is missing.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;
    
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
1671
// Installs (or removes, when representation is null) the platform layer used
// to render captions during iOS fullscreen playback. Re-setting the same layer
// only refreshes its bounds. No-op on other platforms.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    if (representationLayer == m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        return;
    }

    // Detach the previous representation layer before swapping in the new one.
    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Attach the new layer only while fullscreen; bounds are synced before adding.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
1695 #endif // ENABLE(VIDEO_TRACK)
1696
// Pushes the cached presentation size, rounded to integer coordinates, to the
// base class as the natural size once an asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (m_avAsset)
        setNaturalSize(roundedIntSize(m_cachedPresentationSize));
}
1704
1705 #if PLATFORM(IOS)
// Placeholder for the iOS build: always returns nil until the real lookup
// lands in WebKit System Interface.
// FIXME: Implement for iOS in WebKit System Interface.
static inline NSURL *wkAVAssetResolvedURL(AVAsset*)
{
    return nil;
}
1711 #endif
1712
// The media data comes from a single security origin when the URL AVFoundation
// actually resolved has the same scheme/host/port origin as the URL we asked it
// to load. Returns false before an asset exists.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset)
        return false;

    RefPtr<SecurityOrigin> originOfResolvedURL = SecurityOrigin::create(URL(wkAVAssetResolvedURL(m_avAsset.get())));
    RefPtr<SecurityOrigin> originOfRequestedURL = SecurityOrigin::createFromString(assetURL());
    return originOfResolvedURL->isSameSchemeHostPort(originOfRequestedURL.get());
}
1722
1723 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates an AVPlayerItemVideoOutput, attaches it to the current player
// item, and blocks briefly (via waitForVideoOutputMediaDataWillChange) so the
// first paint has a frame available. No-op when there is no player item yet or
// an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox we can accept 4:2:2 output and convert to BGRA on demand
    // in createPixelBuffer().
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_422YpCbCr8) };
#else
    // Without VideoToolbox, request BGRA directly since that is what painting
    // expects. (Uses the same dictionary-literal style as the branch above,
    // replacing the legacy dictionaryWithObjectsAndKeys: form.)
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([[getAVPlayerItemVideoOutputClass() alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    waitForVideoOutputMediaDataWillChange();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
1748
// Detaches the video output from the player item (when one is still attached)
// and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());
    m_videoOutput = 0;
}
1760
// Pulls the pixel buffer for the player item's current time from the video
// output, creating the output on demand. Returns null when no new buffer is
// available for that time (callers then keep displaying the previous image).
// With VideoToolbox, the 4:2:2 buffer is converted to 32BGRA before returning.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Convert to 32BGRA, which is what createImageFromPixelBuffer() asserts.
    // CVPixelBufferCreate can fail (e.g. under memory pressure); the previous
    // code used outputBuffer unconditionally, passing an uninitialized pointer
    // to VTPixelTransferSessionTransferImage on failure. Bail out instead so
    // the caller keeps the last good image.
    CVPixelBufferRef outputBuffer = nullptr;
    if (CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer) != kCVReturnSuccess || !outputBuffer)
        return 0;

    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
1801
// Answers whether painting can use the video output path: either a decoded
// image is already cached, or the (possibly freshly created) output has a new
// pixel buffer for the item's current time.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    CMTime itemTime = [m_avPlayerItem currentTime];
    return [m_videoOutput hasNewPixelBufferForItemTime:itemTime];
}
1815
// CGDataProvider "get byte pointer" callback: locks the pixel buffer's base
// address (read-only) for as long as Core Graphics holds the pointer.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(buffer);
}
1822
// CGDataProvider "release byte pointer" callback: balances the read-only lock
// taken in CVPixelBufferGetBytePointerCallback above.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
}
1828
// CGDataProvider "release info" callback: balances the CFRetain performed in
// createImageFromPixelBuffer() when the provider was created.
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CVPixelBufferRef buffer = static_cast<CVPixelBufferRef>(info);
    CFRelease(buffer);
}
1834
// Wraps a 32BGRA CVPixelBuffer in a CGImage without copying the pixels: a
// direct-access CGDataProvider locks/unlocks the buffer around Core Graphics'
// reads, and the CFRetain below keeps the buffer alive until the provider's
// release-info callback fires.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // BGRA in memory is alpha-first when read as little-endian 32-bit values.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
1852
// Refreshes m_lastImage from the video output. copyPixelBufferForItemTime: may
// return nil when the buffer for the requested time was already retrieved; in
// that case the previously cached image is intentionally kept.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
1863
// Draws the most recent video frame into outputRect. Does nothing when no
// frame has ever been produced (leaving previous page content untouched).
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& outputRect)
{
    updateLastImage();
    if (!m_lastImage)
        return;

    GraphicsContextStateSaver stateSaver(*context);

    IntRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, outputRect, imageRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
    // video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();
}
1881
// Returns the CGImage for the current time, refreshing the cached image from
// the video output first. May return null if no frame was ever produced.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
1887
// Blocks the calling thread for at most one second until the video output
// signals (via outputMediaDataWillChange()) that media data is available.
// Called right after attaching the output so the first paint has a frame.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // dispatch_semaphore_wait returns non-zero on timeout.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
1901
// Pull-delegate callback from the video output; releases the thread parked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
1906 #endif
1907
1908 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Parses the (untrusted) EME initData blob into its three components. Layout,
// with every length field a little-endian uint32:
// [4 bytes: keyURI length][N bytes: keyURI][4 bytes: contentID length], [N bytes: contentID], [4 bytes: certificate length][N bytes: certificate]
// Returns false on any truncated or malformed input, leaving the out-params
// in an unspecified state.
bool MediaPlayerPrivateAVFoundationObjC::extractKeyURIKeyIDAndCertificateFromInitData(Uint8Array* initData, String& keyURI, String& keyID, RefPtr<Uint8Array>& certificate)
{
    if (initData->byteLength() < 4)
        return false;

    RefPtr<ArrayBuffer> initDataBuffer = initData->buffer();

    // Use a DataView to read uint32 values from the buffer, as Uint32Array requires the reads be aligned on 4-byte boundaries. 
    RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
    uint32_t offset = 0;
    bool status = true;

    uint32_t keyURILength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    // Bounds checks are written as "length > remaining" rather than
    // "offset + length > total": a hostile length field near UINT32_MAX could
    // wrap the addition and bypass the old-style check.
    if (!status || offset > initData->length() || keyURILength > initData->length() - offset)
        return false;

    // NOTE(review): Uint16Array::create takes an element count while
    // keyURILength comes from a byte-oriented layout — confirm the producer
    // writes UTF-16 code-unit counts here (create returns null on overflow).
    RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, offset, keyURILength);
    if (!keyURIArray)
        return false;

    keyURI = String(keyURIArray->data(), keyURILength / sizeof(unsigned short));
    offset += keyURILength;

    uint32_t keyIDLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || offset > initData->length() || keyIDLength > initData->length() - offset)
        return false;

    RefPtr<Uint16Array> keyIDArray = Uint16Array::create(initDataBuffer, offset, keyIDLength);
    if (!keyIDArray)
        return false;

    keyID = String(keyIDArray->data(), keyIDLength / sizeof(unsigned short));
    offset += keyIDLength;

    uint32_t certificateLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || offset > initData->length() || certificateLength > initData->length() - offset)
        return false;

    certificate = Uint8Array::create(initDataBuffer, offset, certificateLength);
    if (!certificate)
        return false;

    return true;
}
1958 #endif
1959
1960 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1: builds a streaming content key request for the key URI carried in
// initData and reports it to the client via keyMessage(). On success the
// pending AVAssetResourceLoadingRequest is re-filed under a freshly generated
// session ID so addKey()/cancelKeyRequest() can find it later.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // The key URI must match a resource-loading request we previously deferred.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    // Ask AVFoundation for the key request blob, using the app certificate and
    // the UTF-8 encoded key ID as the content identifier.
    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface AVFoundation's underlying error code to the client.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyURIToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2002
// EME v1: delivers key data obtained by the client to the pending
// AVAssetResourceLoadingRequest for sessionID and completes that request.
// initDataPtr/initDataLength are unused by this key system.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);

    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Hand the key bytes to the deferred loading request and finish it.
    RetainPtr<AVAssetResourceLoadingRequest> pendingRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[pendingRequest.get() dataRequest] respondWithData:keyData.get()];
    [pendingRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    return MediaPlayer::NoError;
}
2023
// EME v1: abandons the pending key request associated with sessionID.
// Returns InvalidPlayerState when no such session exists.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // HashMap::remove() reports whether the key was present, so one lookup
    // replaces the previous contains() + remove() pair.
    if (!m_sessionIDToRequestMap.remove(sessionID))
        return MediaPlayer::InvalidPlayerState;

    return MediaPlayer::NoError;
}
2035 #endif
2036
2037 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the deferred loading request for keyURI (null if none).
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2042
// EME v2: creates a CDM session for a supported key system, null otherwise.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    if (keySystemIsSupported(keySystem))
        return std::make_unique<CDMSessionAVFoundationObjC>(this);
    return nullptr;
}
2050 #endif
2051
2052 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Re-syncs m_textTracks with the player item's legacy (pre-selection-group)
// closed-caption tracks: keeps wrappers whose platform track still exists,
// creates wrappers for new CC tracks, and hands the leftover (disappeared)
// wrappers to processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        // Iterate backwards so remove(i - 1) does not invalidate pending indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Cast from removedTextTracks, not m_textTracks: once entries have been
            // removed from removedTextTracks (or appended to m_textTracks below),
            // the two vectors no longer share indices, so indexing m_textTracks
            // here could compare against the wrong track. This mirrors the
            // indexing in processMediaSelectionOptions().
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2087 #endif
2088
2089 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Returns the asset's legible media selection group, but only once the
// availableMediaCharacteristicsWithMediaSelectionOptions property has finished
// loading; returns nil before then (hence "safe").
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    if (!m_avAsset)
        return nil;

    if ([m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] == AVKeyValueStatusLoaded)
        return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];

    return nil;
}
2100
// Re-syncs m_textTracks with the playable options of the legible media
// selection group: keeps wrappers for options that still exist, creates
// wrappers for new ones, and hands the leftover (disappeared) wrappers to
// processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not invalidate pending indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy CC tracks are not backed by a selection option; skip them.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            // This option already has a wrapper; keep it and stop treating it as removed.
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2156
// Lazily creates the single in-band metadata text track and reports it to the
// player; subsequent calls are no-ops.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (!m_metadataTrack) {
        m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
        m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
        player()->addTextTrack(m_metadataTrack);
    }
}
2166
// Forwards the attributed cue strings for the given time to the currently
// selected text track, if any.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, double time)
{
    if (m_currentTextTrack)
        m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), time);
}
2174
// Clears all pending cue state on the currently selected text track, if any.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2184 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2185
// Switches cue delivery to the given in-band track (or to none when track is
// null), toggling the matching platform-level caption machinery: legacy CC via
// the player's closed-caption display flag, modern tracks via the legible
// media selection group.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // No track selected: deselect any legible option and disable CC display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2214
// Returns the language identifier of the primary audio track, caching the answer in
// m_languageOfPrimaryAudioTrack (null string means "not yet computed"). Returns
// emptyString() when no single primary audio language can be determined.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: there is no single "primary" language to report.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    // isEmpty() is the idiomatic WTF::String emptiness check (true for null and empty),
    // and avoids constructing a temporary via emptyString() just to compare.
    if (m_languageOfPrimaryAudioTrack.isEmpty())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2256
2257 #if ENABLE(IOS_AIRPLAY)
// Returns true when the AVPlayer is currently routing playback to an external
// (AirPlay / TV-out) target. Returns false when no player exists yet.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool wirelessTarget = [m_avPlayer.get() isExternalPlaybackActive];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));
    return wirelessTarget;
}
2267
// Maps the WebKitSystemInterface external-device type for the current player onto
// MediaPlayer's wireless target-type enum. TargetTypeNone when there is no player.
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // The switch above covers every wkExternalPlaybackType value; anything else is a bug.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;
}
2285
// Returns the user-visible name of the current external playback device (e.g. an
// Apple TV's name), or emptyString() when there is no player.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();
    
    String wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
2296
// Returns true when wireless (external) video playback is disallowed. Refreshes the
// cached m_allowsWirelessVideoPlayback from the player when one exists.
// NOTE(review): this is a const method writing to m_allowsWirelessVideoPlayback —
// presumably the member is declared mutable as a cache; confirm in the header.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;
    
    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
2307
// Records the new policy, then pushes it to the AVPlayer if one exists. The cached
// value is kept even without a player so a later-created player can pick it up.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;
    
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
}
2317
// Keeps the AVPlayer's "use external playback while an external screen is active" flag
// in sync with whether we are presenting into a fullscreen video layer.
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    if (!m_avPlayer)
        return;

    // Use .get() for the message send, matching how m_avPlayer is messaged everywhere
    // else in this file.
    [m_avPlayer.get() setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
}
2325 #endif
2326
// KVO: the AVPlayerItem's status changed; cache it, then re-derive the ready/network state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2333
// KVO "prior" notification: a likelyToKeepUp change is imminent. Bump the pending-change
// counter so updateStates() is deferred until the matching did-change arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2338
// KVO: likelyToKeepUp changed. Cache the new value; only re-derive state once all
// pending will-change notifications have been balanced by did-changes.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    // Every did-change must have been preceded by a will-change.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2347
// KVO "prior" notification: a bufferEmpty change is imminent; defer updateStates()
// until the matching did-change (see playbackBufferEmptyDidChange).
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
2352
// KVO: playbackBufferEmpty changed. Cache it; re-derive state only once the pending
// will-change counter drops back to zero.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    // Every did-change must have been preceded by a will-change.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2361
// KVO "prior" notification: a bufferFull change is imminent; defer updateStates()
// until the matching did-change (see playbackBufferFullDidChange).
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
2366
// KVO: playbackBufferFull changed. Cache it; re-derive state only once the pending
// will-change counter drops back to zero.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    // Every did-change must have been preceded by a will-change.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2375
// KVO: the item's seekable time ranges changed. Cache the new array, then notify the
// base class and re-derive state.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2383
// KVO: the item's loaded (buffered) time ranges changed. Cache the new array, then
// notify the base class and re-derive state.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2391
// KVO (player layer): readyForDisplay changed. If the first frame just became available
// but we don't yet know about a video track, force a track re-scan before updating state.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2399
// KVO (AVPlayerItemTrack): a track's "enabled" flag flipped; re-scan tracks and state.
// The new value itself is unused — tracksChanged() re-reads everything.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2405
// Enables/disables data buffering by attaching or detaching the AVPlayerItem from the
// AVPlayer: a player with no current item stops loading media data entirely.
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    // Log under the actual method name (was mislabeled "shouldBufferData").
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(%p) - %s", this, boolString(shouldBuffer));
    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;
    
    // Remember the policy even without a player; it is applied when one is created.
    if (!m_avPlayer)
        return;

    if (m_shouldBufferData)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];
    else
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:nil];
}
2422
2423 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the WebKit DataCue "type" string exposed
// to script. Returns emptyAtom for unrecognized key spaces. The AtomicStrings are
// NeverDestroyed so the returned references stay valid for the process lifetime.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserData("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserData("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadata("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadata("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3Metadata("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserData;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadata;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3Metadata;

    return emptyAtom;
}
2445 #endif
2446
// Called when the AVPlayerItem's timedMetadata changes. Converts each AVMetadataItem in
// the batch into a data cue on the metadata track; a nil/NSNull payload closes out any
// still-open cues at |mediaTime|.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, double mediaTime)
{
    // KVO delivers NSNull when the property transitions to nil; normalize that to nil.
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %.2f", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, mediaTime);

#if ENABLE(DATACUE_VALUE)
    if (seeking())
        return;

    // Lazily create the metadata track before any use. Previously the nil/NSNull branch
    // below ran first and could dereference a null m_metadataTrack if an empty metadata
    // notification arrived before any metadata had been processed.
    if (!m_metadataTrack)
        processMetadataTrack();

    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    double earliestStartTime = std::numeric_limits<double>::infinity();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        double start = CMTimeGetSeconds(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        double start = CMTimeGetSeconds(item.time);
        // Items without a valid duration remain open-ended until a later batch closes them.
        double end = std::numeric_limits<double>::infinity();
        if (CMTIME_IS_VALID(item.duration))
            end = start + CMTimeGetSeconds(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
2488
// KVO: the item's track list changed. Rebalance the "enabled" observers — remove from
// every previously cached track, then add to every new one — so observation stays 1:1.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    m_cachedTracks = tracks;
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The byte total depends on the track set; force it to be recomputed on next query.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
2503
// KVO: hasEnabledAudio changed; cache it, then re-scan tracks and re-derive state.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
2511
// KVO: the item's presentationSize changed; cache it, then propagate the size change.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
2519
// KVO: the item's duration changed (in seconds); cache it and drop any derived
// duration the base class is holding.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(double duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
2526
// KVO: the AVPlayer's rate changed (0 == paused); cache it, update state, then notify.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
2534     
2535 #if ENABLE(IOS_AIRPLAY)
// KVO: externalPlaybackActive flipped; forward to the base class notification.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
2540 #endif
2541
// AVAsset properties to load asynchronously before the asset is considered ready.
// The array is created lazily and intentionally never released (process-lifetime static).
// NOTE(review): the lazy init is not guarded against concurrent first use — presumably
// only ever called from one thread; confirm.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys;
    if (!keys) {
        keys = [[NSArray alloc] initWithObjects:@"duration",
                    @"naturalSize",
                    @"preferredTransform",
                    @"preferredVolume",
                    @"preferredRate",
                    @"playable",
                    @"tracks",
                    @"availableMediaCharacteristicsWithMediaSelectionOptions",
                   nil];
    }
    return keys;
}
2558
// AVPlayerItem key paths observed via KVO (see observeValueForKeyPath: below, which
// dispatches on these exact strings). Lazily created, intentionally leaked static.
NSArray* itemKVOProperties()
{
    static NSArray* keys;
    if (!keys) {
        keys = [[NSArray alloc] initWithObjects:@"presentationSize",
                @"status",
                @"asset",
                @"tracks",
                @"seekableTimeRanges",
                @"loadedTimeRanges",
                @"playbackLikelyToKeepUp",
                @"playbackBufferFull",
                @"playbackBufferEmpty",
                @"duration",
                @"hasEnabledAudio",
                @"timedMetadata",
                nil];
    }
    return keys;
}
2579
// AVAssetTrack properties to load asynchronously per track. Intentionally leaked static.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keys;
}
2585
2586 } // namespace WebCore
2587
@implementation WebCoreAVFMovieObserver

// m_callback is the owning MediaPlayerPrivateAVFoundationObjC. It is raw (unretained)
// and is cleared via -disconnect before the owner dies, so every entry point below
// nil-checks it before use.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Severs the back-pointer to the player and cancels any pending perform requests so no
// queued selector fires after the owner is destroyed.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

// Forwards AVAsset metadata-loading completion to the player as a main-thread notification.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// AVPlayerItemDidPlayToEndTimeNotification handler.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO dispatch. Binds the appropriate player callback for the (context, keyPath,
// prior-or-not) combination into |function|, then schedules it on the main thread guarded
// by a WeakPtr to the player.
// NOTE(review): keyPath is declared without an explicit type (implicitly id) even though
// it is used as an NSString; an explicit (NSString *) would be clearer.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - keyPath = %s", self, [keyPath UTF8String]);

    if (!m_callback)
        return;

    // True for the "prior" (will-change) half of an NSKeyValueObservingOptionPrior pair.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

    WTF::Function<void ()> function;

    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    // Will-change notifications bump the player's pending-status counter so state
    // derivation is deferred until the matching did-change.
    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            // NOTE(review): the new value for @"asset" is an AVAsset, not an NSArray; the
            // RetainPtr<NSArray> type here looks like a mismatch that happens to work
            // because setAsset presumably takes the value opaquely — confirm.
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, CMTimeGetSeconds([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time alongside the metadata so cue end times can
            // be closed out at the right position; clamp invalid times to 0.
            double now = 0;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(narrowPrecisionToFloat(CMTimeGetSeconds(itemTime)), 0.0f);
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        }
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(IOS_AIRPLAY)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }
    
    if (function.isNull())
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput delegate: caption strings arrived; bounce them to the main
// thread, retaining self so m_callback can be re-checked there.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    callOnMainThread([strongSelf, strongStrings, itemTime] {
        // Re-check on the main thread: the player may have disconnected in the interim.
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        callback->processCue(strongStrings.get(), CMTimeGetSeconds(itemTime));
    });
}

// Legible output was flushed (e.g. after a seek); drop any displayed cues on the main thread.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;
    
    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    callOnMainThread([strongSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
            callback->flushCues();
    });
}
#endif

@end
2745
2746 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

// m_callback is the owning MediaPlayerPrivateAVFoundationObjC (unretained); it is reset
// via -setCallback: before the owner dies, so delegate methods nil-check it.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// AVAssetResourceLoader delegate: answer YES to take ownership of the request, then
// resolve it on the main thread. If the player is gone by then, fail the request.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            [strongRequest finishLoadingWithError:nil];
            return;
        }

        if (!callback->shouldWaitForLoadingOfResource(strongRequest.get()))
            [strongRequest finishLoadingWithError:nil];
    });

    return YES;
}

// AVAssetResourceLoader delegate: same pattern for authentication challenges. Server-trust
// challenges are declined outright; others are answered on the main thread, cancelling
// the challenge if the player is gone or declines to handle it.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> strongChallenge = challenge;
    callOnMainThread([strongSelf, strongChallenge] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
            return;
        }

        if (!callback->shouldWaitForResponseToAuthenticationChallenge(strongChallenge.get()))
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
    });

    return YES;
}

// AVAssetResourceLoader delegate: forward a cancelled request to the player on the main thread.
- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (callback)
            callback->didCancelLoadingRequest(strongRequest.get());
    });
}

// Re-points (or clears) the back-pointer to the owning player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
2825 #endif
2826
2827 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@implementation WebCoreAVFPullDelegate
// AVPlayerItemOutputPullDelegate that forwards video-output readiness to the owning
// player. m_callback is unretained and cleared via -setCallback: before the owner dies.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (self)
        m_callback = callback;
    return self;
}

// Re-points (or clears) the back-pointer to the owning player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

// New video frames will become available from the output; notify the player.
- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (m_callback)
        m_callback->outputMediaDataWillChange(output);
}

- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
    // No-op.
}
@end
2854 #endif
2855
2856 #endif