[Mac] process raw VTT in-band captions
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29 #import "MediaPlayerPrivateAVFoundationObjC.h"
30
31 #import "AVTrackPrivateAVFObjCImpl.h"
32 #import "AudioTrackPrivateAVFObjC.h"
33 #import "AuthenticationChallenge.h"
34 #import "BlockExceptions.h"
35 #import "CDMSessionAVFoundationObjC.h"
36 #import "ExceptionCodePlaceholder.h"
37 #import "FloatConversion.h"
38 #import "FloatConversion.h"
39 #import "FrameView.h"
40 #import "GraphicsContext.h"
41 #import "GraphicsContextCG.h"
42 #import "InbandMetadataTextTrackPrivateAVF.h"
43 #import "InbandTextTrackPrivateAVFObjC.h"
44 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
45 #import "OutOfBandTextTrackPrivateAVF.h"
46 #import "URL.h"
47 #import "Logging.h"
48 #import "MediaTimeMac.h"
49 #import "PlatformTimeRanges.h"
50 #import "SecurityOrigin.h"
51 #import "SerializedPlatformRepresentationMac.h"
52 #import "SoftLinking.h"
53 #import "TextTrackRepresentation.h"
54 #import "UUID.h"
55 #import "VideoTrackPrivateAVFObjC.h"
56 #import "WebCoreAVFResourceLoader.h"
57 #import "WebCoreSystemInterface.h"
58 #import <objc/runtime.h>
59 #import <runtime/DataView.h>
60 #import <runtime/JSCInlines.h>
61 #import <runtime/TypedArrayInlines.h>
62 #import <runtime/Uint16Array.h>
63 #import <runtime/Uint32Array.h>
64 #import <runtime/Uint8Array.h>
65 #import <wtf/CurrentTime.h>
66 #import <wtf/Functional.h>
67 #import <wtf/NeverDestroyed.h>
68 #import <wtf/text/CString.h>
69 #import <wtf/text/StringBuilder.h>
70
71 #if ENABLE(AVF_CAPTIONS)
72 #include "TextTrack.h"
73 #endif
74
75 #import <Foundation/NSGeometry.h>
76 #import <AVFoundation/AVFoundation.h>
77 #if PLATFORM(IOS)
78 #import <CoreImage/CoreImage.h>
79 #else
80 #import <QuartzCore/CoreImage.h>
81 #endif
82 #import <CoreMedia/CoreMedia.h>
83
84 #if USE(VIDEOTOOLBOX)
85 #import <CoreVideo/CoreVideo.h>
86 #import <VideoToolbox/VideoToolbox.h>
87 #endif
88
89 #if ENABLE(AVF_CAPTIONS)
90 // Note: This must be defined before our SOFT_LINK macros:
91 @class AVMediaSelectionOption;
92 @interface AVMediaSelectionOption (OutOfBandExtensions)
93 @property (nonatomic, readonly) NSString* outOfBandSource;
94 @property (nonatomic, readonly) NSString* outOfBandIdentifier;
95 @end
96 #endif
97
98 #if PLATFORM(IOS)
99 @class AVPlayerItem;
100 @interface AVPlayerItem (WebKitExtensions)
101 @property (nonatomic, copy) NSString* dataYouTubeID;
102 @end
103 #endif
104
105 @interface AVURLAsset (WebKitExtensions)
106 @property (nonatomic, readonly) NSURL *resolvedURL;
107 @end
108
109 typedef AVPlayerItem AVPlayerItemType;
110 typedef AVMetadataItem AVMetadataItemType;
111
112 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
113 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
114 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
115 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
116
117 #if USE(VIDEOTOOLBOX)
118 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
119 #endif
120
121 SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
122 SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
123 SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
124 SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))
125
126 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
127 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
128 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
129 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
130 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
131 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
132 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
133 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
134
135 #if USE(VIDEOTOOLBOX)
136 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
137 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
138 #endif
139
140 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
141 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
142 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
143 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
144 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
145 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
146 SOFT_LINK_CLASS(AVFoundation, AVMetadataItem)
147
148 SOFT_LINK_CLASS(CoreImage, CIContext)
149 SOFT_LINK_CLASS(CoreImage, CIImage)
150
151 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
152 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
153 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
154 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
155 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
156 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeMetadata, NSString *)
157 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
158 SOFT_LINK_POINTER(AVFoundation, AVURLAssetInheritURIQueryComponentFromReferencingURIKey, NSString *)
159 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
160 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
161 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
162 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspectFill, NSString *)
163 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
164 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
165
166 SOFT_LINK_POINTER_OPTIONAL(AVFoundation, AVURLAssetClientBundleIdentifierKey, NSString *)
167
168 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
169
170 #define AVPlayer getAVPlayerClass()
171 #define AVPlayerItem getAVPlayerItemClass()
172 #define AVPlayerLayer getAVPlayerLayerClass()
173 #define AVURLAsset getAVURLAssetClass()
174 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
175 #define AVMetadataItem getAVMetadataItemClass()
176
177 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
178 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
179 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
180 #define AVMediaTypeVideo getAVMediaTypeVideo()
181 #define AVMediaTypeAudio getAVMediaTypeAudio()
182 #define AVMediaTypeMetadata getAVMediaTypeMetadata()
183 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
184 #define AVURLAssetInheritURIQueryComponentFromReferencingURIKey getAVURLAssetInheritURIQueryComponentFromReferencingURIKey()
185 #define AVURLAssetClientBundleIdentifierKey getAVURLAssetClientBundleIdentifierKey()
186 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
187 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
188 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
189 #define AVLayerVideoGravityResizeAspectFill getAVLayerVideoGravityResizeAspectFill()
190 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
191 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
192
193 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
194 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
195 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
196
197 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
198 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
199 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
200
201 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
202 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
203 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
204 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
205
206 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
207 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
208 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
209 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
210 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
211 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
212 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
213 #endif
214
215 #if ENABLE(AVF_CAPTIONS)
216 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
217 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
218 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
219 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
220 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
221 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
222 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
223 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
224 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
225
226 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
227 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
228 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
229 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
230 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
231 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
232 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
233 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
234 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
235 #endif
236
237 #if ENABLE(DATACUE_VALUE)
238 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeUserData, NSString*)
239 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceISOUserData, NSString*)
240 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceQuickTimeMetadata, NSString*)
241 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceiTunes, NSString*)
242 SOFT_LINK_POINTER(AVFoundation, AVMetadataKeySpaceID3, NSString*)
243
244 #define AVMetadataKeySpaceQuickTimeUserData getAVMetadataKeySpaceQuickTimeUserData()
245 #define AVMetadataKeySpaceISOUserData getAVMetadataKeySpaceISOUserData()
246 #define AVMetadataKeySpaceQuickTimeMetadata getAVMetadataKeySpaceQuickTimeMetadata()
247 #define AVMetadataKeySpaceiTunes getAVMetadataKeySpaceiTunes()
248 #define AVMetadataKeySpaceID3 getAVMetadataKeySpaceID3()
249 #endif
250
251 #if PLATFORM(IOS)
252 SOFT_LINK_POINTER(AVFoundation, AVURLAssetBoundNetworkInterfaceName, NSString *)
253
254 #define AVURLAssetBoundNetworkInterfaceName getAVURLAssetBoundNetworkInterfaceName()
255 #endif
256
257 #define kCMTimeZero getkCMTimeZero()
258
259 using namespace WebCore;
260
261 enum MediaPlayerAVFoundationObservationContext {
262     MediaPlayerAVFoundationObservationContextPlayerItem,
263     MediaPlayerAVFoundationObservationContextPlayerItemTrack,
264     MediaPlayerAVFoundationObservationContextPlayer,
265     MediaPlayerAVFoundationObservationContextAVPlayerLayer,
266 };
267
// Observer object that receives KVO notifications, did-play-to-end
// notifications and (when legible output is supported) in-band caption
// payloads on behalf of a MediaPlayerPrivateAVFoundationObjC instance.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer; -disconnect is expected to clear it before the player dies.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath is untyped (implicitly id); KVO passes an NSString * here — confirm before adding the type.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
287
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Receives AVAssetResourceLoader requests (on the global loader delegate
// queue) and forwards them to the owning MediaPlayerPrivateAVFoundationObjC.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer; reset via -setCallback: during teardown.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
297
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for AVPlayerItemVideoOutput: notified when new video frames
// become available to pull for painting.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Raw back-pointer; reset via -setCallback: during teardown.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
309
310 namespace WebCore {
311
312 static NSArray *assetMetadataKeyNames();
313 static NSArray *itemKVOProperties();
314 static NSArray* assetTrackMetadataKeyNames();
315
#if !LOG_DISABLED
// Maps a bool onto a printable token for LOG() messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
322
#if ENABLE(ENCRYPTED_MEDIA_V2)
// Global map from a MediaPlayer to its private AVFoundation implementation,
// used by the CDM session code to locate the private player for a MediaPlayer.
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEPRECATED_DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
} // Removed stray trailing semicolon after the function body.
#endif
331
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the shared serial queue on which all AVAssetResourceLoaderDelegate
// callbacks are delivered. Created once, lazily.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t loaderQueue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        loaderQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return loaderQueue;
}
#endif
343
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the shared serial queue used by the WebCoreAVFPullDelegate for
// video-output callbacks. Created once, lazily.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t pullQueue;
    static dispatch_once_t creationToken;
    dispatch_once(&creationToken, ^{
        pullQueue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return pullQueue;
}
#endif
355
// Factory used by the media-engine registrar to instantiate this engine.
// BUG FIX: the opening brace of the function body was missing.
PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
{
    return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
}
360
// Registers this engine's factory and capability callbacks with MediaPlayer,
// but only when AVFoundation is actually available at runtime.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (!isAvailable())
        return;

    registrar(create, getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
366
// Initializes cached playback state to "nothing loaded yet"; the AVPlayer,
// AVPlayerItem and AVAsset are created lazily once loading begins. The Obj-C
// observer/delegate helpers are created eagerly so they exist before any
// AVFoundation object needs them.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
#if PLATFORM(IOS)
    , m_videoFullscreenGravity(MediaPlayer::VideoGravityResizeAspect)
#endif
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTextTrack(0)
    , m_cachedDuration(MediaPlayer::invalidTime())
    , m_cachedRate(0)
    , m_cachedTotalBytes(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
    , m_shouldBufferData(true)
    , m_cachedIsReadyForDisplay(false)
    , m_haveBeenAskedToCreateLayer(false)
#if ENABLE(IOS_AIRPLAY)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register so CDM session code can find this private player by its MediaPlayer.
    playerToPrivateMap().set(player, this);
#endif
}
404
// Disconnects delegates and outputs before cancelLoad() releases the
// AVFoundation objects, so no callback can arrive on a destroyed player.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the resource loader delegate and fail any in-flight loads.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];

    for (auto& pair : m_resourceLoaderMap)
        pair.value->invalidate();
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    // Stop video-output callbacks; release the semaphore if one was created
    // (dispatch objects require an explicit release here).
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
    if (m_videoOutputSemaphore)
        dispatch_release(m_videoOutputSemaphore);
#endif

    if (m_videoLayer)
        destroyVideoLayer();

    cancelLoad();
}
429
// Aborts any in-progress load and releases the AVAsset, AVPlayerItem and
// AVPlayer, removing every observer and output first so no stale callbacks
// fire, then resets all cached state to its "no item" defaults.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every KVO observation registered when the item/player were created.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];

        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(IOS_AIRPLAY)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = 0;

    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];
    m_cachedTracks = nullptr;

    setIgnoreLoadStateChanges(false);
}
487
// A layer renderer is considered present as soon as layer creation has been
// requested, even though the AVPlayerLayer itself is created asynchronously
// (see createVideoLayer()).
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_haveBeenAskedToCreateLayer;
}
492
// A context renderer exists when we have either a video output (preferred)
// or an image generator to draw frames into a graphics context.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    return m_videoOutput || m_imageGenerator;
#else
    return m_imageGenerator;
#endif
}
501
// Creates whichever context renderer this build supports: an
// AVPlayerItemVideoOutput when available, otherwise an AVAssetImageGenerator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
510
// Creates the AVAssetImageGenerator used to snapshot frames for painting.
// No-op until an asset exists, and never re-creates an existing generator.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    // Zero tolerance on both sides so snapshots land on the exact requested time.
    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
527
// Destroys both possible context renderers; each destroy is a no-op when the
// corresponding object was never created.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
535
// Releases the AVAssetImageGenerator, if any. Safe to call repeatedly.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    // Use nullptr (not 0) to reset the smart pointer, matching the rest of the file.
    m_imageGenerator = nullptr;
}
545
// Requests asynchronous creation of the AVPlayerLayer on the main thread.
// m_haveBeenAskedToCreateLayer is only flipped inside the callback, and the
// weak pointer guards against the player being destroyed before it runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_haveBeenAskedToCreateLayer)
            return;
        m_haveBeenAskedToCreateLayer = true;

        if (!m_videoLayer)
            createAVPlayerLayer();

        // Let the client know rendering switched to layer-backed mode.
        player()->mediaPlayerClient()->mediaPlayerRenderingModeChanged(player());
    });
}
566
// Creates the AVPlayerLayer, attaches it to the AVPlayer, registers KVO on
// readyForDisplay, and sizes it from the client's content box (or the
// fullscreen frame on iOS when a fullscreen layer is active).
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerLayer()
{
    if (!m_avPlayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer setPlayer:m_avPlayer.get()];
    [m_videoLayer setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
    [m_videoLayer setName:@"MediaPlayerPrivate AVPlayerLayer"];
#endif
    // Observe readyForDisplay so hasAvailableVideoFrame() can reflect the layer's state.
    [m_videoLayer addObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextAVPlayerLayer];
    updateVideoLayerGravity();
    IntSize defaultSize = player()->mediaPlayerClient() ? player()->mediaPlayerClient()->mediaPlayerContentBoxRect().pixelSnappedSize() : IntSize();
    [m_videoLayer setFrame:CGRectMake(0, 0, defaultSize.width(), defaultSize.height())];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer) {
        [m_videoLayer setFrame:CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height())];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    }
#endif
}
591
// Detaches KVO and the player from the layer before releasing it, so no
// readyForDisplay notification can arrive after teardown.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"readyForDisplay"];
    [m_videoLayer.get() setPlayer:nil];

#if PLATFORM(IOS)
    if (m_videoFullscreenLayer)
        [m_videoLayer removeFromSuperlayer];
#endif

    // Use nullptr (not 0) to reset the smart pointer, matching the rest of the file.
    m_videoLayer = nullptr;
}
609
// Reports whether a video frame is ready: for layer-backed rendering this is
// the cached readyForDisplay state; otherwise whether painting has produced a frame.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    return currentRenderingMode() == MediaRenderingToLayer ? m_cachedIsReadyForDisplay : m_videoFrameHasDrawn;
}
617
618 #if ENABLE(AVF_CAPTIONS)
// Maps a WebCore text-track kind onto the AVFoundation media characteristics
// describing an out-of-band track. Caption, Subtitle and any unknown kind all
// map to the "transcribes spoken dialog" characteristic.
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    // FIXME: Match these to correct types:
    switch (kind) {
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    default:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
636     
// Forwards an out-of-band track mode change to the base-class handler.
void MediaPlayerPrivateAVFoundationObjC::notifyTrackModeChanged()
{
    trackModeChanged();
}
641     
// Pushes the mode (hidden/disabled/showing) of each out-of-band platform
// track source onto the matching AVF text track, matching tracks by the
// unique identifier embedded in the media selection option.
void MediaPlayerPrivateAVFoundationObjC::synchronizeTextTrackState()
{
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();

    for (auto& textTrack : m_textTracks) {
        // Only out-of-band tracks carry an outOfBandIdentifier to match against.
        if (textTrack->textTrackCategory() != InbandTextTrackPrivateAVF::OutOfBand)
            continue;

        RefPtr<OutOfBandTextTrackPrivateAVF> trackPrivate = static_cast<OutOfBandTextTrackPrivateAVF*>(textTrack.get());
        RetainPtr<AVMediaSelectionOptionType> currentOption = trackPrivate->mediaSelectionOption();

        for (auto& track : outOfBandTrackSources) {
            RetainPtr<CFStringRef> uniqueID = String::number(track->uniqueId()).createCFString();

            if (![[currentOption.get() outOfBandIdentifier] isEqual: reinterpret_cast<const NSString*>(uniqueID.get())])
                continue;

            // Translate the platform mode into the inband-track mode enum.
            InbandTextTrackPrivate::Mode mode = InbandTextTrackPrivate::Hidden;
            if (track->mode() == PlatformTextTrack::Hidden)
                mode = InbandTextTrackPrivate::Hidden;
            else if (track->mode() == PlatformTextTrack::Disabled)
                mode = InbandTextTrackPrivate::Disabled;
            else if (track->mode() == PlatformTextTrack::Showing)
                mode = InbandTextTrackPrivate::Showing;

            textTrack->setMode(mode);
            break;
        }
    }
}
672 #endif
673
// Builds the AVURLAsset for the given URL, assembling the creation options:
// reference restrictions, HTTP headers (Referer/User-Agent), query-component
// inheritance, client bundle identifier, out-of-band caption tracks, and
// (on iOS) the bound network interface. No-op if an asset already exists.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p) - url = %s", this, url.utf8().data());

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid local<->remote cross references inside the media resource.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

    if (player()->doesHaveAttribute("x-itunes-inherit-uri-query-component"))
        [options.get() setObject: [NSNumber numberWithBool: TRUE] forKey: AVURLAssetInheritURIQueryComponentFromReferencingURIKey];

    // AVURLAssetClientBundleIdentifierKey is soft-linked optional; check it exists.
    String identifier = player()->sourceApplicationIdentifier();
    if (!identifier.isEmpty() && AVURLAssetClientBundleIdentifierKey)
        [options setObject:identifier forKey:AVURLAssetClientBundleIdentifierKey];

#if ENABLE(AVF_CAPTIONS)
    // Describe each out-of-band text track so AVFoundation exposes it as a
    // media selection option alongside any in-band tracks.
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:@{
                AVOutOfBandAlternateTrackDisplayNameKey: reinterpret_cast<const NSString*>(label.get()),
                AVOutOfBandAlternateTrackExtendedLanguageTagKey: reinterpret_cast<const NSString*>(language.get()),
                AVOutOfBandAlternateTrackIsDefaultKey: trackSource->isDefault() ? @YES : @NO,
                AVOutOfBandAlternateTrackIdentifierKey: reinterpret_cast<const NSString*>(uniqueID.get()),
                AVOutOfBandAlternateTrackSourceKey: reinterpret_cast<const NSString*>(url.get()),
                AVOutOfBandAlternateTrackMediaCharactersticsKey: mediaDescriptionForKind(trackSource->kind()),
            }];
        }

        [options.get() setObject:outOfBandTracks.get() forKey:AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

#if PLATFORM(IOS)
    String networkInterfaceName = player()->mediaPlayerNetworkInterfaceName();
    if (!networkInterfaceName.isEmpty())
        [options setObject:networkInterfaceName forKey:AVURLAssetBoundNetworkInterfaceName];
#endif

    NSURL *cocoaURL = URL(ParsedURLString, url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route key/custom-scheme resource requests through our loader delegate.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
747
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    // Lazily create the AVPlayer, register the KVO observations we rely on,
    // and attach any layer/player item created before the player existed.
    // Safe to call repeatedly; subsequent calls are no-ops.
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    // Defer change notifications until setup is complete so observers only
    // see a fully configured player.
    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    // Observe rate changes so the cached playback rate stays in sync.
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(IOS_AIRPLAY)
    // Also track external (AirPlay) playback state changes.
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
    updateDisableExternalPlayback();
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:YES];
#endif

#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

    // Only video needs a rendering layer; audio-only players skip it.
    if (player()->mediaPlayerClient() && player()->mediaPlayerClient()->mediaPlayerIsVideo())
        createAVPlayerLayer();

    // Attach a player item that was created before the player existed.
    if (m_avPlayerItem)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

    setDelayCallbacks(false);
}
780
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    // Lazily create the AVPlayerItem for the current asset, register the
    // notifications/KVO we rely on, and attach it to the player if one exists.
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    // Defer change notifications until setup is complete.
    setDelayCallbacks(true);

    // Create the player item so we can load media data.
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // Observe every property in itemKVOProperties(); "prior" notifications let
    // the observer react both before and after a value changes.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    if (m_avPlayer)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

#if PLATFORM(IOS)
    AtomicString value;
    if (player()->doesHaveAttribute("data-youtube-id", &value))
        [m_avPlayerItem.get() setDataYouTubeID: value];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver in-band legible (caption/subtitle) samples to us instead of
    // letting AVFoundation render them; request WebVTT as the native format.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    RetainPtr<NSArray> subtypes = adoptNS([[NSArray alloc] initWithObjects:[NSNumber numberWithUnsignedInt:kCMSubtitleFormatType_WebVTT], nil]);
    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:subtypes.get()]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

    setDelayCallbacks(false);
}
823
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    // Kick off an asynchronous load of the asset's "playable" key, exactly
    // once per asset, and notify the main thread when the answer is known.
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);
    // Weak pointer: this object may be destroyed before the handler runs.
    auto weakThis = createWeakPtr();

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        callOnMainThread([weakThis] {
            if (weakThis)
                weakThis->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
        });
    }];
}
840
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    // Asynchronously load the asset-level metadata keys, then the per-track
    // keys of every track; a dispatch group joins all of the loads so that
    // metadataLoaded is signalled only once everything has finished.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);

    dispatch_group_t metadataLoadingGroup = dispatch_group_create();
    dispatch_group_enter(metadataLoadingGroup);
    // Weak pointer: this object may be destroyed before the handlers run.
    auto weakThis = createWeakPtr();
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{

        callOnMainThread([weakThis, metadataLoadingGroup] {
            if (weakThis && [weakThis->m_avAsset.get() statusOfValueForKey:@"tracks" error:nil] == AVKeyValueStatusLoaded) {
                // One extra group entry per track; each leaves when its keys load.
                for (AVAssetTrack *track in [weakThis->m_avAsset.get() tracks]) {
                    dispatch_group_enter(metadataLoadingGroup);
                    [track loadValuesAsynchronouslyForKeys:assetTrackMetadataKeyNames() completionHandler:^{
                        dispatch_group_leave(metadataLoadingGroup);
                    }];
                }
            }
            // Balances the enter issued before the asset-level load started.
            dispatch_group_leave(metadataLoadingGroup);
        });
    }];

    dispatch_group_notify(metadataLoadingGroup, dispatch_get_main_queue(), ^{
        callOnMainThread([weakThis] {
            if (weakThis)
                [weakThis->m_objcObserver.get() metadataLoaded];
        });

        // Balance dispatch_group_create() above (dispatch objects are
        // released manually in this file).
        dispatch_release(metadataLoadingGroup);
    });
}
872
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    // Translate the cached AVPlayerItem state into the engine-neutral enum.
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch (m_cachedItemStatus) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    // Buffering state, most specific first.
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
891
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    // Package the underlying AVPlayer so platform code can reach it directly.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia pm;
    pm.type = PlatformMedia::AVFoundationMediaPlayerType;
    pm.media.avfMediaPlayer = m_avPlayer.get();
    return pm;
}
900
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    // Only hand out the video layer once a client has explicitly asked for
    // layer-backed rendering.
    if (!m_haveBeenAskedToCreateLayer)
        return nullptr;
    return m_videoLayer.get();
}
905
906 #if PLATFORM(IOS)
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenLayer(PlatformLayer* videoFullscreenLayer)
{
    // Reparent the video layer (and any text track representation layer) into
    // the given fullscreen host layer; a null layer detaches fullscreen hosting.
    if (m_videoFullscreenLayer == videoFullscreenLayer)
        return;

    // Detach the video layer from the previous fullscreen host first.
    if (m_videoFullscreenLayer)
       [m_videoLayer removeFromSuperlayer];

    m_videoFullscreenLayer = videoFullscreenLayer;

    if (m_videoFullscreenLayer && m_videoLayer) {
        // Size the video layer to the cached fullscreen frame and place it
        // behind every other sublayer of the host.
        CGRect frame = CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
        [m_videoLayer setFrame:frame];
        [m_videoFullscreenLayer insertSublayer:m_videoLayer.get() atIndex:0];
    }

    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        // Captions are added on top of the video layer.
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }
#if ENABLE(IOS_AIRPLAY)
    updateDisableExternalPlayback();
#endif
}
931
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenFrame(FloatRect frame)
{
    // Remember the frame even when no fullscreen layer exists yet; it is
    // applied when a layer is attached in setVideoFullscreenLayer().
    m_videoFullscreenFrame = frame;
    if (!m_videoFullscreenLayer)
        return;

    if (m_videoLayer) {
        CGRect videoFrame = CGRectMake(0, 0, frame.width(), frame.height());
        [m_videoLayer setFrame:videoFrame];
    }

    syncTextTrackBounds();
}
943
void MediaPlayerPrivateAVFoundationObjC::setVideoFullscreenGravity(MediaPlayer::VideoGravity gravity)
{
    m_videoFullscreenGravity = gravity;
    if (!m_videoLayer)
        return;

    // Map the cross-platform gravity value onto the AVFoundation constant,
    // defaulting to aspect-fit for unexpected values.
    NSString *videoGravity;
    switch (gravity) {
    case MediaPlayer::VideoGravityResize:
        videoGravity = AVLayerVideoGravityResize;
        break;
    case MediaPlayer::VideoGravityResizeAspect:
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    case MediaPlayer::VideoGravityResizeAspectFill:
        videoGravity = AVLayerVideoGravityResizeAspectFill;
        break;
    default:
        ASSERT_NOT_REACHED();
        videoGravity = AVLayerVideoGravityResizeAspect;
        break;
    }

    [m_videoLayer setVideoGravity:videoGravity];
}
962
NSArray *MediaPlayerPrivateAVFoundationObjC::timedMetadata() const
{
    // nil (rather than an empty array) signals that no timed metadata exists.
    return m_currentMetaData ? m_currentMetaData.get() : nil;
}
969
String MediaPlayerPrivateAVFoundationObjC::accessLog() const
{
    // Serialize the player item's access log in its extended textual form.
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemAccessLog *itemLog = [m_avPlayerItem.get() accessLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[itemLog extendedLogData] encoding:[itemLog extendedLogDataStringEncoding]]);

    return logText.get();
}
980
String MediaPlayerPrivateAVFoundationObjC::errorLog() const
{
    // Serialize the player item's error log in its extended textual form.
    if (!m_avPlayerItem)
        return emptyString();

    AVPlayerItemErrorLog *itemLog = [m_avPlayerItem.get() errorLog];
    RetainPtr<NSString> logText = adoptNS([[NSString alloc] initWithData:[itemLog extendedLogData] encoding:[itemLog extendedLogDataStringEncoding]]);

    return logText.get();
}
991 #endif
992
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Toggle the video layer's hidden flag with implicit animations disabled
    // so the change takes effect immediately.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
1001     
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    // Start playback by pushing the requested rate to AVPlayer, caching it so
    // rate() can answer without querying the player.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    // Suppress re-entrant notifications while poking the player's rate.
    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1013
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    // Pause playback by setting the rate to 0, mirroring it in the cache.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    // Suppress re-entrant notifications while poking the player's rate.
    setDelayCallbacks(true);
    m_cachedRate = 0;
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
1025
float MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Asking the asset for its duration before it has loaded would fetch the
    // answer synchronously, so report "invalid" until loading completes.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
         return MediaPlayer::invalidTime();

    // Prefer the player item's duration when it is ready; some assets never
    // report a duration of their own.
    CMTime cmDuration;
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return narrowPrecisionToFloat(CMTimeGetSeconds(cmDuration));

    // An indefinite duration maps to infinity (e.g. an open-ended stream).
    if (CMTIME_IS_INDEFINITE(cmDuration))
        return std::numeric_limits<float>::infinity();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %.0f", this, MediaPlayer::invalidTime());
    return MediaPlayer::invalidTime();
}
1051
float MediaPlayerPrivateAVFoundationObjC::currentTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return 0;

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return 0;

    // Clamp to zero so a slightly negative report never escapes.
    return std::max(narrowPrecisionToFloat(CMTimeGetSeconds(itemTime)), 0.0f);
}
1063
void MediaPlayerPrivateAVFoundationObjC::seekToTime(double time, double negativeTolerance, double positiveTolerance)
{
    // Seek the player item asynchronously; seekCompleted() runs on the main
    // thread when AVFoundation reports the seek finished (or was interrupted).
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Partially accumulated in-band cues would be wrong after a seek.
    if (m_metadataTrack)
        m_metadataTrack->flushPartialCues();

    // Timescale 600 is evenly divisible by common frame rates.
    CMTime cmTime = CMTimeMakeWithSeconds(time, 600);
    CMTime cmBefore = CMTimeMakeWithSeconds(negativeTolerance, 600);
    CMTime cmAfter = CMTimeMakeWithSeconds(positiveTolerance, 600);

    // Weak pointer: this object may be destroyed before the handler runs.
    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
1090
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
    // Forward the volume straight to AVPlayer once metadata is available.
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
}
1098
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // NOTE(review): intentionally does nothing beyond logging — caption
    // visibility appears to be handled elsewhere in this engine; confirm.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
1108
void MediaPlayerPrivateAVFoundationObjC::updateRate()
{
    // Push the currently requested playback rate to AVPlayer, caching it so
    // rate() can answer without querying the player.
    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
1116
float MediaPlayerPrivateAVFoundationObjC::rate() const
{
    // Answer from the cached value written by updateRate()/platformPlay()
    // rather than querying AVPlayer directly.
    return metaDataAvailable() ? m_cachedRate : 0;
}
1124
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto ranges = PlatformTimeRanges::create();

    if (!m_avPlayerItem)
        return ranges;

    // Convert each cached CMTimeRange into the engine-neutral representation,
    // skipping invalid or empty entries.
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;
        ranges->add(toMediaTime(range.start), toMediaTime(CMTimeRangeGetEnd(range)));
    }
    return ranges;
}
1139
double MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return 0;

    // Earliest start time across all valid, non-empty seekable ranges.
    bool foundRange = false;
    double earliestStart = std::numeric_limits<double>::infinity();
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundRange = true;
        earliestStart = std::min(earliestStart, CMTimeGetSeconds(range.start));
    }
    // No valid range at all reports 0, not infinity.
    return foundRange ? earliestStart : 0;
}
1159
double MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Lazily populate the cache if no KVO update has arrived yet.
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    // Latest end time across all valid, non-empty seekable ranges.
    double latestEnd = 0;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, CMTimeGetSeconds(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
1177
float MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED <= 1080
    // AVFoundation on Mountain Lion will occasionally not send a KVO notification
    // when loadedTimeRanges changes when there is no video output. In that case
    // update the cached value explicitly.
    if (!hasLayerRenderer() && !hasContextRenderer())
        m_cachedLoadedRanges = [m_avPlayerItem loadedTimeRanges];
#endif

    if (!m_cachedLoadedRanges)
        return 0;

    // Latest end time across all valid, non-empty loaded ranges.
    float latestEnd = 0;
    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(range))));
    }

    return latestEnd;
}
1204
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    // Computed lazily and memoized; a zero cache means "not computed yet".
    if (!m_cachedTotalBytes) {
        for (AVPlayerItemTrack *itemTrack in m_cachedTracks.get())
            m_cachedTotalBytes += [[itemTrack assetTrack] totalSampleDataLength];
    }

    return m_cachedTotalBytes;
}
1218
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    // Replace the current AVAsset.
    m_avAsset = asset;
}
1223
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    // Reduce the per-key AVAsset load status to the engine's single status;
    // the least-loaded key determines the overall answer.
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        NSError *error = nil;
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:&error];
#if !LOG_DISABLED
        if (error)
            LOG(Media, "MediaPlayerPrivateAVFoundation::assetStatus - statusOfValueForKey failed for %s, error = %s", [keyName UTF8String], [[error localizedDescription] UTF8String]);
#endif

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    // Every key loaded; distinguish merely loaded from actually playable.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
1252
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    // Draw the current video frame into the graphics context, preferring the
    // video output path when a frame is available and falling back to the
    // image generator. Objective-C exceptions are contained by the block macros.
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Remember that at least one frame has been rendered.
    m_videoFrameHasDrawn = true;
}
1273
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    // Best-effort software paint: only draws when a context renderer already
    // exists and no layer is presenting the video.
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    // paint() is best effort, so only paint if we already have an image generator or video output available.
    if (!hasContextRenderer())
        return;

    paintCurrentFrameInContext(context, rect);
}
1289
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
{
    // Snapshot the current frame via AVAssetImageGenerator and blit it into
    // the context. The translate/scale flips the CTM because the image is
    // drawn bottom-up relative to WebCore's coordinate system.
    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    // The redundant "image = 0;" dead store was removed; RetainPtr releases
    // the image automatically at scope exit.
}
1303
static HashSet<String> mimeTypeCache()
{
    // Build, once, the set of MIME types AVFoundation reports as playable.
    DEPRECATED_DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add(mimeType);

    return cache;
}
1319
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    // Produce a CGImage of the frame at |time|, no larger than |rect|, using
    // an AVAssetImageGenerator that is created on first use.
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    // Re-wrap the image in the device RGB color space before handing it out.
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1341
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    // Hand back a copy of the lazily built AVFoundation MIME type cache.
    supportedTypes = mimeTypeCache();
}
1346
1347 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
static bool keySystemIsSupported(const String& keySystem)
{
    // Only the two FairPlay Streaming identifiers are recognized here.
    return equalIgnoringCase(keySystem, "com.apple.fps") || equalIgnoringCase(keySystem, "com.apple.fps.1_0");
}
1354 #endif
1355
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // Media Source playback is handled by a different engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Ask AVFoundation about the full "type; codecs" string for a definitive answer.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1392
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // An empty key system is never supported.
    if (keySystem.isEmpty())
        return false;

    if (!keySystemIsSupported(keySystem))
        return false;

    // When a MIME type is given it must also be one AVFoundation can play.
    if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
        return false;

    return true;
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
    return false;
#endif
}
1411
1412 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    // Decide whether WebKit services this AVAssetResourceLoader request.
    // "skd" URLs are key requests routed through the keyNeeded path;
    // everything else is handed to a WebCoreAVFResourceLoader.
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Size prefix written little-endian (final `true` argument).
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): "/ sizeof(unsigned char)" divides by 1, so the count
        // is simply keyURI.length() UTF-16 units — confirm that is the intent.
        keyURIArray->setRange(StringView(keyURI).upconvertedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so it can be answered when the key arrives.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }
#endif

    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1448
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
    // Ask the client whether it will answer this challenge asynchronously.
#if USE(CFNETWORK)
    UNUSED_PARAM(nsChallenge);
    // FIXME: <rdar://problem/15799844>
    return false;
#else
    // Wrap the NSURLAuthenticationChallenge in WebCore's platform-neutral type.
    AuthenticationChallenge challenge(nsChallenge);

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
#endif
}
1461
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // Stop the loader servicing this request, if any. The entry stays in
    // m_resourceLoaderMap until didStopLoadingRequest() removes it.
    // (An unused local computing the URL scheme was removed.)
    WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest);

    if (resourceLoader)
        resourceLoader->stopLoading();
}
1471
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    // Drop our bookkeeping for a request that has finished loading.
    m_resourceLoaderMap.remove(avRequest);
}
1476 #endif
1477
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    // The engine is only usable when both frameworks can be loaded.
    return AVFoundationLibrary() && CoreMediaLibrary();
}
1482
float MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(float timeValue) const
{
    // Currently an identity mapping in both branches; see the FIXME.
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1491
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    // Apply the gravity change with implicit animations disabled so the layer
    // does not animate to its new scaling mode.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    NSString* gravity;
    if (shouldMaintainAspectRatio())
        gravity = AVLayerVideoGravityResizeAspect;
    else
        gravity = AVLayerVideoGravityResize;
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1503
// Returns the first track in |tracks| whose -isEnabled is YES, or nil when none is.
static AVAssetTrack* firstEnabledTrack(NSArray* tracks)
{
    for (AVAssetTrack* track in tracks) {
        if ([track isEnabled])
            return track;
    }
    return nil;
}
1513
// Called whenever the asset's or player item's track collection changes.
// Re-derives and caches hasVideo/hasAudio/hasClosedCaptions, refreshes the
// WebCore-side track lists, and fires characteristicsChanged() if the primary
// audio language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language and clear the cached value so
    // languageOfPrimaryAudioTrack() recomputes it below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Coalesce all characteristic changes into a single notification at the end.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        AVAssetTrack* firstEnabledVideoTrack = firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual]);
        setHasVideo(firstEnabledVideoTrack);
        setHasAudio(firstEnabledTrack([m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible]));
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif

        // The natural size must account for the track's preferredTransform (e.g. rotation).
        presentationSizeDidChange(firstEnabledVideoTrack ? IntSize(CGSizeApplyAffineTransform([firstEnabledVideoTrack naturalSize], [firstEnabledVideoTrack preferredTransform])) : IntSize());
    } else {
        // With a player item, classify its enabled tracks by media type.
        bool hasVideo = false;
        bool hasAudio = false;
        bool hasMetaData = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                NSString *mediaType = [assetTrack mediaType];
                if ([mediaType isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([mediaType isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([mediaType isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                } else if ([mediaType isEqualToString:AVMediaTypeMetadata]) {
                    hasMetaData = true;
                }
            }
        }

        // Always says we have video if the AVPlayerLayer is ready for display to work around
        // an AVFoundation bug which causes it to sometimes claim a track is disabled even
        // when it is not.
        setHasVideo(hasVideo || m_cachedIsReadyForDisplay);

        setHasAudio(hasAudio);
#if ENABLE(DATACUE_VALUE)
        if (hasMetaData)
            processMetadataTrack();
#endif

#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // With media selection groups, captions are exposed as legible selection options.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (legibleGroup && m_cachedTracks) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Without legible output support, fall back to legacy CC tracks when the
    // selection groups exposed no captions.
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "MediaPlayerPrivateAVFoundation:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}
1609
1610 #if ENABLE(VIDEO_TRACK)
// Computes the delta between the AVPlayerItemTracks currently exposed by the
// player item (filtered to |trackType|) and the WebCore-side items in |oldItems|.
// |oldItems| is updated in place to the new set, and the player is notified of
// each removal and addition.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Platform tracks of the requested media type currently in the item.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);

    // Platform tracks backing the WebCore items we already know about.
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);
    for (auto& oldItem : oldItems)
        [oldTracks addObject:oldItem->playerItemTrack()];

    // Set difference in each direction yields the removed and added platform tracks.
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the existing items into those whose track disappeared and those
    // that survive unchanged.
    Vector<RefT> survivingItems;
    Vector<RefT> addedItems;
    Vector<RefT> removedItems;
    for (auto& oldItem : oldItems) {
        if ([removedTracks containsObject:oldItem->playerItemTrack()])
            removedItems.append(oldItem);
        else
            survivingItems.append(oldItem);
    }

    // Create a WebCore item for each newly appeared platform track.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    // Survivors plus new items become the caller's current list.
    survivingItems.appendVector(addedItems);
    oldItems.swap(survivingItems);

    for (auto& removedItem : removedItems)
        (player->*removedFunction)(removedItem);

    for (auto& addedItem : addedItems)
        (player->*addedFunction)(addedItem);
}
1651
// Syncs m_audioTracks with the audio tracks currently in the player item,
// notifying the MediaPlayer of additions and removals.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
#if !LOG_DISABLED
    size_t count = m_audioTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateAudioTracks(%p) - audio track count was %lu, is %lu", this, count, m_audioTracks.size());
#endif
}
1664
// Syncs m_videoTracks with the video tracks currently in the player item,
// notifying the MediaPlayer of additions and removals.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
#if !LOG_DISABLED
    size_t count = m_videoTracks.size();
#endif

    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);

#if !LOG_DISABLED
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::updateVideoTracks(%p) - video track count was %lu, is %lu", this, count, m_videoTracks.size());
#endif
}
1677
// WebCore must render captions itself (into a TextTrackRepresentation layer)
// whenever the iOS fullscreen layer is active; never on other platforms.
bool MediaPlayerPrivateAVFoundationObjC::requiresTextTrackRepresentation() const
{
#if PLATFORM(IOS)
    return !!m_videoFullscreenLayer;
#else
    return false;
#endif
}
1686
// Keeps the caption-rendering layer aligned with the displayed video when in
// fullscreen on iOS; a no-op elsewhere.
void MediaPlayerPrivateAVFoundationObjC::syncTextTrackBounds()
{
#if PLATFORM(IOS)
    if (!m_videoFullscreenLayer || !m_textTrackRepresentationLayer)
        return;
    
    // Prefer the video layer's actual video rect; fall back to the fullscreen frame.
    CGRect textFrame = m_videoLayer ? [m_videoLayer videoRect] : CGRectMake(0, 0, m_videoFullscreenFrame.width(), m_videoFullscreenFrame.height());
    [m_textTrackRepresentationLayer setFrame:textFrame];
#endif
}
1697
// Installs (or removes) the platform layer into which WebCore renders text track
// cues during iOS fullscreen presentation; a no-op on other platforms.
void MediaPlayerPrivateAVFoundationObjC::setTextTrackRepresentation(TextTrackRepresentation* representation)
{
#if PLATFORM(IOS)
    PlatformLayer* representationLayer = representation ? representation->platformLayer() : nil;
    if (representationLayer == m_textTrackRepresentationLayer) {
        // Same layer as before; just make sure its bounds are up to date.
        syncTextTrackBounds();
        return;
    }

    // Detach the previous representation layer, if any, before swapping.
    if (m_textTrackRepresentationLayer)
        [m_textTrackRepresentationLayer removeFromSuperlayer];

    m_textTrackRepresentationLayer = representationLayer;

    // Attach the new layer only while fullscreen presentation is active.
    if (m_videoFullscreenLayer && m_textTrackRepresentationLayer) {
        syncTextTrackBounds();
        [m_videoFullscreenLayer addSublayer:m_textTrackRepresentationLayer.get()];
    }

#else
    UNUSED_PARAM(representation);
#endif
}
1721 #endif // ENABLE(VIDEO_TRACK)
1722
// Propagates the cached presentation size to the base class as the natural size,
// once an asset exists.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    setNaturalSize(roundedIntSize(m_cachedPresentationSize));
}
1730     
// Returns true when the URL the asset actually resolved to (after any redirects)
// shares scheme/host/port with the URL that was originally requested.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset)
        return false;
    
    RefPtr<SecurityOrigin> resolvedOrigin = SecurityOrigin::create(URL([m_avAsset resolvedURL]));
    RefPtr<SecurityOrigin> requestedOrigin = SecurityOrigin::createFromString(assetURL());
    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
1740
1741 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates the AVPlayerItemVideoOutput used to pull decoded frames for
// painting, attaches it to the current player item, and blocks briefly until it
// reports media data is available so the first paint does not miss a frame.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    // Need a player item to attach to; never create a second output.
    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox we can request 4:2:2 from the decoder and convert to BGRA
    // on demand in createPixelBuffer() via a VTPixelTransferSession.
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_422YpCbCr8) };
#else
    // Use modern dictionary-literal syntax, matching the branch above.
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([[getAVPlayerItemVideoOutputClass() alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    waitForVideoOutputMediaDataWillChange();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
1766
// Tears down the AVPlayerItemVideoOutput created by createVideoOutput(),
// detaching it from the player item first if the item still exists.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = 0;
}
1778
// Pulls the pixel buffer for the current item time from the video output,
// converting it to 32BGRA via VideoToolbox when necessary. Returns null when no
// new frame is available (the caller keeps displaying the last valid image).
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Convert the vended buffer to BGRA for CoreGraphics consumption. Check the
    // result of CVPixelBufferCreate: previously a failure left outputBuffer
    // uninitialized and passed garbage to VTPixelTransferSessionTransferImage.
    CVPixelBufferRef outputBuffer = 0;
    if (CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer) != kCVReturnSuccess || !outputBuffer)
        return 0;
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
1819
// Returns true if we already captured a frame, or if the video output can vend
// one for the current item time. Creates the output lazily on first use.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously captured image counts as an available frame.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
1833
// CGDataProvider "get byte pointer" callback: locks the CVPixelBuffer's base
// address for read-only access. Balanced by CVPixelBufferReleaseBytePointerCallback.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(pixelBuffer);
}
1840
// CGDataProvider "release byte pointer" callback: unlocks the base address,
// balancing the lock taken in CVPixelBufferGetBytePointerCallback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
1846
// CGDataProvider "release info" callback: releases the CVPixelBuffer that
// createImageFromPixelBuffer() retained, once the provider is destroyed.
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CFRelease(pixelBuffer);
}
1852
// Wraps a BGRA CVPixelBuffer in a CGImage without copying: the image reads the
// buffer's bytes directly through a CGDataProvider, and the buffer is kept alive
// (and locked while read) by the provider callbacks above.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // Little-endian 32-bit with alpha first corresponds to the BGRA byte order.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
1870
// Captures the current video frame into m_lastImage when a new one is available.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (pixelBuffer)
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
1881
// Paints the most recent frame obtained via the AVPlayerItemVideoOutput path into
// the given graphics context, scaled into outputRect.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& outputRect)
{
    updateLastImage();

    if (m_lastImage) {
        GraphicsContextStateSaver stateSaver(*context);

        // Draw the whole image, scaled to fill the requested output rect.
        IntRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));

        context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, outputRect, imageRect);

        // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
        // video frame, destroy it now that it is no longer needed.
        if (m_imageGenerator)
            destroyImageGenerator();
    }
}
1899
// Returns the current frame as a native (CG) image, refreshing it first if a new
// frame is available.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
1905
// Blocks the calling thread (up to one second) until the video output signals,
// via outputMediaDataWillChange(), that media data has become available.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second.
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // dispatch_semaphore_wait returns non-zero when the wait timed out.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
1919
// Delegate callback (invoked on the pull delegate queue) fired when the video
// output has new media data; wakes waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
1924 #endif
1925
1926 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Parses the EME initData blob into its key URI, key/content ID, and application
// certificate. Returns false on any malformed or truncated input. initData comes
// from the page and must be treated as untrusted.
bool MediaPlayerPrivateAVFoundationObjC::extractKeyURIKeyIDAndCertificateFromInitData(Uint8Array* initData, String& keyURI, String& keyID, RefPtr<Uint8Array>& certificate)
{
    // initData should have the following layout:
    // [4 bytes: keyURI length][N bytes: keyURI][4 bytes: contentID length], [N bytes: contentID], [4 bytes: certificate length][N bytes: certificate]
    if (initData->byteLength() < 4)
        return false;

    RefPtr<ArrayBuffer> initDataBuffer = initData->buffer();

    // Use a DataView to read uint32 values from the buffer, as Uint32Array requires the reads be aligned on 4-byte boundaries. 
    RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
    uint32_t offset = 0;
    bool status = true;

    // Each bounds check below is written as "length > remaining" rather than
    // "offset + length > total" so that a hostile length field cannot wrap the
    // 32-bit addition and bypass the check. When a get<> succeeds, offset + 4 is
    // known to be <= the buffer length, so the subtraction cannot underflow.
    uint32_t keyURILength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || keyURILength > initData->length() - offset)
        return false;

    RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, offset, keyURILength);
    if (!keyURIArray)
        return false;

    // The key URI is stored as UTF-16 code units.
    keyURI = String(keyURIArray->data(), keyURILength / sizeof(unsigned short));
    offset += keyURILength;

    uint32_t keyIDLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || keyIDLength > initData->length() - offset)
        return false;

    RefPtr<Uint16Array> keyIDArray = Uint16Array::create(initDataBuffer, offset, keyIDLength);
    if (!keyIDArray)
        return false;

    keyID = String(keyIDArray->data(), keyIDLength / sizeof(unsigned short));
    offset += keyIDLength;

    uint32_t certificateLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || certificateLength > initData->length() - offset)
        return false;

    certificate = Uint8Array::create(initDataBuffer, offset, certificateLength);
    if (!certificate)
        return false;

    return true;
}
1976 #endif
1977
1978 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1: builds a streaming content key request from the init data (key URI,
// key ID, application certificate), delivers it to the page via keyMessage(),
// and parks the pending AVAssetResourceLoadingRequest under a new session ID.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // The key URI must correspond to a resource-load request we previously deferred.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    // Ask AVFoundation to build the key request from the app certificate and the
    // UTF-8 encoded key ID.
    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface the underlying platform error code to the page.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
2020
// EME v1: delivers the decryption key to the AVAssetResourceLoadingRequest parked
// under sessionID, completing the deferred key-URI load.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Hand the raw key bytes to AVFoundation as the response to the key-URI load.
    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    // The init data is not needed for this key system's addKey path.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
2041
// EME v1: cancels an outstanding key request by forgetting the loading request
// that was parked under sessionID.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (m_sessionIDToRequestMap.contains(sessionID)) {
        m_sessionIDToRequestMap.remove(sessionID);
        return MediaPlayer::NoError;
    }

    return MediaPlayer::InvalidPlayerState;
}
2053 #endif
2054
2055 #if ENABLE(ENCRYPTED_MEDIA_V2)
// EME v2: transfers ownership of the deferred loading request for keyURI to the
// caller (the CDM session), removing it from the map.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
2060
// EME v2 factory: returns a CDM session bound to this player, or null when the
// requested key system is not one we support.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    if (keySystemIsSupported(keySystem))
        return std::make_unique<CDMSessionAVFoundationObjC>(this);

    return nullptr;
}
2068 #endif
2069
2070 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Reconciles m_textTracks with the legacy (pre media-selection) closed-caption
// tracks currently in the player item, creating WebCore tracks for new CC tracks
// and reporting ones that disappeared.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Start by assuming every existing text track was removed; each one we can
    // match to a current AVPlayerItemTrack is taken back off this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (removedTextTracks[i - 1]->textTrackCategory() != InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;

            // Index into removedTextTracks, not m_textTracks: once an element has
            // been removed (or m_textTracks has grown below) the two vectors no
            // longer line up, and the old code cast the wrong track.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2105 #endif
2106
2107 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Returns the legible (caption/subtitle) media selection group, but only after
// the asset has finished async-loading its media selection metadata; asking any
// earlier would trigger synchronous loading.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    if (!m_avAsset)
        return nil;
    
    if ([m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] != AVKeyValueStatusLoaded)
        return nil;
    
    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
}
2118
// Reconciles m_textTracks with the playable options in the legible media
// selection group: creates WebCore tracks for new options (in-band or
// out-of-band) and reports options that disappeared.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Start by assuming every existing track was removed; each one matched to a
    // current selection option is taken back off this list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy CC tracks are not represented by selection options; skip them.
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
                continue;
            
            // Fetch the selection option backing this existing track, which lives
            // on a different concrete class for out-of-band tracks.
            RetainPtr<AVMediaSelectionOptionType> currentOption;
#if ENABLE(AVF_CAPTIONS)
            if (removedTextTracks[i - 1]->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand) {
                RefPtr<OutOfBandTextTrackPrivateAVF> track = static_cast<OutOfBandTextTrackPrivateAVF*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            } else
#endif
            {
                RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
                currentOption = track->mediaSelectionOption();
            }
            
            if ([currentOption.get() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        if ([option outOfBandSource]) {
            m_textTracks.append(OutOfBandTextTrackPrivateAVF::create(this, option));
            m_textTracks.last()->setHasBeenReported(true); // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
            continue;
        }
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option, InbandTextTrackPrivate::Generic));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
2174
// Lazily creates the single in-band metadata text track (HLS timed metadata
// dispatch type) and reports it to the player. Subsequent calls are no-ops.
void MediaPlayerPrivateAVFoundationObjC::processMetadataTrack()
{
    if (m_metadataTrack)
        return;

    m_metadataTrack = InbandMetadataTextTrackPrivateAVF::create(InbandTextTrackPrivate::Metadata, InbandTextTrackPrivate::Data);
    m_metadataTrack->setInBandMetadataTrackDispatchType("com.apple.streaming");
    player()->addTextTrack(m_metadataTrack);
}
2184
// Forwards incoming cue payloads (attributed strings and/or raw samples) to the
// selected text track; cues arriving with no track selected are dropped.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, NSArray *nativeSamples, double time)
{
    if (m_currentTextTrack)
        m_currentTextTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), reinterpret_cast<CFArrayRef>(nativeSamples), time);
}
2192
// Clears any partially accumulated cue state on the selected text track, e.g.
// after a seek.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (m_currentTextTrack)
        m_currentTextTrack->resetCueValues();
}
2202 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
2203
// Switches the active text track, telling AVFoundation to enable the matching
// mechanism (legacy closed-caption display, out-of-band selection option, or
// in-band selection option) and disabling everything when track is null.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTextTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTextTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");
        
    m_currentTextTrack = track;

    if (track) {
        if (track->textTrackCategory() == InbandTextTrackPrivateAVF::LegacyClosedCaption)
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
#if ENABLE(AVF_CAPTIONS)
        else if (track->textTrackCategory() == InbandTextTrackPrivateAVF::OutOfBand)
            [m_avPlayerItem.get() selectMediaOption:static_cast<OutOfBandTextTrackPrivateAVF*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // No track selected: clear both the selection option and legacy CC display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
2232
// Returns the language identifier of the primary audio track, computed
// lazily and cached in m_languageOfPrimaryAudioTrack (mutable, since this
// method is const). A cached empty-but-not-null string means "already
// computed, no language available".
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: there is no single "primary" language.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
2274
2275 #if ENABLE(IOS_AIRPLAY)
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    // Without a player there can be no external playback.
    if (!m_avPlayer)
        return false;

    bool isWireless = [m_avPlayer.get() isExternalPlaybackActive];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isWireless));
    return isWireless;
}
2285
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

    // Translate the WebKitSystemInterface external-device type into the
    // MediaPlayer target-type enumeration.
    auto deviceType = wkExernalDeviceTypeForPlayer(m_avPlayer.get());
    switch (deviceType) {
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    }

    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;
}
2303
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    // No player yet: no external device to name.
    if (!m_avPlayer)
        return emptyString();

    String deviceName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, deviceName.utf8().data());
    return deviceName;
}
2314
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    // Answer from the cached value when no player exists yet; otherwise
    // refresh the cache from AVPlayer first.
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    bool disabled = !m_allowsWirelessVideoPlayback;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(disabled));
    return disabled;
}
2325
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));

    // Remember the setting even before a player exists; it is pushed to the
    // AVPlayer here only when one is available.
    bool allowed = !disabled;
    m_allowsWirelessVideoPlayback = allowed;
    if (m_avPlayer)
        [m_avPlayer.get() setAllowsExternalPlayback:allowed];
}
2335
void MediaPlayerPrivateAVFoundationObjC::updateDisableExternalPlayback()
{
    // External-screen playback is tied to whether a fullscreen video layer
    // is currently attached.
    if (m_avPlayer)
        [m_avPlayer setUsesExternalPlaybackWhileExternalScreenIsActive:m_videoFullscreenLayer != nil];
}
2343 #endif
2344
// KVO handler (via WebCoreAVFMovieObserver): the AVPlayerItem's "status"
// changed. Cache the raw value, then re-evaluate player state.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
2351
// KVO "prior" notification: playbackLikelyToKeepUp is about to change.
// Balanced by playbackLikelyToKeepUpDidChange via m_pendingStatusChanges.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
2356
// KVO handler: playbackLikelyToKeepUp changed. Cache the new value; only
// recompute state once the last of the pending Will/Did pairs settles.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    // Every DidChange must be preceded by its WillChange counterpart.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2365
// KVO "prior" notification: playbackBufferEmpty is about to change.
// Balanced by playbackBufferEmptyDidChange via m_pendingStatusChanges.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
2370
// KVO handler: playbackBufferEmpty changed. Cache the new value; only
// recompute state once the last of the pending Will/Did pairs settles.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    // Every DidChange must be preceded by its WillChange counterpart.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2379
// KVO "prior" notification: playbackBufferFull is about to change.
// Balanced by playbackBufferFullDidChange via m_pendingStatusChanges.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
2384
// KVO handler: playbackBufferFull changed. Cache the new value; only
// recompute state once the last of the pending Will/Did pairs settles.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    // Every DidChange must be preceded by its WillChange counterpart.
    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2393
// KVO handler: the item's seekable time ranges changed. Cache the new
// NSArray, then notify the base class and re-evaluate state.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2401
// KVO handler: the item's loaded (buffered) time ranges changed. Cache the
// new NSArray, then notify the base class and re-evaluate state.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2409
// KVO handler for AVPlayerLayer.readyForDisplay.
void MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange(bool isReady)
{
    m_cachedIsReadyForDisplay = isReady;
    // Becoming ready for display implies video exists; if no video track has
    // been noticed yet, re-scan the tracks.
    if (!hasVideo() && isReady)
        tracksChanged();
    updateStates();
}
2417
// KVO handler: an AVPlayerItemTrack's "enabled" flag changed (the observer
// is registered per-track in tracksDidChange). The new value itself is
// unused; a full track re-scan picks it up.
void MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange(bool)
{
    tracksChanged();
    updateStates();
}
2423
void MediaPlayerPrivateAVFoundationObjC::setShouldBufferData(bool shouldBuffer)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::shouldBufferData(%p) - %s", this, boolString(shouldBuffer));

    if (m_shouldBufferData == shouldBuffer)
        return;

    m_shouldBufferData = shouldBuffer;

    // Remember the setting even without a player; it takes effect once one
    // exists.
    if (!m_avPlayer)
        return;

    // Attach the item to the player when buffering is wanted; detach it
    // otherwise.
    [m_avPlayer.get() replaceCurrentItemWithPlayerItem:(shouldBuffer ? m_avPlayerItem.get() : nil)];
}
2440
2441 #if ENABLE(DATACUE_VALUE)
// Maps an AVFoundation metadata key space to the string WebCore exposes as
// the DataCue/metadata-track type. Each returned AtomicString is built once
// and lives for the process lifetime (NeverDestroyed). Unknown key spaces
// map to the empty atom.
static const AtomicString& metadataType(NSString *avMetadataKeySpace)
{
    static NeverDestroyed<const AtomicString> quickTimeUserDataType("com.apple.quicktime.udta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> isoUserDataType("org.mp4ra", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> quickTimeMetadataType("com.apple.quicktime.mdta", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> iTunesMetadataType("com.apple.itunes", AtomicString::ConstructFromLiteral);
    static NeverDestroyed<const AtomicString> id3MetadataType("org.id3", AtomicString::ConstructFromLiteral);

    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeUserData])
        return quickTimeUserDataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceISOUserData])
        return isoUserDataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceQuickTimeMetadata])
        return quickTimeMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceiTunes])
        return iTunesMetadataType;
    if ([avMetadataKeySpace isEqualToString:AVMetadataKeySpaceID3])
        return id3MetadataType;

    return emptyAtom;
}
2463 #endif
2464
// Called when the AVPlayerItem's "timedMetadata" changes. Converts the
// AVMetadataItems into DataCues on the in-band metadata text track.
// mediaTime is the item's current time when the metadata arrived.
void MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(RetainPtr<NSArray> metadata, double mediaTime)
{
    m_currentMetaData = metadata && ![metadata isKindOfClass:[NSNull class]] ? metadata : nil;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::metadataDidArrive(%p) - adding %i cues at time %.2f", this, m_currentMetaData ? static_cast<int>([m_currentMetaData.get() count]) : 0, mediaTime);

#if ENABLE(DATACUE_VALUE)
    if (seeking())
        return;

    // Ensure the metadata track exists before it is dereferenced below.
    // (Previously this check sat after the null-payload branch, so a first
    // notification carrying nil/NSNull dereferenced a null m_metadataTrack.)
    if (!m_metadataTrack)
        processMetadataTrack();

    // A nil/NSNull payload means the current metadata interval ended: close
    // out any cues still open and stop.
    if (!metadata || [metadata isKindOfClass:[NSNull class]]) {
        m_metadataTrack->updatePendingCueEndTimes(mediaTime);
        return;
    }

    // Set the duration of all incomplete cues before adding new ones.
    double earliestStartTime = std::numeric_limits<double>::infinity();
    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        double start = CMTimeGetSeconds(item.time);
        if (start < earliestStartTime)
            earliestStartTime = start;
    }
    m_metadataTrack->updatePendingCueEndTimes(earliestStartTime);

    for (AVMetadataItemType *item in m_currentMetaData.get()) {
        double start = CMTimeGetSeconds(item.time);
        // An invalid duration leaves the cue open-ended until a later
        // notification closes it via updatePendingCueEndTimes().
        double end = std::numeric_limits<double>::infinity();
        if (CMTIME_IS_VALID(item.duration))
            end = start + CMTimeGetSeconds(item.duration);

        AtomicString type = nullAtom;
        if (item.keySpace)
            type = metadataType(item.keySpace);

        m_metadataTrack->addDataCue(start, end, SerializedPlatformRepresentationMac::create(item), type);
    }
#endif
}
2506
// KVO handler: the item's track list changed. Re-point the per-track
// "enabled" observers at the new tracks — observers must be removed from the
// old tracks before the cache is swapped.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track removeObserver:m_objcObserver.get() forKeyPath:@"enabled"];

    m_cachedTracks = tracks;
    for (AVPlayerItemTrack *track in m_cachedTracks.get())
        [track addObserver:m_objcObserver.get() forKeyPath:@"enabled" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayerItemTrack];

    // The byte total depends on the track list; force it to be recomputed.
    m_cachedTotalBytes = 0;

    tracksChanged();
    updateStates();
}
2521
// KVO handler: the item's "hasEnabledAudio" changed. Cache it, then re-scan
// tracks and re-evaluate state.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
2529
// KVO handler: the item's presentation size changed. Cache it, then report
// the size change and re-evaluate state.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
2537
// KVO handler: the item's duration changed (seconds). Cache it and drop any
// previously computed duration.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(double duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
2544
// KVO handler: the AVPlayer's playback rate changed. Cache it, re-evaluate
// state, then report the rate change.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
2552     
2553 #if ENABLE(IOS_AIRPLAY)
// KVO handler: the AVPlayer's "externalPlaybackActive" changed; forward to
// the base class.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
2558 #endif
2559
// AVAsset keys loaded asynchronously before playback can begin. The array
// is built on first use and intentionally never released.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keyNames = nil;
    if (keyNames)
        return keyNames;

    keyNames = [[NSArray alloc] initWithObjects:@"duration",
                @"naturalSize",
                @"preferredTransform",
                @"preferredVolume",
                @"preferredRate",
                @"playable",
                @"tracks",
                @"availableMediaCharacteristicsWithMediaSelectionOptions",
                nil];
    return keyNames;
}
2576
// AVPlayerItem key paths observed via KVO (see WebCoreAVFMovieObserver).
// The array is built on first use and intentionally never released.
NSArray* itemKVOProperties()
{
    static NSArray* keyPaths = nil;
    if (keyPaths)
        return keyPaths;

    keyPaths = [[NSArray alloc] initWithObjects:@"presentationSize",
                @"status",
                @"asset",
                @"tracks",
                @"seekableTimeRanges",
                @"loadedTimeRanges",
                @"playbackLikelyToKeepUp",
                @"playbackBufferFull",
                @"playbackBufferEmpty",
                @"duration",
                @"hasEnabledAudio",
                @"timedMetadata",
                nil];
    return keyPaths;
}
2597
// AVAssetTrack keys loaded before track properties are queried. Created
// once and intentionally never released.
NSArray* assetTrackMetadataKeyNames()
{
    static NSArray* keyNames = [[NSArray alloc] initWithObjects:@"totalSampleDataLength", @"mediaType", @"enabled", @"preferredTransform", @"naturalSize", nil];
    return keyNames;
}
2603
2604 } // namespace WebCore
2605
// Observes KVO changes, player-item notifications, and legible-output
// callbacks on behalf of a MediaPlayerPrivateAVFoundationObjC instance.
// Callbacks may arrive on non-main threads; work is bounced to the main
// thread, re-checking m_callback there because -disconnect may have run.
@implementation WebCoreAVFMovieObserver

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

- (void)disconnect
{
    // Severs the link to the owner during teardown; every callback below
    // bails out once m_callback is cleared.
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO dispatch: binds the appropriate MediaPlayerPrivateAVFoundationObjC
// member function for the (context, keyPath, prior/after) combination and
// schedules it on the main thread, guarded by a WeakPtr to the owner.
// NOTE(review): keyPath is declared without a type (implicitly id); the
// canonical signature types it as NSString * — confirm this is intentional.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - keyPath = %s", self, [keyPath UTF8String]);

    if (!m_callback)
        return;

    // "Prior" notifications fire before the value changes; they map to the
    // *WillChange handlers below.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

    WTF::Function<void ()> function;

    if (context == MediaPlayerAVFoundationObservationContextAVPlayerLayer) {
        if ([keyPath isEqualToString:@"readyForDisplay"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::firstFrameAvailableDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItemTrack) {
        if ([keyPath isEqualToString:@"enabled"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::trackEnabledDidChange, m_callback, [newValue boolValue]);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        // NOTE(review): "asset" is an AVAsset, yet it is wrapped in
        // RetainPtr<NSArray> here; presumably setAsset() accepts id —
        // confirm against its declaration.
        else if ([keyPath isEqualToString:@"asset"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, CMTimeGetSeconds([newValue CMTimeValue]));
        else if ([keyPath isEqualToString:@"timedMetadata"] && newValue) {
            // Capture the item's current time (clamped to >= 0) so the
            // metadata handler knows when the payload arrived.
            double now = 0;
            CMTime itemTime = [(AVPlayerItemType *)object currentTime];
            if (CMTIME_IS_NUMERIC(itemTime))
                now = std::max(narrowPrecisionToFloat(CMTimeGetSeconds(itemTime)), 0.0f);
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::metadataDidArrive, m_callback, RetainPtr<NSArray>(newValue), now);
        }
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(IOS_AIRPLAY)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }
    
    if (function.isNull())
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput callback: new caption payloads arrived. Retain
// self and the payload across the hop to the main thread.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    // NOTE(review): the UNUSED_PARAM(nativeSamples) below is stale —
    // nativeSamples is in fact captured via strongSamples.
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    RetainPtr<NSArray> strongSamples = nativeSamples;
    callOnMainThread([strongSelf, strongStrings, strongSamples, itemTime] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        callback->processCue(strongStrings.get(), strongSamples.get(), CMTimeGetSeconds(itemTime));
    });
}

// AVPlayerItemLegibleOutput callback: previously delivered cues are invalid
// (e.g. after a seek); flush them on the main thread.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;
    
    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    callOnMainThread([strongSelf] {
        if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
            callback->flushCues();
    });
}
#endif

@end
2764
2765 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate: forwards resource-loading requests,
// authentication challenges, and cancellations to the owning
// MediaPlayerPrivateAVFoundationObjC on the main thread. Each handler
// re-checks m_callback on the main thread because the owner may have been
// detached (via -setCallback:0) in the meantime.
@implementation WebCoreAVFLoaderDelegate

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Returns YES to tell AVFoundation we will service the request
// asynchronously; the decision (and any failure) happens on the main thread.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            // Owner went away: fail the request so AVFoundation stops waiting.
            [strongRequest finishLoadingWithError:nil];
            return;
        }

        if (!callback->shouldWaitForLoadingOfResource(strongRequest.get()))
            [strongRequest finishLoadingWithError:nil];
    });

    return YES;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Server-trust evaluation is declined here and left to AVFoundation's
    // default handling.
    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> strongChallenge = challenge;
    callOnMainThread([strongSelf, strongChallenge] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback) {
            // Owner went away: cancel so the loader is not left hanging.
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
            return;
        }

        if (!callback->shouldWaitForResponseToAuthenticationChallenge(strongChallenge.get()))
            [[strongChallenge sender] cancelAuthenticationChallenge:strongChallenge.get()];
    });

    return YES;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> strongSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> strongRequest = loadingRequest;
    callOnMainThread([strongSelf, strongRequest] {
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (callback)
            callback->didCancelLoadingRequest(strongRequest.get());
    });
}

// Attaches or (with 0) detaches the owning player; called during teardown.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
2844 #endif
2845
2846 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// AVPlayerItemVideoOutput pull delegate: forwards "new frames available"
// notifications to the owning MediaPlayerPrivateAVFoundationObjC.
@implementation WebCoreAVFPullDelegate

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Attaches or (with 0) detaches the owning player.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;
    m_callback->outputMediaDataWillChange(output);
}

- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
    // No-op.
}
@end
2873 #endif
2874
2875 #endif