b1f4a4f3c52fa629390910fa0344c9ef8ed57713
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE COMPUTER, INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29
30 #import "MediaPlayerPrivateAVFoundationObjC.h"
31
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioTrackPrivateAVFObjC.h"
34 #import "AuthenticationChallenge.h"
35 #import "BlockExceptions.h"
36 #import "CDMSessionAVFoundationObjC.h"
37 #import "ExceptionCodePlaceholder.h"
38 #import "FloatConversion.h"
39 #import "FloatConversion.h"
40 #import "FrameView.h"
41 #import "GraphicsContext.h"
42 #import "GraphicsContextCG.h"
43 #import "InbandTextTrackPrivateAVFObjC.h"
44 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
45 #import "URL.h"
46 #import "Logging.h"
47 #import "PlatformTimeRanges.h"
48 #import "SecurityOrigin.h"
49 #import "SoftLinking.h"
50 #import "UUID.h"
51 #import "VideoTrackPrivateAVFObjC.h"
52 #import "WebCoreAVFResourceLoader.h"
53 #import "WebCoreSystemInterface.h"
54 #import <objc/runtime.h>
55 #import <runtime/DataView.h>
56 #import <runtime/JSCInlines.h>
57 #import <runtime/TypedArrayInlines.h>
58 #import <runtime/Uint16Array.h>
59 #import <runtime/Uint32Array.h>
60 #import <runtime/Uint8Array.h>
61 #import <wtf/CurrentTime.h>
62 #import <wtf/Functional.h>
63 #import <wtf/text/CString.h>
64
65 #if ENABLE(AVF_CAPTIONS)
66 #include "TextTrack.h"
67 #endif
68
69 #import <AVFoundation/AVFoundation.h>
70 #if PLATFORM(IOS)
71 #import <CoreImage/CoreImage.h>
72 #else
73 #import <QuartzCore/CoreImage.h>
74 #endif
75 #import <CoreMedia/CoreMedia.h>
76
77 #if USE(VIDEOTOOLBOX)
78 #import <CoreVideo/CoreVideo.h>
79 #import <VideoToolbox/VideoToolbox.h>
80 #endif
81
82 #if ENABLE(AVF_CAPTIONS)
83 // Note: This must be defined before our SOFT_LINK macros:
84 @class AVMediaSelectionOption;
85 @interface AVMediaSelectionOption (OutOfBandExtensions)
86 @property (nonatomic, readonly) NSString *outOfBandSource /*NS_AVAILABLE(TBD, TBD)*/;
87 @end
88 #endif
89
90
91 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
92 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
93 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
94 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
95
96 #if USE(VIDEOTOOLBOX)
97 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
98 #endif
99
100 SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
101 SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
102 SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
103 SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))
104
105 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
106 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
107 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
108 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
109 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
110 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
111 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
112 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
113
114 #if USE(VIDEOTOOLBOX)
115 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
116 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
117 #endif
118
119 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
120 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
121 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
122 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
123 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
124 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
125 SOFT_LINK_CLASS(CoreImage, CIContext)
126 SOFT_LINK_CLASS(CoreImage, CIImage)
127
128 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
129 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
130 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
131 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
132 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
133 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
134 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
135 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
136 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
137 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
138 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
139
140 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
141
142 #define AVPlayer getAVPlayerClass()
143 #define AVPlayerItem getAVPlayerItemClass()
144 #define AVPlayerLayer getAVPlayerLayerClass()
145 #define AVURLAsset getAVURLAssetClass()
146 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
147
148 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
149 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
150 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
151 #define AVMediaTypeVideo getAVMediaTypeVideo()
152 #define AVMediaTypeAudio getAVMediaTypeAudio()
153 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
154 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
155 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
156 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
157 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
158 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
159
160 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
161 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
162 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
163
164 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
165 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
166 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
167
168 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
169 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
170 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
171 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
172
173 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
174 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
175 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
176 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
177 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
178 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
179 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
180 #endif
181
182 #if ENABLE(AVF_CAPTIONS)
183 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
184 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
185 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
186 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
187 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
188 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
189 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
190 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
191 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
192
193 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
194 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
195 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
196 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
197 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
198 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
199 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
200 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
201 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
202 #endif
203
204 #define kCMTimeZero getkCMTimeZero()
205
206 using namespace WebCore;
207
// KVO context values used to distinguish whether an observed change came from
// the AVPlayerItem or the AVPlayer (passed as the opaque context pointer when
// observers are registered, and switched on in observeValueForKeyPath:).
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayer
};
212
// Objective-C observer that forwards AVFoundation notifications, KVO changes
// and (when available) legible-output cues back to the C++ player object.
// m_callback is cleared via -disconnect before the C++ object is destroyed.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Not owned; cleared by -disconnect.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)playableKnown;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
233
234 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Resource-loader delegate: lets WebCore service AVAssetResourceLoader data
// requests (e.g. for media loaded through WebCore's loader). Callbacks arrive
// on globalLoaderDelegateQueue(), not the main thread.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Not owned; cleared via -setCallback:0 in the destructor.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
242 #endif
243
244 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for the AVPlayerItemVideoOutput path: notified when new video
// frames become available so the player can repaint.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Not owned; cleared via -setCallback:0 in the destructor.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
254 #endif
255
256 namespace WebCore {
257
258 static NSArray *assetMetadataKeyNames();
259 static NSArray *itemKVOProperties();
260
261 #if !LOG_DISABLED
// Maps a bool to its textual form for LOG() output.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
266 #endif
267
268 #if ENABLE(ENCRYPTED_MEDIA_V2)
269 typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
270 static PlayerToPrivateMapType& playerToPrivateMap()
271 {
272     DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
273     return map;
274 };
275 #endif
276
277 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Returns the process-wide serial queue on which AVAssetResourceLoader
// delegate callbacks are delivered. Created lazily, exactly once.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t predicate;
    dispatch_once(&predicate, ^{
        queue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
287 #endif
288
289 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Returns the process-wide serial queue used for AVPlayerItemVideoOutput
// pull-delegate notifications. Created lazily, exactly once.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_queue_t queue;
    static dispatch_once_t predicate;
    dispatch_once(&predicate, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
299 #endif
300
301 PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
302
303     return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
304 }
305
306 void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
307 {
308     if (isAvailable())
309         registrar(create, getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
310 }
311
// Constructs the private player. The ObjC observer/delegate helpers are
// created eagerly here; the AVPlayer, AVPlayerItem and AVURLAsset themselves
// are created lazily (see createAVPlayer()/createAVPlayerItem()/
// createAVAssetForURL()).
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTrack(0)
    , m_cachedDuration(MediaPlayer::invalidTime())
    , m_cachedRate(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
#if ENABLE(IOS_AIRPLAY)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register with the global map so CDM session code can find us.
    playerToPrivateMap().set(player, this);
#endif
}
342
// Tears down the player. Delegate callbacks are detached *before* cancelLoad()
// so no ObjC helper can call back into this object while it is being
// destroyed.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Detach the resource-loader delegate; its callbacks run on a background queue.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
#endif
    cancelLoad();
}
358
// Cancels any in-flight loading and releases all AVFoundation objects.
// Order matters: rendering is torn down first, then the observer is
// disconnected, then KVO observers are removed from the item/player before
// the objects are released (removing observers from a deallocated object
// would throw).
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Remove the legible (caption) output from the item before releasing it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Unregister every KVO key added in createAVPlayerItem() before dropping the item.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    // Mirror the observer registrations made in createAVPlayer().
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(IOS_AIRPLAY)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedTracks = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = 0;

    setIgnoreLoadStateChanges(false);
}
413
// True when an AVPlayerLayer exists to render video directly into a CA layer.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_videoLayer;
}
418
// True when a context-based (software paint) renderer exists: either the
// AVPlayerItemVideoOutput path or the AVAssetImageGenerator fallback.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (m_videoOutput)
        return true;
#endif
    return m_imageGenerator;
}
427
// Creates the renderer used for painting into a GraphicsContext. Prefers the
// video-output path when the SDK supports it; otherwise falls back to an
// image generator.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
436
// Lazily creates the AVAssetImageGenerator used for software painting when no
// video output is available. No-op without an asset or if already created.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance: request frames at exactly the asked-for time.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
453
// Destroys whichever context renderer(s) exist. Both are torn down because
// either may have been created depending on SDK support.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
461
462 void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
463 {
464     if (!m_imageGenerator)
465         return;
466
467     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());
468
469     m_imageGenerator = 0;
470 }
471
// Lazily creates the AVPlayerLayer. The actual creation is bounced to the
// main thread (layers must be manipulated there); a weak pointer guards
// against this object being destroyed before the task runs.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_videoLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: state may have changed before this task ran.
        if (!m_avPlayer || m_videoLayer)
            return;

        m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
        [m_videoLayer.get() setPlayer:m_avPlayer.get()];
        [m_videoLayer.get() setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
        [m_videoLayer.get() setName:@"Video layer"];
#endif
        updateVideoLayerGravity();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

        // Tell the client we switched to layer-backed rendering.
        player()->mediaPlayerClient()->mediaPlayerRenderingModeChanged(player());
    });
}
497
// Releases the AVPlayerLayer, detaching it from the player first so the layer
// stops pulling frames.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() setPlayer:nil];

    m_videoLayer = 0;
}
509
510 bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
511 {
512     if (currentRenderingMode() == MediaRenderingToLayer)
513         return m_videoLayer && [m_videoLayer.get() isReadyForDisplay];
514
515     return m_videoFrameHasDrawn;
516 }
517
518 #if ENABLE(AVF_CAPTIONS)
// Maps an out-of-band text track kind to the AVFoundation media
// characteristics array used when declaring the track to AVURLAsset.
// FIXME: Match these to correct types:
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    switch (kind) {
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        // Caption, Subtitle and anything unrecognized share the same characteristics.
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
536 #endif
537
// Creates the AVURLAsset for |url|, attaching reference restrictions, HTTP
// header fields (Referer / User-Agent) and, when AVF_CAPTIONS is enabled, the
// player's out-of-band text track sources. Also installs the resource-loader
// delegate so WebCore can service loading requests.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p)", this);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

#if ENABLE(AVF_CAPTIONS)
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        // Use adoptNS: the previous bare alloc/init was never released and leaked.
        RetainPtr<NSMutableArray> outOfBandTracks = adoptNS([[NSMutableArray alloc] init]);
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks.get() addObject:
                [NSDictionary dictionaryWithObjectsAndKeys:
                    reinterpret_cast<const NSString*>(label.get()), AVOutOfBandAlternateTrackDisplayNameKey,
                    reinterpret_cast<const NSString*>(language.get()), AVOutOfBandAlternateTrackExtendedLanguageTagKey,
                    [NSNumber numberWithBool: (trackSource->isDefault() ? YES : NO)], AVOutOfBandAlternateTrackIsDefaultKey,
                    reinterpret_cast<const NSString*>(uniqueID.get()), AVOutOfBandAlternateTrackIdentifierKey,
                    reinterpret_cast<const NSString*>(url.get()), AVOutOfBandAlternateTrackSourceKey,
                    mediaDescriptionForKind(trackSource->kind()), AVOutOfBandAlternateTrackMediaCharactersticsKey,
                    nil]];
        }

        [options.get() setObject: outOfBandTracks.get() forKey: AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

    NSURL *cocoaURL = URL(ParsedURLString, url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Delegate callbacks are delivered on a dedicated serial queue, not the main thread.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
599
// Lazily creates the AVPlayer, registers KVO observers ("rate", and the
// external-playback key on iOS), and attaches any already-created player item.
// The KVO registrations here are mirrored by removals in cancelLoad().
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Let AVFoundation pick default media selections (e.g. captions) automatically.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:YES];
#endif

#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

    // If the item was created before the player, attach it now.
    if (m_avPlayerItem)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

    setDelayCallbacks(false);
}
628
// Lazily creates the AVPlayerItem for the current asset, subscribes to the
// did-play-to-end notification and every KVO key in itemKVOProperties(), and
// (when supported) attaches a legible output for in-band captions. The KVO
// registrations are mirrored by removals in cancelLoad().
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // NSKeyValueObservingOptionPrior gives us will-change callbacks in addition to did-change.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    // If the player was created before the item, attach the item now.
    if (m_avPlayer)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver caption cues to our observer on the main queue; WebCore renders
    // them itself, so suppress AVFoundation's own rendering.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:[NSArray array]]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

    setDelayCallbacks(false);
}
664
// Asynchronously loads the asset's "playable" property (at most once per
// asset; guarded by m_haveCheckedPlayability) and notifies the observer when
// the answer is known.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);

    // NOTE(review): the block captures m_objcObserver (a RetainPtr member) by
    // reference to |this|; the completion handler may run after teardown —
    // the observer's -disconnect guard appears to be what makes that safe.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        [m_objcObserver.get() playableKnown];
    }];
}
677
// Requests asynchronous loading of the asset properties needed before
// metadata can be reported as available; the observer is notified when done.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);
    // Do not -retain the key array: loadValuesAsynchronouslyForKeys: retains
    // its arguments itself, so the extra retain here leaked the array (compare
    // checkPlayability(), which passes its key array unretained).
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{
        [m_objcObserver.get() metadataLoaded];
    }];
}
685
// Translates cached AVPlayerItem KVO state into the cross-platform ItemStatus.
// The checks are ordered by priority: terminal states (unknown/failed) first,
// then the buffering states, with ReadyToPlay as the fallback.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
704
// Exposes the underlying AVPlayer to clients that need the platform object
// (e.g. for full-screen). The pointer may be null if no player exists yet.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia pm;
    pm.type = PlatformMedia::AVFoundationMediaPlayerType;
    pm.media.avfMediaPlayer = m_avPlayer.get();
    return pm;
}
713
// Returns the AVPlayerLayer used for compositing, or null when rendering
// through the context path.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    return m_videoLayer.get();
}
718
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    // Flip the video layer's hidden flag inside a transaction with implicit
    // animations disabled so the change takes effect immediately.
    BOOL shouldHide = !isVisible;
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:shouldHide];
    [CATransaction commit];
}
727     
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    // Setting the rate triggers several KVO callbacks; suppress them until done.
    setDelayCallbacks(true);
    auto rateToRequest = requestedRate();
    m_cachedRate = rateToRequest;
    [m_avPlayer.get() setRate:rateToRequest];
    setDelayCallbacks(false);
}
739
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    // Pausing triggers several KVO callbacks; suppress them until done.
    setDelayCallbacks(true);
    m_cachedRate = 0;
    // -setRate: takes a float; passing nil (a pointer constant) only worked by
    // accident of nil converting to 0. Pass 0 explicitly, matching m_cachedRate.
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
751
// Returns the media duration in seconds, infinity for indefinite durations
// (e.g. live streams), or MediaPlayer::invalidTime() when unknown.
float MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
         return MediaPlayer::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration= [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return narrowPrecisionToFloat(CMTimeGetSeconds(cmDuration));

    // An indefinite CMTime means the duration is unbounded (live content).
    if (CMTIME_IS_INDEFINITE(cmDuration)) {
        return std::numeric_limits<float>::infinity();
    }

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %.0f", this, MediaPlayer::invalidTime());
    return MediaPlayer::invalidTime();
}
777
float MediaPlayerPrivateAVFoundationObjC::currentTime() const
{
    // Report 0 until metadata has loaded and a player item exists.
    if (!metaDataAvailable() || !m_avPlayerItem)
        return 0;

    CMTime playbackTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playbackTime))
        return 0;

    // Never report a negative time (same clamp the std::max form expressed).
    float seconds = narrowPrecisionToFloat(CMTimeGetSeconds(playbackTime));
    return seconds > 0 ? seconds : 0;
}
789
// Seeks the player item to |time| seconds, allowing AVFoundation to land
// within [time - negativeTolerance, time + positiveTolerance]. Completion is
// reported asynchronously via seekCompleted() on the main thread.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(double time, double negativeTolerance, double positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Timescale 600 is the conventional CMTime timescale for video (divisible
    // by common frame rates).
    CMTime cmTime = CMTimeMakeWithSeconds(time, 600);
    CMTime cmBefore = CMTimeMakeWithSeconds(negativeTolerance, 600);
    CMTime cmAfter = CMTimeMakeWithSeconds(positiveTolerance, 600);

    // The completion handler may outlive this object; capture a weak pointer
    // and bail if the player has been destroyed by the time it fires.
    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
813
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
    // Volume changes are ignored until metadata has been loaded.
    if (metaDataAvailable())
        [m_avPlayer.get() setVolume:volume];
}
821
// Caption visibility changes are only logged here; no player state is touched
// in this method. UNUSED_PARAM is needed because the only use of the parameter
// is inside LOG(), which presumably compiles away in no-logging builds.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
831
void MediaPlayerPrivateAVFoundationObjC::updateRate()
{
    // Push the rate requested by the cross-platform layer down to the AVPlayer,
    // suppressing re-entrant callbacks while we do so.
    setDelayCallbacks(true);
    auto newRate = requestedRate();
    m_cachedRate = newRate;
    [m_avPlayer.get() setRate:newRate];
    setDelayCallbacks(false);
}
839
float MediaPlayerPrivateAVFoundationObjC::rate() const
{
    // The cached rate is meaningless before metadata arrives.
    return metaDataAvailable() ? m_cachedRate : 0;
}
847
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    // Convert the cached AVPlayerItem loadedTimeRanges into platform ranges.
    auto bufferedRanges = PlatformTimeRanges::create();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        // Skip invalid or zero-length ranges.
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        float start = narrowPrecisionToFloat(CMTimeGetSeconds(range.start));
        float end = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(range)));
        bufferedRanges->add(start, end);
    }
    return bufferedRanges;
}
865
double MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    // No cached seekable ranges means nothing is seekable yet.
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return 0;

    // Track the earliest start among all valid, non-empty ranges.
    double earliestStart = std::numeric_limits<double>::infinity();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundValidRange = true;
        double rangeStart = CMTimeGetSeconds(range.start);
        if (rangeStart < earliestStart)
            earliestStart = rangeStart;
    }
    // Fall back to 0 when every range was invalid/empty.
    return foundValidRange ? earliestStart : 0;
}
885
double MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    // Lazily populate the cache directly from the player item if needed.
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    // Track the latest end among all valid, non-empty ranges.
    double latestEnd = 0;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        double rangeEnd = CMTimeGetSeconds(CMTimeRangeGetEnd(range));
        if (rangeEnd > latestEnd)
            latestEnd = rangeEnd;
    }
    return latestEnd;
}
903
// Returns the latest end time (seconds) across all loaded time ranges.
float MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED <= 1080
    // AVFoundation on Mountain Lion will occasionally not send a KVO notification
    // when loadedTimeRanges changes when there is no video output. In that case
    // update the cached value explicitly.
    if (!hasLayerRenderer() && !hasContextRenderer())
        m_cachedLoadedRanges = [m_avPlayerItem loadedTimeRanges];
#endif

    if (!m_cachedLoadedRanges)
        return 0;

    float maxTimeLoaded = 0;
    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        // Ignore invalid or zero-length ranges.
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        float endOfRange = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange)));
        if (maxTimeLoaded < endOfRange)
            maxTimeLoaded = endOfRange;
    }

    return maxTimeLoaded;   
}
930
// Returns the total sample data size (bytes) summed across every track of the
// current item, or 0 before metadata is available.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    // Use an unsigned accumulator to match the return type and avoid an
    // implicit signed-to-unsigned conversion at the return statement.
    unsigned long long totalMediaSize = 0;
    for (AVPlayerItemTrack *thisTrack in m_cachedTracks.get())
        totalMediaSize += [[thisTrack assetTrack] totalSampleDataLength];

    return totalMediaSize;
}
942
// Store the new asset; assignment to the member (a smart pointer, given the
// .get() calls elsewhere) retains it and releases any previous asset.
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
947
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    // Derive an aggregate status from the per-key loading status of each
    // metadata key we asked the asset to load asynchronously.
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:nil];

        // At least one key has not finished loading yet.
        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;

        // At least one key could not be loaded.
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed;

        // Loading of at least one key was cancelled.
        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled;
    }

    // Every key loaded; report whether the asset says it can actually play.
    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
971
// Paints the current video frame into the given graphics context, preferring
// the AVPlayerItemVideoOutput path (when built in and a frame is ready) over
// the slower AVAssetImageGenerator path.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // Painting can trigger AVFoundation callbacks; defer them until we finish.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
992
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    // Nothing to paint before metadata arrives or when painting is disabled.
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // When the video is composited through a layer the frame is already on
    // screen, so a software paint would be redundant.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    paintCurrentFrameInContext(context, rect);
}
1004
// Paints a snapshot of the current frame, obtained via AVAssetImageGenerator,
// into |rect| of the given context. Does nothing if no image can be created.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
{
    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    // CGContextDrawImage draws with a flipped coordinate system; translate and
    // scale so the image appears upright inside |rect|.
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
    // Note: the previous explicit "image = 0" reset was redundant — RetainPtr
    // releases the image automatically when it goes out of scope.
}
1018
// Builds (once) and returns the set of MIME types AVFoundation reports it can
// play. Subsequent calls return the cached set.
static HashSet<String> mimeTypeCache()
{
    DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add(mimeType);

    return cache;
}
1034
// Synchronously generates a CGImage of the frame at |time| seconds, sized to
// fit |rect|, converted into the device RGB color space. May return null.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Limit the generated image to the paint rect's size.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    // Re-wrap the image in the device RGB color space before handing it to CG painting.
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1056
// Copies the lazily-built set of playable MIME types into the caller's set.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}
1061
1062 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// FairPlay Streaming identifiers are the only key systems supported here.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps") || equalIgnoringCase(keySystem, "com.apple.fps.1_0");
}
1069 #endif
1070
// Answers canPlayType() for this engine: IsNotSupported / MayBeSupported /
// IsSupported, based on key system, MIME type, and codecs string.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // Media Source content is handled by a different media engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    // The container type must at least be one AVFoundation claims to play.
    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Ask AVFoundation about the full "type; codecs=..." string for a definitive answer.
    // (Also removed a stray double semicolon present in the previous version.)
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1107
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    // An empty key system is never supported.
    if (keySystem.isEmpty())
        return false;

    // The key system itself must be one this engine recognizes.
    if (!keySystemIsSupported(keySystem))
        return false;

    // If a container type was supplied, it must be playable too.
    if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
        return false;

    return true;
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
    return false;
#endif
}
1126
1127 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Resource-loader delegate entry point: returns true when WebKit will satisfy
// this AVAssetResourceLoadingRequest itself (either by routing an "skd" key
// request through the EME key-needed machinery, or by streaming the media data
// via WebCoreAVFResourceLoader).
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Little-endian 32-bit byte count of the key URI.
        initDataView->set<uint32_t>(0, keyURISize, true);

        // NOTE(review): dividing by sizeof(unsigned char) (== 1) looks odd for a
        // Uint16Array copy — presumably the element count intended is
        // keyURI.length(); confirm against the Uint16Array::setRange contract.
        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        keyURIArray->setRange(keyURI.deprecatedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

        // The dangling 'if' below shares its body (the 'return false') across
        // both ENCRYPTED_MEDIA variants via the preprocessor.
#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so a later key update can be routed back to it.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }
#endif

    // Non-key requests: stream the data through our own resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1163
// Forwards an AVFoundation authentication challenge to the MediaPlayer client;
// returns true when the client will respond to the challenge asynchronously.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    UNUSED_PARAM(nsChallenge);
    // FIXME: <rdar://problem/15799844>
    return false;
#else
    // Wrap the NSURLAuthenticationChallenge in WebCore's platform-neutral type.
    AuthenticationChallenge challenge(nsChallenge);

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
#endif
}
1176
// AVFoundation cancelled this loading request; stop the loader (if any) that
// was feeding it. (The previous version also computed the request URL's scheme
// but never used it — that unused local has been removed.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1186
// The request has finished; drop our loader mapping so it can be destroyed.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1191 #endif
1192
// This engine is usable only when both soft-linked frameworks load at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && CoreMediaLibrary();
}
1197
// Intended to snap a time value to the media timescale; currently returns the
// input unchanged in both branches (see FIXME below).
float MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(float timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1206
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    // Choose between aspect-preserving and stretch-to-fill gravity, then apply
    // it without implicit animations.
    NSString* videoGravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:videoGravity];
    [CATransaction commit];
}
1218
// Called whenever the asset/item track collection changes. Recomputes and
// caches hasVideo/hasAudio/hasClosedCaptions, refreshes the audio/video track
// lists, updates the natural size, and fires characteristicsChanged() if the
// primary audio language changed.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the old language so we can detect a change at the end.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        setHasVideo([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual] count]);
        setHasAudio([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible] count]);
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
    } else {
        // Inspect the enabled tracks of the player item directly.
        bool hasVideo = false;
        bool hasAudio = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                if ([[assetTrack mediaType] isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([[assetTrack mediaType] isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                }
            }
        }
        setHasVideo(hasVideo);
        setHasAudio(hasAudio);


#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the media-selection API for caption detection when it exists.
    if (AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia()) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

    // Fall back to legacy closed-caption track handling on configurations
    // without legible output support.
#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "WebCoreAVFMovieObserver:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}
1298
1299 #if ENABLE(VIDEO_TRACK)
// Diffs the current AVPlayerItemTracks of |trackType| against the previously
// known items in |oldItems| using set algebra on the underlying tracks:
// removed items get |removedFunction|, newly created items (via |itemFactory|)
// get |addedFunction|, and |oldItems| is rewritten to the surviving+new set.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // Current tracks of the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    typedef Vector<RefT> ItemVector;
    for (auto i = oldItems.begin(); i != oldItems.end(); ++i)
        [oldTracks addObject:(*i)->playerItemTrack()];

    // removedTracks = oldTracks - newTracks.
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    // addedTracks = newTracks - oldTracks.
    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the old items into removed vs. surviving.
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto i = oldItems.begin(); i != oldItems.end(); ++i) {
        if ([removedTracks containsObject:(*i)->playerItemTrack()])
            removedItems.append(*i);
        else
            replacementItems.append(*i);
    }

    // Create fresh items for the newly appeared tracks.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player after oldItems reflects the new state.
    for (auto i = removedItems.begin(); i != removedItems.end(); ++i)
        (player->*removedFunction)(*i);

    for (auto i = addedItems.begin(); i != addedItems.end(); ++i)
        (player->*addedFunction)(*i);
}
1340
// Syncs m_audioTracks with the item's current audio tracks, notifying the player.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
}
1345
// Syncs m_videoTracks with the item's current video tracks, notifying the player.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
}
1350 #endif // ENABLE(VIDEO_TRACK)
1351
// Recomputes the movie's natural size from the union of all track rects
// (transformed by each track's preferred transform) and caches it.
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    // Some assets don't report track properties until they are completely ready to play, but we
    // want to report a size as early as possible so use presentationSize when an asset has no tracks.
    if (m_avPlayerItem && ![m_cachedTracks count]) {
        setNaturalSize(roundedIntSize(m_cachedPresentationSize));
        return;
    }

    // AVAsset's 'naturalSize' property only considers the movie's first video track, so we need to compute
    // the union of all visual track rects.
    CGRect trackUnionRect = CGRectZero;
    for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
        AVAssetTrack* assetTrack = [track assetTrack];
        CGSize trackSize = [assetTrack naturalSize];
        CGRect trackRect = CGRectMake(0, 0, trackSize.width, trackSize.height);
        trackUnionRect = CGRectUnion(trackUnionRect, CGRectApplyAffineTransform(trackRect, [assetTrack preferredTransform]));
    }

    // The movie is always displayed at 0,0 so move the track rect to the origin before using width and height.
    // NOTE(review): offsetting by +origin doesn't actually move the rect to the
    // origin (that would need -origin), but only .size is consumed below, so
    // the offset has no effect on the result — confirm intent before changing.
    trackUnionRect = CGRectOffset(trackUnionRect, trackUnionRect.origin.x, trackUnionRect.origin.y);
    
    // Also look at the asset's preferred transform so we account for a movie matrix.
    CGSize naturalSize = CGSizeApplyAffineTransform(trackUnionRect.size, [m_avAsset.get() preferredTransform]);

    // Cache the natural size (setNaturalSize will notify the player if it has changed).
    setNaturalSize(IntSize(naturalSize));
}
1383
1384 #if PLATFORM(IOS)
// FIXME: Implement for iOS in WebKit System Interface.
// iOS stub: always reports no resolved URL, so hasSingleSecurityOrigin()
// below compares against a null URL's origin on this platform.
static inline NSURL *wkAVAssetResolvedURL(AVAsset*)
{
    return nil;
}
1390 #endif
1391
// True when the URL the asset actually resolved to shares scheme/host/port
// with the URL we were asked to load (i.e. no cross-origin redirect occurred).
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset)
        return false;
    
    RefPtr<SecurityOrigin> resolvedOrigin = SecurityOrigin::create(URL(wkAVAssetResolvedURL(m_avAsset.get())));
    RefPtr<SecurityOrigin> requestedOrigin = SecurityOrigin::createFromString(assetURL());
    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
1401
1402 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Creates and attaches an AVPlayerItemVideoOutput to the current player item
// so frames can be pulled for software painting. No-op without an item or if
// an output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox available we accept 4:2:2 YCbCr and convert on demand.
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_422YpCbCr8) };
#else
    // Use the same dictionary-literal style as the VideoToolbox branch above
    // (previously this branch used the legacy dictionaryWithObjectsAndKeys: form).
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA) };
#endif
    m_videoOutput = adoptNS([[getAVPlayerItemVideoOutputClass() alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    waitForVideoOutputMediaDataWillChange();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
1427
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    // Detach from the player item (if it still exists) before dropping our reference.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    m_videoOutput = 0;
}
1439
// Pulls the pixel buffer for the item's current time from the video output,
// converting it to 32BGRA via VideoToolbox when that path is compiled in.
// Returns null when no new frame is available.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    // Avoid the copy when the output has no fresh frame for this time.
    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Convert the output's native format into 32BGRA for CG consumption.
    // NOTE(review): the CVPixelBufferCreate/TransferImage results are not
    // checked; a failed allocation would hand an invalid buffer onward.
    CVPixelBufferRef outputBuffer;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
1480
// True when a frame can be painted right now: either a previously captured
// image exists, or the (lazily created) video output has a fresh pixel buffer.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A previously captured image still counts as an available frame.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
1494
// CGDataProvider direct-access callback: lock the pixel buffer for read-only
// CPU access and hand CoreGraphics the base address of its pixel data.
// Balanced by CVPixelBufferReleaseBytePointerCallback below.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(pixelBuffer);
}
1501
// CGDataProvider callback: unlock the base address locked by
// CVPixelBufferGetBytePointerCallback when CG is done reading.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
1507
// CGDataProvider release-info callback: balances the CFRetain performed in
// createImageFromPixelBuffer when the data provider was created.
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CFRelease(static_cast<CVPixelBufferRef>(info));
}
1513
// Wraps a kCVPixelFormatType_32BGRA pixel buffer in a CGImage without copying
// the pixel data. The buffer is retained for the lifetime of the data provider
// and released via CVPixelBufferReleaseInfoCallback.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t imageWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t imageHeight = CVPixelBufferGetHeight(pixelBuffer);
    size_t stride = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t dataSize = CVPixelBufferGetDataSize(pixelBuffer);

    // BGRA in memory == 32-bit little-endian with alpha first in CG terms.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in callbacks.
    CGDataProviderDirectCallbacks callbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> dataProvider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, dataSize, &callbacks));

    return adoptCF(CGImageCreate(imageWidth, imageHeight, 8, 32, stride, deviceRGBColorSpaceRef(), bitmapInfo, dataProvider.get(), nullptr, false, kCGRenderingIntentDefault));
}
1531
// Refreshes m_lastImage from the video output. Keeps the previous image when
// no new pixel buffer is available for the current time.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (pixelBuffer)
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
1542
// Draws the most recent frame obtained from the AVPlayerItemVideoOutput into
// |context|, scaling the whole frame into |outputRect|.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& outputRect)
{
    updateLastImage();
    if (!m_lastImage)
        return;

    GraphicsContextStateSaver stateSaver(*context);

    IntRect sourceRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));
    context->drawNativeImage(m_lastImage.get(), sourceRect.size(), ColorSpaceDeviceRGB, outputRect, sourceRect);

    // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an
    // available video frame, destroy it now that it is no longer needed.
    if (m_imageGenerator)
        destroyImageGenerator();
}
1560
// Returns the latest video frame as a native image pointer, refreshing it
// first. May return null if no frame has ever been produced.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
1566
// Blocks the calling thread until the video output reports that new media data
// is available (outputMediaDataWillChange signals the semaphore), giving up
// after one second rather than hanging indefinitely.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // dispatch_semaphore_wait returns non-zero on timeout; only log, do not fail.
    if (dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC)))
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
1580
// Delegate callback from the video output; wakes any thread blocked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
1585 #endif
1586
1587 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Parses the EME init data blob into its three components. Each length field
// is a little-endian uint32 (read via DataView because Uint32Array would
// require 4-byte alignment), and keyURI/keyID are 16-bit code unit strings.
// Returns false if the data is truncated or malformed.
bool MediaPlayerPrivateAVFoundationObjC::extractKeyURIKeyIDAndCertificateFromInitData(Uint8Array* initData, String& keyURI, String& keyID, RefPtr<Uint8Array>& certificate)
{
    // initData should have the following layout:
    // [4 bytes: keyURI length][N bytes: keyURI][4 bytes: contentID length], [N bytes: contentID], [4 bytes: certificate length][N bytes: certificate]
    if (initData->byteLength() < 4)
        return false;

    RefPtr<ArrayBuffer> initDataBuffer = initData->buffer();

    // Use a DataView to read uint32 values from the buffer, as Uint32Array requires the reads be aligned on 4-byte boundaries. 
    RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
    uint32_t offset = 0;
    bool status = true;

    uint32_t keyURILength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    // NOTE(review): offset + keyURILength is 32-bit arithmetic; a length near
    // UINT32_MAX could wrap and pass this check — confirm upstream bounds on initData.
    if (!status || offset + keyURILength > initData->length())
        return false;

    // NOTE(review): keyURILength is used as a byte count in the bounds check and
    // in the String length below, but is passed as the length to Uint16Array::create —
    // verify create() expects bytes here, not element count.
    RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, offset, keyURILength);
    if (!keyURIArray)
        return false;

    keyURI = String(keyURIArray->data(), keyURILength / sizeof(unsigned short));
    offset += keyURILength;

    uint32_t keyIDLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || offset + keyIDLength > initData->length())
        return false;

    RefPtr<Uint16Array> keyIDArray = Uint16Array::create(initDataBuffer, offset, keyIDLength);
    if (!keyIDArray)
        return false;

    keyID = String(keyIDArray->data(), keyIDLength / sizeof(unsigned short));
    offset += keyIDLength;

    uint32_t certificateLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || offset + certificateLength > initData->length())
        return false;

    certificate = Uint8Array::create(initDataBuffer, offset, certificateLength);
    if (!certificate)
        return false;

    return true;
}
1637 #endif
1638
1639 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1 entry point: extracts the key URI, key ID and app certificate from
// |initData|, asks AVFoundation for a streaming content key request, and
// forwards the resulting request bytes to the page via keyMessage(). The
// pending AVAssetResourceLoadingRequest is re-keyed from keyURI to a freshly
// generated session ID so addKey()/cancelKeyRequest() can find it later.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // A resource-loading request for this key must already be pending.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Report the underlying error code to the page; messaging nil returns 0 if absent.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyURIToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
1681
// EME v1: delivers the key bytes supplied by the page to the pending
// AVAssetResourceLoadingRequest registered under |sessionID| by
// generateKeyRequest(), completing the resource load. initDataPtr/Length are
// unused by this key system.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    // The request is satisfied; drop it so the session cannot be completed twice.
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
1702
// EME v1: abandons the pending key request for |sessionID|, if any.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // take() removes the entry and returns a null RetainPtr when absent.
    if (!m_sessionIDToRequestMap.take(sessionID))
        return MediaPlayer::InvalidPlayerState;

    return MediaPlayer::NoError;
}
1714 #endif
1715
1716 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Removes and returns the pending resource-loading request for |keyURI|;
// returns a null RetainPtr if none is pending.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
1721
// EME v2: creates a CDM session bound to this player, or null when the key
// system is not one we support.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    return keySystemIsSupported(keySystem) ? std::make_unique<CDMSessionAVFoundationObjC>(this) : nullptr;
}
1729 #endif
1730
1731 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Reconciles m_textTracks with the legacy closed-caption tracks currently in
// the player item: existing wrappers are kept, vanished tracks are reported as
// removed, and unseen tracks get a new InbandTextTrackPrivateLegacyAVFObjC.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Deselect any legible option so text tracks are not configured automatically.
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Whatever is still in removedTextTracks after the loop has disappeared.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (!removedTextTracks[i - 1]->isLegacyClosedCaptionsTrack())
                continue;

            // Index into removedTextTracks, not m_textTracks: once an entry has been
            // removed, the two vectors no longer line up (and m_textTracks also grows
            // below). This matches processMediaSelectionOptions().
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
1766 #endif
1767
1768 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Returns the legible media selection group, or nil when the asset is absent
// or its media-selection characteristics have not finished loading — querying
// the group before the key is loaded would block.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    if (!m_avAsset)
        return nil;

    if ([m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
}
1779
// Reconciles m_textTracks with the playable options of the asset's legible
// media selection group: options already wrapped stay, vanished options are
// reported as removed, and new in-band options get a fresh wrapper.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Entries remaining in removedTextTracks after the loop have disappeared.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
             // Legacy CC tracks are handled by processLegacyClosedCaptionsTracks().
             if (removedTextTracks[i - 1]->isLegacyClosedCaptionsTrack())
                 continue;

            RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
            if ([track->mediaSelectionOption() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
        if ([option outOfBandSource])
            continue;
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
1822
// Forwards attributed-string cues from the legible output to the currently
// selected text track; dropped when no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, double time)
{
    if (!m_currentTrack)
        return;

    m_currentTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), time);
}
1830
// Clears any partially-delivered cue state on the current text track, e.g.
// after a seek flushes the legible output's sequence.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTrack)
        return;

    m_currentTrack->resetCueValues();
}
1840 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1841
// Selects |track| as the active in-band text track, routing the selection to
// either the legacy closed-caption API or the media-selection API depending on
// the track's kind. Passing null deselects both mechanisms.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");

    m_currentTrack = track;

    if (track) {
        if (track->isLegacyClosedCaptionsTrack())
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
1866
// Returns the BCP-47-style language of the primary audio track, caching the
// result in m_languageOfPrimaryAudioTrack (cleared elsewhere when tracks
// change). Prefers the currently selected audible media-selection option;
// otherwise falls back to the language of a sole audio track.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // A non-null cached value (including the cached empty string) is authoritative.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
1908
1909 #if ENABLE(IOS_AIRPLAY)
// Reports whether playback is currently routed to an external (e.g. AirPlay)
// target. False when there is no AVPlayer yet.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool isWireless = [m_avPlayer.get() isExternalPlaybackActive];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(isWireless));
    return isWireless;
}
1919
// Returns true when wireless (external) video playback is disabled. When an
// AVPlayer exists, the cached m_allowsWirelessVideoPlayback (mutable, so this
// const getter can refresh it) is re-read from the player first.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = ![m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
1930
// Enables or disables wireless (external) video playback. |disabled| == true
// means external playback must NOT be allowed.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // AVPlayer's property expresses what is *allowed*, the parameter expresses
    // what is *disabled*, so negate. (Previously `disabled` was passed through
    // directly, inverting the requested state and contradicting the cached
    // m_allowsWirelessVideoPlayback set above.)
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
}
1940 #endif
1941
// KVO handler for AVPlayerItem.status: caches the raw status value and
// recomputes the player's ready/network states.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
1948
// KVO "prior" notification for playbackLikelyToKeepUp: defer state updates
// until the paired ...DidChange arrives (see playbackLikelyToKeepUpDidChange).
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
1953
// KVO handler for playbackLikelyToKeepUp: caches the value and recomputes
// state only once all outstanding will/did notification pairs have completed.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
1962
// KVO "prior" notification for playbackBufferEmpty: defer state updates until
// the paired ...DidChange arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
1967
// KVO handler for playbackBufferEmpty: caches the value and recomputes state
// only once all outstanding will/did notification pairs have completed.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
1976
// KVO "prior" notification for playbackBufferFull: defer state updates until
// the paired ...DidChange arrives.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
1981
// KVO handler for playbackBufferFull: caches the value and recomputes state
// only once all outstanding will/did notification pairs have completed.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
1990
// KVO handler for AVPlayerItem.seekableTimeRanges: caches the ranges and
// notifies the base class before recomputing state.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
1998
// KVO handler for AVPlayerItem.loadedTimeRanges: caches the ranges and
// notifies the base class before recomputing state.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2006
// KVO handler for AVPlayerItem.tracks: caches the track list and notifies the
// base class before recomputing state.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    m_cachedTracks = tracks;

    tracksChanged();
    updateStates();
}
2014
// KVO handler for hasEnabledAudio: caches the flag and notifies the base class
// before recomputing state.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
2022
// KVO handler for AVPlayerItem.presentationSize: caches the size and notifies
// the base class before recomputing state.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
2030
// KVO handler for AVPlayerItem.duration: caches the new duration (in seconds)
// and invalidates the base class's cached duration.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(double duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
2037
// KVO handler for AVPlayer.rate: caches the rate, recomputes state, then
// notifies the base class of the rate change.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
2045     
2046 #if ENABLE(IOS_AIRPLAY)
// KVO handler for AVPlayer.externalPlaybackActive: forwards to the base class.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
2051 #endif
2052
// AVAsset keys that must be loaded asynchronously before the asset can be
// inspected. Built once on first use and intentionally kept alive forever.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"duration",
        @"naturalSize",
        @"preferredTransform",
        @"preferredVolume",
        @"preferredRate",
        @"playable",
        @"tracks",
        @"availableMediaCharacteristicsWithMediaSelectionOptions",
        nil];
    return keys;
}
2069
// AVPlayerItem key paths observed via KVO by WebCoreAVFMovieObserver. Built
// once on first use and intentionally kept alive forever.
NSArray* itemKVOProperties()
{
    static NSArray* keys = [[NSArray alloc] initWithObjects:@"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        nil];
    return keys;
}
2089
2090 } // namespace WebCore
2091
2092 @implementation WebCoreAVFMovieObserver
2093
// Designated initializer: stores the (unowned) player that notifications are
// forwarded to; cleared via -disconnect before the player is destroyed.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}
2102
// Severs the link to the player: cancels any queued performSelector requests
// and nulls m_callback so in-flight notifications become no-ops.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}
2108
// Forwards asset-metadata-loaded to the player on the main thread.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}
2116
// Forwards asset-playability-known to the player on the main thread.
- (void)playableKnown
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
}
2124
// NSNotification handler for AVPlayerItemDidPlayToEndTime; forwards to the
// player on the main thread.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}
2132
// Central KVO dispatcher: maps each observed key path (plus observation
// context and the "is prior" flag) to the matching MediaPlayerPrivate
// callback, then schedules that callback on the main thread guarded by a
// WeakPtr so it is dropped if the player has been destroyed meanwhile.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(object);
    id newValue = [change valueForKey:NSKeyValueChangeNewKey];

    LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - keyPath = %s", self, [keyPath UTF8String]);

    if (!m_callback)
        return;

    // True for the "prior" half of a will/did notification pair.
    bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];

    WTF::Function<void ()> function;

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
        if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"asset"])
            // NOTE(review): newValue here is an AVAsset, not an NSArray; wrapping it
            // in RetainPtr<NSArray> only retains the pointer so it works, but the
            // type is misleading — consider RetainPtr<id>. Confirm setAsset's signature.
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"tracks"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
        else if ([keyPath isEqualToString:@"presentationSize"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
        else if ([keyPath isEqualToString:@"duration"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, CMTimeGetSeconds([newValue CMTimeValue]));
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
#if ENABLE(IOS_AIRPLAY)
        else if ([keyPath isEqualToString:@"externalPlaybackActive"])
            function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
#endif
    }

    if (function.isNull())
        return;

    auto weakThis = m_callback->createWeakPtr();
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
        // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
        // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
        if (!weakThis)
            return;
        function();
    }));
}
2204
2205 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput delegate callback (arrives on the output's queue):
// hops to the main thread, keeping self and the strings alive via RetainPtr,
// then delivers the cues to the player if it is still connected.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    RetainPtr<NSArray> strongStrings = strings;
    callOnMainThread([strongSelf, strongStrings, itemTime] {
        // Re-check on the main thread: -disconnect may have run in the meantime.
        MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback;
        if (!callback)
            return;
        callback->processCue(strongStrings.get(), CMTimeGetSeconds(itemTime));
    });
}
2223
// AVPlayerItemLegibleOutput delegate callback: the cue sequence was flushed
// (e.g. after a seek); forwards to the player's flushCues on the main thread.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFMovieObserver> strongSelf = self;
    callOnMainThread([strongSelf] {
        // Re-check on the main thread: -disconnect may have run in the meantime.
        if (MediaPlayerPrivateAVFoundationObjC* callback = strongSelf->m_callback)
            callback->flushCues();
    });
}
2237 #endif
2238
2239 @end
2240
2241 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

// Bridges AVAssetResourceLoaderDelegate callbacks (delivered on a background
// queue) to the owning MediaPlayerPrivateAVFoundationObjC on the main thread.
// The raw m_callback pointer is cleared through -setCallback: before the
// player is destroyed, and is always re-validated after the thread hop.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (self)
        m_callback = callback;
    return self;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Retain self and the request while the decision is made on the main
    // thread. Answer YES now; the request is finished over there if nobody
    // will service it, so AVFoundation never waits on a dead request.
    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> protectedRequest = loadingRequest;
    callOnMainThread([protectedSelf, protectedRequest] {
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        // Either the player is gone or it declined the request; in both cases
        // the original code finishes the request with no error.
        if (!player || !player->shouldWaitForLoadingOfResource(protectedRequest.get()))
            [protectedRequest finishLoadingWithError:nil];
    });

    return YES;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Server-trust evaluation is not handled here; let AVFoundation use its
    // default behavior for those challenges.
    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> protectedChallenge = challenge;
    callOnMainThread([protectedSelf, protectedChallenge] {
        MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback;
        // Cancel the challenge when the player is gone or when it will not
        // produce a response — matching the two cancel paths of the original.
        if (!player || !player->shouldWaitForResponseToAuthenticationChallenge(protectedChallenge.get()))
            [[protectedChallenge sender] cancelAuthenticationChallenge:protectedChallenge.get()];
    });

    return YES;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> protectedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> protectedRequest = loadingRequest;
    callOnMainThread([protectedSelf, protectedRequest] {
        if (MediaPlayerPrivateAVFoundationObjC* player = protectedSelf->m_callback)
            player->didCancelLoadingRequest(protectedRequest.get());
    });
}

// Called by the player to attach or (with null) detach itself; detaching makes
// every subsequent delegate callback a no-op.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
2320 #endif
2321
2322 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@implementation WebCoreAVFPullDelegate
// AVPlayerItemOutputPullDelegate that forwards "new video frames available"
// notifications straight to the owning player. Unlike the loader delegate,
// this does not hop to the main thread — the player handles threading itself.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Attach or (with null) detach the owning player; when detached the delegate
// callbacks below become no-ops.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;
    m_callback->outputMediaDataWillChange(output);
}

- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    UNUSED_PARAM(output);
    // Intentionally ignored; the player re-pulls frames as needed.
}
@end
2349 #endif
2350
2351 #endif