[iOS] Show external device name/type in placeholder
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011-2014 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE COMPUTER, INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29
30 #import "MediaPlayerPrivateAVFoundationObjC.h"
31
32 #import "AVTrackPrivateAVFObjCImpl.h"
33 #import "AudioTrackPrivateAVFObjC.h"
34 #import "AuthenticationChallenge.h"
35 #import "BlockExceptions.h"
36 #import "CDMSessionAVFoundationObjC.h"
37 #import "ExceptionCodePlaceholder.h"
38 #import "FloatConversion.h"
39 #import "FloatConversion.h"
40 #import "FrameView.h"
41 #import "GraphicsContext.h"
42 #import "GraphicsContextCG.h"
43 #import "InbandTextTrackPrivateAVFObjC.h"
44 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
45 #import "URL.h"
46 #import "Logging.h"
47 #import "PlatformTimeRanges.h"
48 #import "SecurityOrigin.h"
49 #import "SoftLinking.h"
50 #import "UUID.h"
51 #import "VideoTrackPrivateAVFObjC.h"
52 #import "WebCoreAVFResourceLoader.h"
53 #import "WebCoreSystemInterface.h"
54 #import <objc/runtime.h>
55 #import <runtime/DataView.h>
56 #import <runtime/JSCInlines.h>
57 #import <runtime/TypedArrayInlines.h>
58 #import <runtime/Uint16Array.h>
59 #import <runtime/Uint32Array.h>
60 #import <runtime/Uint8Array.h>
61 #import <wtf/CurrentTime.h>
62 #import <wtf/Functional.h>
63 #import <wtf/text/CString.h>
64
65 #if ENABLE(AVF_CAPTIONS)
66 #include "TextTrack.h"
67 #endif
68
69 #import <AVFoundation/AVFoundation.h>
70 #if PLATFORM(IOS)
71 #import <CoreImage/CoreImage.h>
72 #else
73 #import <QuartzCore/CoreImage.h>
74 #endif
75 #import <CoreMedia/CoreMedia.h>
76
77 #if USE(VIDEOTOOLBOX)
78 #import <CoreVideo/CoreVideo.h>
79 #import <VideoToolbox/VideoToolbox.h>
80 #endif
81
82 #if ENABLE(AVF_CAPTIONS)
83 // Note: This must be defined before our SOFT_LINK macros:
84 @class AVMediaSelectionOption;
85 @interface AVMediaSelectionOption (OutOfBandExtensions)
86 @property (nonatomic, readonly) NSString *outOfBandSource /*NS_AVAILABLE(TBD, TBD)*/;
87 @end
88 #endif
89
90 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
91 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
92 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreImage)
93 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
94
95 #if USE(VIDEOTOOLBOX)
96 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
97 #endif
98
99 SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
100 SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
101 SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
102 SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))
103
104 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
105 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
106 SOFT_LINK(CoreVideo, CVPixelBufferGetBaseAddress, void*, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
107 SOFT_LINK(CoreVideo, CVPixelBufferGetBytesPerRow, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
108 SOFT_LINK(CoreVideo, CVPixelBufferGetDataSize, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
109 SOFT_LINK(CoreVideo, CVPixelBufferGetPixelFormatType, OSType, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
110 SOFT_LINK(CoreVideo, CVPixelBufferLockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
111 SOFT_LINK(CoreVideo, CVPixelBufferUnlockBaseAddress, CVReturn, (CVPixelBufferRef pixelBuffer, CVOptionFlags lockFlags), (pixelBuffer, lockFlags))
112
113 #if USE(VIDEOTOOLBOX)
114 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
115 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
116 #endif
117
118 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
119 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
120 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
121 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
122 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
123 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
124 SOFT_LINK_CLASS(CoreImage, CIContext)
125 SOFT_LINK_CLASS(CoreImage, CIImage)
126
127 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
128 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
129 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
130 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
131 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
132 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
133 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
134 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
135 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
136 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
137 SOFT_LINK_POINTER(CoreVideo, kCVPixelBufferPixelFormatTypeKey, NSString *)
138
139 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
140
141 #define AVPlayer getAVPlayerClass()
142 #define AVPlayerItem getAVPlayerItemClass()
143 #define AVPlayerLayer getAVPlayerLayerClass()
144 #define AVURLAsset getAVURLAssetClass()
145 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
146
147 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
148 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
149 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
150 #define AVMediaTypeVideo getAVMediaTypeVideo()
151 #define AVMediaTypeAudio getAVMediaTypeAudio()
152 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
153 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
154 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
155 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
156 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
157 #define kCVPixelBufferPixelFormatTypeKey getkCVPixelBufferPixelFormatTypeKey()
158
159 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
160 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
161 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
162
163 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
164 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
165 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
166
167 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
168 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
169 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
170 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
171
172 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
173 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
174 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
175 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
176 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
177 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
178 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
179 #endif
180
181 #if ENABLE(AVF_CAPTIONS)
182 SOFT_LINK_POINTER(AVFoundation, AVURLAssetOutOfBandAlternateTracksKey, NSString*)
183 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackDisplayNameKey, NSString*)
184 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackExtendedLanguageTagKey, NSString*)
185 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIsDefaultKey, NSString*)
186 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackMediaCharactersticsKey, NSString*)
187 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackIdentifierKey, NSString*)
188 SOFT_LINK_POINTER(AVFoundation, AVOutOfBandAlternateTrackSourceKey, NSString*)
189 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, NSString*)
190 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, NSString*)
191
192 #define AVURLAssetOutOfBandAlternateTracksKey getAVURLAssetOutOfBandAlternateTracksKey()
193 #define AVOutOfBandAlternateTrackDisplayNameKey getAVOutOfBandAlternateTrackDisplayNameKey()
194 #define AVOutOfBandAlternateTrackExtendedLanguageTagKey getAVOutOfBandAlternateTrackExtendedLanguageTagKey()
195 #define AVOutOfBandAlternateTrackIsDefaultKey getAVOutOfBandAlternateTrackIsDefaultKey()
196 #define AVOutOfBandAlternateTrackMediaCharactersticsKey getAVOutOfBandAlternateTrackMediaCharactersticsKey()
197 #define AVOutOfBandAlternateTrackIdentifierKey getAVOutOfBandAlternateTrackIdentifierKey()
198 #define AVOutOfBandAlternateTrackSourceKey getAVOutOfBandAlternateTrackSourceKey()
199 #define AVMediaCharacteristicDescribesMusicAndSoundForAccessibility getAVMediaCharacteristicDescribesMusicAndSoundForAccessibility()
200 #define AVMediaCharacteristicTranscribesSpokenDialogForAccessibility getAVMediaCharacteristicTranscribesSpokenDialogForAccessibility()
201 #endif
202
203 #define kCMTimeZero getkCMTimeZero()
204
205 using namespace WebCore;
206
// KVO context values used to tell whether a change notification originated
// from the AVPlayerItem observers or the AVPlayer observers.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayer
};
211
// Observer object that bridges AVFoundation notifications and KVO callbacks
// back into MediaPlayerPrivateAVFoundationObjC. When legible output is
// available it also acts as the AVPlayerItemLegibleOutput push delegate.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Back-pointer; cleared via -disconnect before the player dies.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)playableKnown;
-(void)metadataLoaded;
-(void)didEnd:(NSNotification *)notification;
// NOTE(review): keyPath is untyped (implicitly id) — presumably NSString *; confirm before tightening.
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
232
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Receives AVAssetResourceLoader callbacks (delivered on
// globalLoaderDelegateQueue()) and forwards them to the owning player.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Cleared via -setCallback:0 in the player's destructor.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
242
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Pull delegate for the AVPlayerItemVideoOutput; tells the player when new
// video frames become available for painting.
@interface WebCoreAVFPullDelegate : NSObject<AVPlayerItemOutputPullDelegate> {
    MediaPlayerPrivateAVFoundationObjC *m_callback; // Cleared via -setCallback:0 in the player's destructor.
    dispatch_semaphore_t m_semaphore;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender;
- (void)outputSequenceWasFlushed:(AVPlayerItemOutput *)output;
@end
#endif
254
255 namespace WebCore {
256
257 static NSArray *assetMetadataKeyNames();
258 static NSArray *itemKVOProperties();
259
#if !LOG_DISABLED
// Renders a bool as a human-readable C string for log messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
266
267 #if ENABLE(ENCRYPTED_MEDIA_V2)
268 typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
269 static PlayerToPrivateMapType& playerToPrivateMap()
270 {
271     DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
272     return map;
273 };
274 #endif
275
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// Lazily-created serial queue on which all AVAssetResourceLoader delegate
// callbacks are delivered; shared by every player instance.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_once_t onceToken;
    static dispatch_queue_t queue;
    dispatch_once(&onceToken, ^{
        queue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
#endif
287
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily-created serial queue used by the video-output pull delegate; shared
// by every player instance.
static dispatch_queue_t globalPullDelegateQueue()
{
    static dispatch_once_t onceToken;
    static dispatch_queue_t queue;
    dispatch_once(&onceToken, ^{
        queue = dispatch_queue_create("WebCoreAVFPullDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return queue;
}
#endif
299
300 PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
301
302     return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
303 }
304
// Registers this engine with the MediaPlayer factory when AVFoundation can be
// soft-linked at runtime; unsupported entry points are passed as 0.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
        registrar(create, getSupportedTypes, supportsType, 0, 0, 0, supportsKeySystem);
}
310
// Constructs the private player. The heavy AVFoundation objects (AVPlayer,
// AVPlayerItem, layer, outputs) are created lazily later; here we only create
// the observer/delegate helpers and reset cached state.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_weakPtrFactory(this)
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    , m_videoOutputDelegate(adoptNS([[WebCoreAVFPullDelegate alloc] initWithCallback:this]))
    , m_videoOutputSemaphore(nullptr)
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTrack(0)
    , m_cachedDuration(MediaPlayer::invalidTime())
    , m_cachedRate(0)
    , m_pendingStatusChanges(0)
    , m_cachedItemStatus(MediaPlayerAVPlayerItemStatusDoesNotExist)
    , m_cachedLikelyToKeepUp(false)
    , m_cachedBufferEmpty(false)
    , m_cachedBufferFull(false)
    , m_cachedHasEnabledAudio(false)
#if ENABLE(IOS_AIRPLAY)
    , m_allowsWirelessVideoPlayback(true)
#endif
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Register in the global map so EME code can find this object by MediaPlayer.
    playerToPrivateMap().set(player, this);
#endif
}
341
// Clears the delegate back-pointers BEFORE cancelLoad() so no callback can
// arrive from a background queue while this object is mid-destruction.
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];
#endif
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    [m_videoOutputDelegate setCallback:0];
    [m_videoOutput setDelegate:nil queue:0];
#endif
    cancelLoad();
}
357
// Stops any in-flight loading and releases the AVAsset/AVPlayerItem/AVPlayer
// stack, unhooking every notification and KVO registration first so no stale
// callbacks fire into a torn-down object.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    if (m_avPlayerItem) {
        // Remove every KVO registration added in createAVPlayerItem().
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayerItem) {
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
#if ENABLE(IOS_AIRPLAY)
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive"];
#endif
        m_avPlayer = nil;
    }

    // Reset cached properties
    m_pendingStatusChanges = 0;
    m_cachedItemStatus = MediaPlayerAVPlayerItemStatusDoesNotExist;
    m_cachedSeekableRanges = nullptr;
    m_cachedLoadedRanges = nullptr;
    m_cachedTracks = nullptr;
    m_cachedHasEnabledAudio = false;
    m_cachedPresentationSize = FloatSize();
    m_cachedDuration = 0;

    setIgnoreLoadStateChanges(false);
}
412
413 bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
414 {
415     return m_videoLayer;
416 }
417
418 bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
419 {
420 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
421     if (m_videoOutput)
422         return true;
423 #endif
424     return m_imageGenerator;
425 }
426
// Chooses the context-painting backend at compile time: an
// AVPlayerItemVideoOutput when the SDK supports it, otherwise an
// AVAssetImageGenerator fallback.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
435
// Lazily creates the AVAssetImageGenerator used to snapshot frames when no
// video output is available. Zero time tolerance so generated images match
// the requested time exactly.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    // Needs a loaded asset; a no-op if a generator already exists.
    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
452
// Tears down the context renderer(s). destroyImageGenerator() early-returns
// when no generator was ever created, so calling both is safe.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    destroyVideoOutput();
#endif
    destroyImageGenerator();
}
460
461 void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
462 {
463     if (!m_imageGenerator)
464         return;
465
466     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());
467
468     m_imageGenerator = 0;
469 }
470
// Creates the AVPlayerLayer on the main thread. The work is deferred via
// callOnMainThread() and guarded by a weak pointer so a player destroyed
// before the task runs is never touched.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_videoLayer)
        return;

    auto weakThis = createWeakPtr();
    callOnMainThread([this, weakThis] {
        if (!weakThis)
            return;

        // Re-check: state may have changed between scheduling and execution.
        if (!m_avPlayer || m_videoLayer)
            return;

        m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
        [m_videoLayer.get() setPlayer:m_avPlayer.get()];
        [m_videoLayer.get() setBackgroundColor:cachedCGColor(Color::black, ColorSpaceDeviceRGB)];
#ifndef NDEBUG
        [m_videoLayer.get() setName:@"Video layer"];
#endif
        updateVideoLayerGravity();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());

        // Tell the client we can now render to a layer instead of a context.
        player()->mediaPlayerClient()->mediaPlayerRenderingModeChanged(player());
    });
}
496
497 void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
498 {
499     if (!m_videoLayer)
500         return;
501
502     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());
503
504     [m_videoLayer.get() setPlayer:nil];
505
506     m_videoLayer = 0;
507 }
508
509 bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
510 {
511     if (currentRenderingMode() == MediaRenderingToLayer)
512         return m_videoLayer && [m_videoLayer.get() isReadyForDisplay];
513
514     return m_videoFrameHasDrawn;
515 }
516
#if ENABLE(AVF_CAPTIONS)
// Maps a WebCore out-of-band track kind onto the AVFoundation media
// characteristics that describe it.
// FIXME: Match these to correct types:
static const NSArray* mediaDescriptionForKind(PlatformTextTrack::TrackKind kind)
{
    switch (kind) {
    case PlatformTextTrack::Description:
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, AVMediaCharacteristicDescribesMusicAndSoundForAccessibility, nil];
    case PlatformTextTrack::Forced:
        return [NSArray arrayWithObjects: AVMediaCharacteristicContainsOnlyForcedSubtitles, nil];
    case PlatformTextTrack::Caption:
    case PlatformTextTrack::Subtitle:
    default:
        // Captions, subtitles, and any unknown kind all map to spoken-dialog transcription.
        return [NSArray arrayWithObjects: AVMediaCharacteristicTranscribesSpokenDialogForAccessibility, nil];
    }
}
#endif
536
// Builds the AVURLAsset for |url|, attaching reference restrictions, HTTP
// header fields (Referer / User-Agent), and any out-of-band text tracks
// before handing the URL to AVFoundation.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p)", this);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);

    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];

#if ENABLE(AVF_CAPTIONS)
    const Vector<RefPtr<PlatformTextTrack>>& outOfBandTrackSources = player()->outOfBandTrackSources();
    if (!outOfBandTrackSources.isEmpty()) {
        // Use an autoreleased array; the previous +alloc/-init object was
        // never released and leaked under manual retain/release.
        NSMutableArray* outOfBandTracks = [NSMutableArray array];
        for (auto& trackSource : outOfBandTrackSources) {
            RetainPtr<CFStringRef> label = trackSource->label().createCFString();
            RetainPtr<CFStringRef> language = trackSource->language().createCFString();
            RetainPtr<CFStringRef> uniqueID = String::number(trackSource->uniqueId()).createCFString();
            RetainPtr<CFStringRef> url = trackSource->url().createCFString();
            [outOfBandTracks addObject:
                [NSDictionary dictionaryWithObjectsAndKeys:
                    reinterpret_cast<const NSString*>(label.get()), AVOutOfBandAlternateTrackDisplayNameKey,
                    reinterpret_cast<const NSString*>(language.get()), AVOutOfBandAlternateTrackExtendedLanguageTagKey,
                    [NSNumber numberWithBool: (trackSource->isDefault() ? YES : NO)], AVOutOfBandAlternateTrackIsDefaultKey,
                    reinterpret_cast<const NSString*>(uniqueID.get()), AVOutOfBandAlternateTrackIdentifierKey,
                    reinterpret_cast<const NSString*>(url.get()), AVOutOfBandAlternateTrackSourceKey,
                    mediaDescriptionForKind(trackSource->kind()), AVOutOfBandAlternateTrackMediaCharactersticsKey,
                    nil]];
        }

        [options.get() setObject: outOfBandTracks forKey: AVURLAssetOutOfBandAlternateTracksKey];
    }
#endif

    NSURL *cocoaURL = URL(ParsedURLString, url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if HAVE(AVFOUNDATION_LOADER_DELEGATE)
    // Route resource-loading callbacks to our delegate on the shared serial queue.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
598
// Lazily creates the AVPlayer, registers KVO for "rate" (and AirPlay state on
// iOS), and attaches an already-created player item if one exists.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"externalPlaybackActive" options:NSKeyValueObservingOptionNew context:(void *)MediaPlayerAVFoundationObservationContextPlayer];
#endif

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Let AVFoundation apply default media selections (captions, audio) itself.
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:YES];
#endif

#if ENABLE(IOS_AIRPLAY)
    [m_avPlayer.get() setAllowsExternalPlayback:m_allowsWirelessVideoPlayback];
#endif

    if (m_avPlayerItem)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

    setDelayCallbacks(false);
}
627
// Lazily creates the AVPlayerItem from the asset, registers the
// played-to-end notification and KVO on every property in itemKVOProperties(),
// attaches the item to the player, and (where supported) adds a legible output
// for in-band captions.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // NSKeyValueObservingOptionPrior adds "will change" callbacks to the "did change" ones.
    NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew | NSKeyValueObservingOptionPrior;
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:options context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    if (m_avPlayer)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Deliver attributed caption strings to our observer on the main queue,
    // a couple of seconds ahead of playback so cues can be scheduled.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:[NSArray array]]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

    setDelayCallbacks(false);
}
663
// Kicks off an asynchronous "playable" check exactly once per load; the
// observer is told when the answer is known.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        [m_objcObserver.get() playableKnown];
    }];
}
676
// Asks the asset to asynchronously load all metadata keys; the observer is
// notified when loading completes.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);
    // -loadValuesAsynchronouslyForKeys: retains its argument, so the extra
    // -retain that used to be here over-retained the key array for no reason.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{
        [m_objcObserver.get() metadataLoaded];
    }];
}
684
// Maps the cached AVPlayerItem status plus buffering flags onto the
// cross-platform ItemStatus enum.
// NOTE(review): m_cachedItemStatus is compared against AVPlayerItemStatus*
// constants here but initialized from MediaPlayerAVPlayerItemStatus* in the
// constructor — presumably the value sets line up; confirm before refactoring.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    if (m_cachedItemStatus == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (m_cachedItemStatus == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    // Buffering states checked from best to worst; first match wins.
    if (m_cachedLikelyToKeepUp)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if (m_cachedBufferFull)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if (m_cachedBufferEmpty)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
703
704 PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
705 {
706     LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
707     PlatformMedia pm;
708     pm.type = PlatformMedia::AVFoundationMediaPlayerType;
709     pm.media.avfMediaPlayer = m_avPlayer.get();
710     return pm;
711 }
712
// Returns the AVPlayerLayer used for compositing (nil until createVideoLayer() runs).
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    return m_videoLayer.get();
}
717
// Shows or hides the video layer. The CATransaction wrapper with actions
// disabled prevents an implicit fade animation when toggling visibility.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
726     
// Starts playback by setting the player's rate to the requested rate.
// Callbacks are delayed around the rate change because setting the rate
// generates several KVO notifications that should be coalesced.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    // Cache the rate we are about to set so rate() can answer without querying the player.
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
738
// Pauses playback by setting the player's rate to 0. Callbacks are delayed
// around the rate change because it generates several KVO notifications.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    m_cachedRate = 0;
    // -setRate: takes a float; passing nil (a null pointer) here was wrong —
    // pause is expressed as a rate of 0, matching the cached value above.
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
750
// Returns the media duration in seconds, infinity for indefinite durations
// (e.g. live streams), or MediaPlayer::invalidTime() when unknown.
float MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will
    // fetch the answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
        return MediaPlayer::invalidTime();

    // Prefer the player item once it is ready to play; some assets never
    // report a duration of their own.
    CMTime mediaDuration;
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        mediaDuration = [m_avPlayerItem.get() duration];
    else
        mediaDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(mediaDuration))
        return narrowPrecisionToFloat(CMTimeGetSeconds(mediaDuration));

    if (CMTIME_IS_INDEFINITE(mediaDuration))
        return std::numeric_limits<float>::infinity();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %.0f", this, MediaPlayer::invalidTime());
    return MediaPlayer::invalidTime();
}
776
// Returns the current playback position in seconds, never negative; 0 when
// metadata or the player item is unavailable or the time is non-numeric.
float MediaPlayerPrivateAVFoundationObjC::currentTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return 0;

    CMTime playerTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerTime))
        return 0;

    // Clamp to zero so we never report a negative playhead position.
    return std::max(narrowPrecisionToFloat(CMTimeGetSeconds(playerTime)), 0.0f);
}
788
// Seeks the player item to 'time' (seconds) within the given tolerances.
// The completion handler hops back to the main thread and uses a weak pointer
// so a player destroyed mid-seek is not touched.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(double time, double negativeTolerance, double positiveTolerance)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Use the same 600 timescale as the rest of this file's CMTime conversions.
    CMTime cmTime = CMTimeMakeWithSeconds(time, 600);
    CMTime cmBefore = CMTimeMakeWithSeconds(negativeTolerance, 600);
    CMTime cmAfter = CMTimeMakeWithSeconds(positiveTolerance, 600);

    auto weakThis = createWeakPtr();

    [m_avPlayerItem.get() seekToTime:cmTime toleranceBefore:cmBefore toleranceAfter:cmAfter completionHandler:^(BOOL finished) {
        // The handler may run on an arbitrary queue; marshal to the main thread.
        callOnMainThread([weakThis, finished] {
            auto _this = weakThis.get();
            if (!_this)
                return;

            _this->seekCompleted(finished);
        });
    }];

    setDelayCallbacks(false);
}
812
// Forwards the requested volume to the AVPlayer once metadata is available.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
    if (!metaDataAvailable())
        return;

    [m_avPlayer.get() setVolume:volume];
}
820
// Logs the caption-visibility request; no platform work is performed here —
// this body only emits a LOG message.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    // UNUSED_PARAM keeps builds where LOG() compiles away warning-free.
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
830
// Re-applies the requested rate to the AVPlayer (used when the requested rate
// changes), caching it so rate() can answer without querying the player.
void MediaPlayerPrivateAVFoundationObjC::updateRate()
{
    // Delay callbacks: setting the rate generates several KVO notifications.
    setDelayCallbacks(true);
    m_cachedRate = requestedRate();
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
838
// Reports the last rate we set on the player (0 before metadata is available).
float MediaPlayerPrivateAVFoundationObjC::rate() const
{
    return metaDataAvailable() ? m_cachedRate : 0;
}
846
// Converts the cached AVPlayerItem loadedTimeRanges into PlatformTimeRanges,
// skipping invalid or empty ranges. Returns an empty set without a player item.
std::unique_ptr<PlatformTimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    auto bufferedRanges = PlatformTimeRanges::create();

    if (!m_avPlayerItem)
        return bufferedRanges;

    for (NSValue *rangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange loadedRange = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(loadedRange) || CMTIMERANGE_IS_EMPTY(loadedRange))
            continue;

        float rangeStart = narrowPrecisionToFloat(CMTimeGetSeconds(loadedRange.start));
        float rangeEnd = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(loadedRange)));
        bufferedRanges->add(rangeStart, rangeEnd);
    }

    return bufferedRanges;
}
864
// Returns the earliest start time (seconds) across all valid cached seekable
// ranges, or 0 when there are no usable ranges.
double MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    if (!m_cachedSeekableRanges || ![m_cachedSeekableRanges count])
        return 0;

    double earliestStart = std::numeric_limits<double>::infinity();
    bool foundValidRange = false;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange seekableRange = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(seekableRange) || CMTIMERANGE_IS_EMPTY(seekableRange))
            continue;

        foundValidRange = true;
        earliestStart = std::min(earliestStart, CMTimeGetSeconds(seekableRange.start));
    }

    return foundValidRange ? earliestStart : 0;
}
884
// Returns the latest end time (seconds) across all valid seekable ranges,
// querying the player item directly if the cache has not been populated yet.
double MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    if (!m_cachedSeekableRanges)
        m_cachedSeekableRanges = [m_avPlayerItem seekableTimeRanges];

    double latestEnd = 0;
    for (NSValue *rangeValue in m_cachedSeekableRanges.get()) {
        CMTimeRange seekableRange = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(seekableRange) || CMTIMERANGE_IS_EMPTY(seekableRange))
            continue;

        latestEnd = std::max(latestEnd, CMTimeGetSeconds(CMTimeRangeGetEnd(seekableRange)));
    }

    return latestEnd;
}
902
// Returns the latest end time (seconds) across all valid loaded (buffered)
// ranges, or 0 when nothing has been loaded.
float MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
#if !PLATFORM(IOS) && __MAC_OS_X_VERSION_MIN_REQUIRED <= 1080
    // AVFoundation on Mountain Lion will occasionally not send a KVO notification
    // when loadedTimeRanges changes when there is no video output. In that case
    // update the cached value explicitly.
    if (!hasLayerRenderer() && !hasContextRenderer())
        m_cachedLoadedRanges = [m_avPlayerItem loadedTimeRanges];
#endif

    if (!m_cachedLoadedRanges)
        return 0;

    float maxTimeLoaded = 0;
    for (NSValue *thisRangeValue in m_cachedLoadedRanges.get()) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        // Skip invalid or empty ranges rather than letting them poison the max.
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        float endOfRange = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange)));
        if (maxTimeLoaded < endOfRange)
            maxTimeLoaded = endOfRange;
    }

    return maxTimeLoaded;   
}
929
// Sums the sample data lengths of every cached track to approximate the total
// media size in bytes; 0 before metadata is available.
unsigned long long MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    long long byteTotal = 0;
    for (AVPlayerItemTrack *itemTrack in m_cachedTracks.get())
        byteTotal += [[itemTrack assetTrack] totalSampleDataLength];

    return byteTotal;
}
941
// Stores the AVAsset (typed 'id' so callers need not see the AVFoundation
// headers) in the RetainPtr member.
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
946
// Reports the most pessimistic load state across all the metadata keys we
// requested: Failed/Cancelled/Loading win over Playable/Loaded.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:nil];

        switch (keyStatus) {
        case AVKeyValueStatusFailed:
            // At least one key could not be loaded.
            return MediaPlayerAVAssetStatusFailed;
        case AVKeyValueStatusCancelled:
            // Loading of at least one key was cancelled.
            return MediaPlayerAVAssetStatusCancelled;
        case AVKeyValueStatusLoaded:
            // This key is ready; keep checking the rest.
            break;
        default:
            // At least one key is not loaded yet.
            return MediaPlayerAVAssetStatusLoading;
        }
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
970
// Paints the current video frame into the graphics context, preferring the
// AVPlayerItemVideoOutput path when it has a frame and falling back to the
// AVAssetImageGenerator path otherwise.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // Delay callbacks and shield WebCore from any ObjC exceptions thrown while painting.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
    if (videoOutputHasAvailableFrame())
        paintWithVideoOutput(context, rect);
    else
#endif
        paintWithImageGenerator(context, rect);

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    m_videoFrameHasDrawn = true;
}
991
// Entry point for context painting requests from WebCore.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    paintCurrentFrameInContext(context, rect);
}
1003
// Paints the frame at the current time using AVAssetImageGenerator.
// Does nothing if no image could be generated.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
{
    RetainPtr<CGImageRef> frameImage = createImageForTimeInRect(currentTime(), rect);
    if (!frameImage)
        return;

    GraphicsContextStateSaver stateSaver(*context);
    // Flip the coordinate system: CGContextDrawImage uses a bottom-left origin.
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);
    IntRect destinationRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
    CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, destinationRect.width(), destinationRect.height()), frameImage.get());
    // Release the image before the state saver restores the context.
    frameImage = 0;
}
1017
// Returns the set of MIME types AVFoundation reports as playable, building the
// static cache lazily on first use.
static HashSet<String> mimeTypeCache()
{
    DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add(mimeType);

    return cache;
}
1033
// Generates a CGImage of the frame at 'time' (seconds), sized to fit 'rect',
// using AVAssetImageGenerator. Returns a null RetainPtr on failure.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Constrain the generator's output to the paint rect's size.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> rawImage = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);
    // Re-wrap the image in the device RGB color space before handing it out.
    RetainPtr<CGImageRef> image = adoptCF(CGImageCreateCopyWithColorSpace(rawImage.get(), deviceRGBColorSpaceRef()));

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
1055
// Copies the cached set of AVFoundation-playable MIME types into the out-param.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}
1060
1061 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Only the Apple FairPlay Streaming key system identifiers are recognized.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.fps") || equalIgnoringCase(keySystem, "com.apple.fps.1_0");
}
1068 #endif
1069
// Answers canPlayType()-style queries for this media engine: not supported,
// maybe supported, or supported.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const MediaEngineSupportParameters& parameters)
{
#if ENABLE(ENCRYPTED_MEDIA)
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (!parameters.keySystem.isNull() && !parameters.keySystem.isEmpty()) {
        // If keySystem contains an unrecognized or unsupported Key System, return the empty string
        if (!keySystemIsSupported(parameters.keySystem))
            return MediaPlayer::IsNotSupported;

        // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
        // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)
    }

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
#endif

#if ENABLE(MEDIA_SOURCE)
    // Media Source playback is handled by a different media engine.
    if (parameters.isMediaSource)
        return MediaPlayer::IsNotSupported;
#endif

    if (!mimeTypeCache().contains(parameters.type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (parameters.codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Ask AVFoundation whether the full "type; codecs" string is playable.
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)parameters.type, (NSString *)parameters.codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
1106
// Reports whether this engine can handle the given EME key system, optionally
// constrained to a specific container MIME type. Always false when encrypted
// media support is compiled out.
bool MediaPlayerPrivateAVFoundationObjC::supportsKeySystem(const String& keySystem, const String& mimeType)
{
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (keySystem.isEmpty())
        return false;

    if (!keySystemIsSupported(keySystem))
        return false;

    if (!mimeType.isEmpty() && !mimeTypeCache().contains(mimeType))
        return false;

    return true;
#else
    UNUSED_PARAM(keySystem);
    UNUSED_PARAM(mimeType);
    return false;
#endif
}
1125
1126 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
// AVAssetResourceLoader delegate hook: decides whether WebCore will service
// this loading request. "skd" URLs are FairPlay key requests and are routed
// through the EME keyNeeded path; everything else goes to a resource loader.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Little-endian 32-bit length prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        // Copy the UTF-16 keyURI after the length prefix.
        // NOTE(review): sizeof(unsigned char) is 1, so the division is a no-op
        // and the full character count is passed — confirm this matches
        // Uint16Array::setRange's expected element count.
        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        keyURIArray->setRange(keyURI.deprecatedCharacters(), keyURI.length() / sizeof(unsigned char), 0);

#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so the CDM session can answer it later.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }
#endif

    // Non-key requests are serviced by a WebCore resource loader.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
1162
// Forwards an authentication challenge from AVFoundation to the MediaPlayer
// client; returns whether AVFoundation should wait for a response.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForResponseToAuthenticationChallenge(NSURLAuthenticationChallenge* nsChallenge)
{
#if USE(CFNETWORK)
    UNUSED_PARAM(nsChallenge);
    // FIXME: <rdar://problem/15799844>
    return false;
#else
    AuthenticationChallenge challenge(nsChallenge);

    return player()->shouldWaitForResponseToAuthenticationChallenge(challenge);
#endif
}
1175
// AVAssetResourceLoader delegate hook: stop the resource loader servicing a
// cancelled request, if one exists. (A previously computed URL scheme local
// was never used and has been removed.)
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
1185
// Drops the bookkeeping entry for a request whose loading has stopped.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
1190 #endif
1191
// The engine is usable only when both soft-linked frameworks load successfully.
// AVFoundation is checked first so CoreMedia is not loaded needlessly.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    if (!AVFoundationLibrary())
        return false;
    if (!CoreMediaLibrary())
        return false;
    return true;
}
1196
// Should map a wall-clock time value onto the media timeline; currently an
// identity mapping (see FIXME below).
float MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(float timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
1205
// Applies the appropriate video gravity to the layer: aspect-preserving when
// the player should maintain aspect ratio, stretch-to-fill otherwise.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    NSString *gravity = AVLayerVideoGravityResize;
    if (shouldMaintainAspectRatio())
        gravity = AVLayerVideoGravityResizeAspect;

    // Apply without triggering an implicit CA animation.
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:gravity];
    [CATransaction commit];
}
1217
// Re-derives the cached media characteristics (hasVideo/hasAudio/captions),
// refreshes the track lists, and fires change notifications when the tracks
// collection changes.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so we can detect a change below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Coalesce the several notifications generated below into one.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly fequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        setHasVideo([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual] count]);
        setHasAudio([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible] count]);
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
    } else {
        // With a player item, derive the characteristics from the enabled tracks only.
        bool hasVideo = false;
        bool hasAudio = false;
        for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                if ([[assetTrack mediaType] isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([[assetTrack mediaType] isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                }
            }
        }
        setHasVideo(hasVideo);
        setHasAudio(hasAudio);


#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer the legible media selection group for caption discovery when available.
    if (AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia()) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Fall back to legacy closed-caption tracks when no playable legible options exist.
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "WebCoreAVFMovieObserver:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    // Fire characteristicsChanged if the primary audio language differs from before.
    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}
1297
1298 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItemTracks of the given media type against the existing
// track-private items: invokes the MediaPlayer removed/added member-function
// callbacks for tracks that disappeared or appeared, and updates 'oldItems'
// in place to reflect the new set.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*removedFunction)(PassRefT), void (MediaPlayer::*addedFunction)(PassRefT))
{
    // The subset of 'tracks' whose asset track matches the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    typedef Vector<RefT> ItemVector;
    // Platform tracks backing the items we currently hold.
    for (auto i = oldItems.begin(); i != oldItems.end(); ++i)
        [oldTracks addObject:(*i)->playerItemTrack()];

    // removedTracks = oldTracks - newTracks.
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    // addedTracks = newTracks - oldTracks.
    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the existing items into those that survive and those removed.
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto i = oldItems.begin(); i != oldItems.end(); ++i) {
        if ([removedTracks containsObject:(*i)->playerItemTrack()])
            removedItems.append(*i);
        else
            replacementItems.append(*i);
    }

    // Wrap each newly appeared platform track in a fresh item.
    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    // Notify the player about removals first, then additions.
    for (auto i = removedItems.begin(); i != removedItems.end(); ++i)
        (player->*removedFunction)(*i);

    for (auto i = addedItems.begin(); i != addedItems.end(); ++i)
        (player->*addedFunction)(*i);
}
1339
// Syncs m_audioTracks with the current audio AVPlayerItemTracks.
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeAudioTrack, &MediaPlayer::addAudioTrack);
}
1344
// Syncs m_videoTracks with the current video AVPlayerItemTracks.
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
    determineChangedTracksFromNewTracksAndOldItems(m_cachedTracks.get(), AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::removeVideoTrack, &MediaPlayer::addVideoTrack);
}
1349 #endif // ENABLE(VIDEO_TRACK)
1350
// Recomputes and caches the natural size of the media from the visual tracks
// (or the presentation size when no tracks are reported yet).
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    // Some assets don't report track properties until they are completely ready to play, but we
    // want to report a size as early as possible so use presentationSize when an asset has no tracks.
    if (m_avPlayerItem && ![m_cachedTracks count]) {
        setNaturalSize(roundedIntSize(m_cachedPresentationSize));
        return;
    }

    // AVAsset's 'naturalSize' property only considers the movie's first video track, so we need to compute
    // the union of all visual track rects.
    CGRect trackUnionRect = CGRectZero;
    for (AVPlayerItemTrack *track in m_cachedTracks.get()) {
        AVAssetTrack* assetTrack = [track assetTrack];
        CGSize trackSize = [assetTrack naturalSize];
        CGRect trackRect = CGRectMake(0, 0, trackSize.width, trackSize.height);
        // Apply the track's own transform (rotation/translation) before unioning.
        trackUnionRect = CGRectUnion(trackUnionRect, CGRectApplyAffineTransform(trackRect, [assetTrack preferredTransform]));
    }

    // The movie is always displayed at 0,0 so move the track rect to the origin before using width and height.
    trackUnionRect = CGRectOffset(trackUnionRect, trackUnionRect.origin.x, trackUnionRect.origin.y);
    
    // Also look at the asset's preferred transform so we account for a movie matrix.
    CGSize naturalSize = CGSizeApplyAffineTransform(trackUnionRect.size, [m_avAsset.get() preferredTransform]);

    // Cache the natural size (setNaturalSize will notify the player if it has changed).
    setNaturalSize(IntSize(naturalSize));
}
1382
#if PLATFORM(IOS)
// FIXME: Implement for iOS in WebKit System Interface.
// iOS-only stub: always returns nil until a real implementation exists.
static inline NSURL *wkAVAssetResolvedURL(AVAsset*)
{
    return nil;
}
#endif
1390
// True when the URL the asset actually resolved to has the same scheme, host
// and port as the URL that was originally requested (i.e. no cross-origin
// redirect occurred).
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset)
        return false;
    
    RefPtr<SecurityOrigin> resolvedOrigin = SecurityOrigin::create(URL(wkAVAssetResolvedURL(m_avAsset.get())));
    RefPtr<SecurityOrigin> requestedOrigin = SecurityOrigin::createFromString(assetURL());
    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
1400
1401 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
// Lazily creates the AVPlayerItemVideoOutput used to pull pixel buffers for
// painting, attaches it to the player item, and waits for the first frame
// notification. No-op without a player item or if the output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if USE(VIDEOTOOLBOX)
    // With VideoToolbox we request 4:2:2 YCbCr and convert later (see createPixelBuffer).
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_422YpCbCr8) };
#else
    // Without VideoToolbox, request BGRA directly so buffers are paint-ready.
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([[getAVPlayerItemVideoOutputClass() alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_videoOutput setDelegate:m_videoOutputDelegate.get() queue:globalPullDelegateQueue()];

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    waitForVideoOutputMediaDataWillChange();

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
1426
// Detaches the video output from the player item (if any) and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = 0;
}
1438
// Pulls the pixel buffer for the current item time from the video output,
// converting it to 32BGRA via VideoToolbox when that path is enabled.
// Returns null when no new buffer is available for the current time.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if USE(VIDEOTOOLBOX)
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Convert the output buffer to 32BGRA for painting.
    CVPixelBufferRef outputBuffer;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), kCVPixelFormatType_32BGRA, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
1479
// True when the video-output paint path has something to draw: either a
// previously captured image or a fresh pixel buffer for the current time.
// Creates the video output lazily if needed.
bool MediaPlayerPrivateAVFoundationObjC::videoOutputHasAvailableFrame()
{
    if (!m_avPlayerItem)
        return false;

    // A cached last image counts as an available frame.
    if (m_lastImage)
        return true;

    if (!m_videoOutput)
        createVideoOutput();

    return [m_videoOutput hasNewPixelBufferForItemTime:[m_avPlayerItem currentTime]];
}
1493
// CGDataProvider "get byte pointer" callback: locks the pixel buffer's base
// address for read-only access and returns it. Balanced by the release
// callback below.
static const void* CVPixelBufferGetBytePointerCallback(void* info)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return CVPixelBufferGetBaseAddress(pixelBuffer);
}
1500
// CGDataProvider "release byte pointer" callback: unlocks the base address
// locked by CVPixelBufferGetBytePointerCallback.
static void CVPixelBufferReleaseBytePointerCallback(void* info, const void*)
{
    CVPixelBufferRef pixelBuffer = static_cast<CVPixelBufferRef>(info);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
1506
// CGDataProvider teardown callback: releases the CVPixelBuffer retained in
// createImageFromPixelBuffer when the data provider itself is destroyed.
static void CVPixelBufferReleaseInfoCallback(void* info)
{
    CFRelease(static_cast<CVPixelBufferRef>(info));
}
1512
// Wraps a 32BGRA CVPixelBuffer in a CGImage without copying the pixel data: a direct
// CGDataProvider reads straight out of the (locked) pixel buffer.
static RetainPtr<CGImageRef> createImageFromPixelBuffer(CVPixelBufferRef pixelBuffer)
{
    // pixelBuffer will be of type kCVPixelFormatType_32BGRA.
    ASSERT(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA);

    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t byteLength = CVPixelBufferGetDataSize(pixelBuffer);
    // BGRA in memory == ARGB read as a 32-bit little-endian word, hence
    // ByteOrder32Little + AlphaFirst. NOTE(review): kCGImageAlphaFirst assumes the
    // buffer carries meaningful (non-premultiplied) alpha — verify for video frames.
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst;

    CFRetain(pixelBuffer); // Balanced by CVPixelBufferReleaseInfoCallback in providerCallbacks.
    // Direct provider: get/release byte-pointer callbacks lock/unlock the buffer;
    // the release-info callback balances the CFRetain above.
    CGDataProviderDirectCallbacks providerCallbacks = { 0, CVPixelBufferGetBytePointerCallback, CVPixelBufferReleaseBytePointerCallback, 0, CVPixelBufferReleaseInfoCallback };
    RetainPtr<CGDataProviderRef> provider = adoptCF(CGDataProviderCreateDirect(pixelBuffer, byteLength, &providerCallbacks));

    return adoptCF(CGImageCreate(width, height, 8, 32, bytesPerRow, deviceRGBColorSpaceRef(), bitmapInfo, provider.get(), NULL, false, kCGRenderingIntentDefault));
}
1530
// Refreshes m_lastImage from the video output's current pixel buffer.
// copyPixelBufferForItemTime:itemTimeForDisplay: (via createPixelBuffer) may return nil
// when the buffer for the requested time was already vended; in that case the previous
// image is deliberately kept so the last valid frame (if any) continues to be shown.
void MediaPlayerPrivateAVFoundationObjC::updateLastImage()
{
    if (RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer())
        m_lastImage = createImageFromPixelBuffer(pixelBuffer.get());
}
1541
// Paints the current video frame (from the AVPlayerItemVideoOutput path) into
// outputRect of the given graphics context. Does nothing if no frame has ever been
// captured.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& outputRect)
{
    updateLastImage();

    if (m_lastImage) {
        GraphicsContextStateSaver stateSaver(*context);

        // Source rect: the whole native image.
        IntRect imageRect(0, 0, CGImageGetWidth(m_lastImage.get()), CGImageGetHeight(m_lastImage.get()));

        context->drawNativeImage(m_lastImage.get(), imageRect.size(), ColorSpaceDeviceRGB, outputRect, imageRect);

        // If we have created an AVAssetImageGenerator in the past due to m_videoOutput not having an available
        // video frame, destroy it now that it is no longer needed.
        if (m_imageGenerator)
            destroyImageGenerator();
    }
}
1559
// Returns the CGImage for the current playback time, refreshing the cached frame
// first. May return null if no frame has ever been captured.
PassNativeImagePtr MediaPlayerPrivateAVFoundationObjC::nativeImageForCurrentTime()
{
    updateLastImage();
    return m_lastImage.get();
}
1565
// Blocks the calling thread (up to 1 s) until the video output reports new media data;
// outputMediaDataWillChange() signals the semaphore from the output's callback.
void MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange()
{
    if (!m_videoOutputSemaphore)
        m_videoOutputSemaphore = dispatch_semaphore_create(0);

    // Advance interval 0: notify as soon as any new media data is available.
    [m_videoOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0];

    // Wait for 1 second (dispatch_time base 0 == DISPATCH_TIME_NOW).
    long result = dispatch_semaphore_wait(m_videoOutputSemaphore, dispatch_time(0, 1 * NSEC_PER_SEC));

    // Non-zero result means the wait timed out rather than being signaled.
    if (result)
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::waitForVideoOutputMediaDataWillChange(%p) timed out", this);
}
1579
// AVPlayerItemVideoOutput callback: wakes any thread parked in
// waitForVideoOutputMediaDataWillChange().
void MediaPlayerPrivateAVFoundationObjC::outputMediaDataWillChange(AVPlayerItemVideoOutput*)
{
    dispatch_semaphore_signal(m_videoOutputSemaphore);
}
1584 #endif
1585
1586 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Parses untrusted EME initData into its three components. Returns false on any
// malformed or truncated input. Expected layout (lengths are little-endian uint32;
// keyURI and keyID are UTF-16 code units):
// [4 bytes: keyURI length][N bytes: keyURI][4 bytes: keyID (a.k.a. contentID) length][N bytes: keyID][4 bytes: certificate length][N bytes: certificate]
bool MediaPlayerPrivateAVFoundationObjC::extractKeyURIKeyIDAndCertificateFromInitData(Uint8Array* initData, String& keyURI, String& keyID, RefPtr<Uint8Array>& certificate)
{
    if (initData->byteLength() < 4)
        return false;

    RefPtr<ArrayBuffer> initDataBuffer = initData->buffer();

    // Use a DataView to read uint32 values from the buffer, as Uint32Array requires the reads be aligned on 4-byte boundaries.
    RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
    uint32_t offset = 0;
    bool status = true;

    // All bounds checks below compare the field length against the remaining bytes
    // (length - offset) instead of computing offset + fieldLength, which could wrap
    // around in 32-bit arithmetic for a hostile length value. offset <= length is an
    // invariant: DataView::get fails (status == false) on any out-of-bounds read.
    uint32_t keyURILength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || keyURILength > initData->length() - offset)
        return false;

    // NOTE(review): keyURILength is a byte count but is passed as the element count
    // here (matching the original code); only keyURILength / 2 elements are consumed.
    RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, offset, keyURILength);
    if (!keyURIArray)
        return false;

    keyURI = String(keyURIArray->data(), keyURILength / sizeof(unsigned short));
    offset += keyURILength;

    uint32_t keyIDLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || keyIDLength > initData->length() - offset)
        return false;

    RefPtr<Uint16Array> keyIDArray = Uint16Array::create(initDataBuffer, offset, keyIDLength);
    if (!keyIDArray)
        return false;

    keyID = String(keyIDArray->data(), keyIDLength / sizeof(unsigned short));
    offset += keyIDLength;

    uint32_t certificateLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || certificateLength > initData->length() - offset)
        return false;

    certificate = Uint8Array::create(initDataBuffer, offset, certificateLength);
    if (!certificate)
        return false;

    return true;
}
1636 #endif
1637
1638 #if ENABLE(ENCRYPTED_MEDIA)
// ENCRYPTED_MEDIA (v1) entry point: parses initData, asks AVFoundation for a streaming
// content key request, and forwards it to the page via keyMessage(). Key-request errors
// are reported through keyError() (and still return NoError, since the exception path
// is only for synchronous failures like an unsupported key system).
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // The resource-loader delegate must already have parked a loading request for this
    // key URI; otherwise there is nothing to answer.
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    // The keyID string, UTF-8 encoded, serves as the asset/content identifier.
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface the underlying (domain-specific) error code to the page.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyURIToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
1680
// ENCRYPTED_MEDIA (v1): delivers key bytes from the page to the pending
// AVAssetResourceLoadingRequest for this session, completing the key load.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // Session must have been created by a prior generateKeyRequest().
    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Answer the parked loading request with the key data and finish it.
    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    // initData is not needed on this path; the session already identifies the request.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
1701
// ENCRYPTED_MEDIA (v1): abandons the pending key request for the given session by
// dropping its parked AVAssetResourceLoadingRequest.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // Unknown session — nothing outstanding to cancel.
    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    m_sessionIDToRequestMap.remove(sessionID);
    return MediaPlayer::NoError;
}
1713 #endif
1714
1715 #if ENABLE(ENCRYPTED_MEDIA_V2)
// ENCRYPTED_MEDIA_V2: removes and returns the parked loading request for keyURI
// (null RetainPtr if none), transferring ownership to the caller.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForKeyURI(const String& keyURI)
{
    return m_keyURIToRequestMap.take(keyURI);
}
1720
// ENCRYPTED_MEDIA_V2: creates a CDM session bound to this player, or null when the
// requested key system is not one this engine supports.
std::unique_ptr<CDMSession> MediaPlayerPrivateAVFoundationObjC::createSession(const String& keySystem)
{
    if (keySystemIsSupported(keySystem))
        return std::make_unique<CDMSessionAVFoundationObjC>(this);

    return nullptr;
}
1728 #endif
1729
1730 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Reconciles the cached legacy (CEA-608 style) closed-caption tracks against the
// AVPlayerItem's current tracks: existing tracks still present are kept, vanished ones
// are reported as removed, and new caption tracks are created.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Assume every current text track was removed; matched tracks are taken back off
    // this list below, so whatever remains really did disappear.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    for (AVPlayerItemTrack *playerItemTrack in m_cachedTracks.get()) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (!removedTextTracks[i - 1]->isLegacyClosedCaptionsTrack())
                continue;

            // Index into removedTextTracks, not m_textTracks: after an element has been
            // removed from removedTextTracks in a previous iteration the two vectors no
            // longer share indices (matches processMediaSelectionOptions below).
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
1766
1767 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Returns the asset's legible (caption/subtitle) media selection group, or nil when the
// asset is absent or its selection-option metadata has not finished loading — querying
// the group before the key is loaded would block or return stale data.
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    if (!m_avAsset)
        return nil;

    if ([m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
}
1778
// Reconciles our text tracks against the asset's legible media selection options:
// options we already track are kept, tracks whose option vanished are reported as
// removed, and new options become new tracks.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every current track was removed; matched tracks are taken back off the list.
    Vector<RefPtr<InbandTextTrackPrivateAVF>> removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) does not disturb unvisited indices.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            // Legacy closed-caption tracks are handled elsewhere; skip them here.
             if (removedTextTracks[i - 1]->isLegacyClosedCaptionsTrack())
                 continue;

            RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
            if ([track->mediaSelectionOption() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

#if ENABLE(AVF_CAPTIONS)
        // Ignore out-of-band tracks that we passed to AVFoundation so we do not double-count them
        if ([option outOfBandSource])
            continue;
#endif

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
1821
// Forwards legible-output cue data (attributed strings for `time`) to the currently
// selected in-band text track; dropped if no track is selected.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, double time)
{
    if (!m_currentTrack)
        return;

    // NSArray* is toll-free bridged to CFArrayRef for the platform-neutral track API.
    m_currentTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), time);
}
1829
// Clears any partially-delivered cue state on the current text track, e.g. after a
// seek flushes the legible output's sequence.
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    if (!m_currentTrack)
        return;

    m_currentTrack->resetCueValues();
}
1839 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1840
// Selects `track` (or deselects all text tracks when null). Legacy closed-caption
// tracks are toggled through the AVPlayer's caption display; selection-group tracks
// are chosen via the legible media selection group.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");

    m_currentTrack = track;

    if (track) {
        if (track->isLegacyClosedCaptionsTrack())
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselect both mechanisms: clear any selection-group choice and turn off
        // legacy caption display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:0 inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }

}
1865
// Returns (and caches in m_languageOfPrimaryAudioTrack — presumably a mutable member,
// since this method is const) the BCP-47-ish language of the primary audio track:
// first from the selected audible media-selection option, else from a sole audio track.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Serve the cached answer if we already computed one.
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        // Zero or multiple audio tracks: no single "primary" language exists.
        m_languageOfPrimaryAudioTrack = emptyString();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack == emptyString())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
1907
1908 #if ENABLE(IOS_AIRPLAY)
// True when the AVPlayer is currently routing video to an external (AirPlay/TV-out)
// target.
bool MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless() const
{
    if (!m_avPlayer)
        return false;

    bool wirelessTarget = [m_avPlayer.get() isExternalPlaybackActive];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::isCurrentPlaybackTargetWireless(%p) - returning %s", this, boolString(wirelessTarget));
    return wirelessTarget;
}
1918
// Maps the WebKitSystemInterface external-device type for the current AVPlayer onto
// the MediaPlayer target-type enum (None / AirPlay / TVOut).
MediaPlayer::WirelessPlaybackTargetType MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetType() const
{
    if (!m_avPlayer)
        return MediaPlayer::TargetTypeNone;

    switch (wkExernalDeviceTypeForPlayer(m_avPlayer.get())) {
    case wkExternalPlaybackTypeNone:
        return MediaPlayer::TargetTypeNone;
    case wkExternalPlaybackTypeAirPlay:
        return MediaPlayer::TargetTypeAirPlay;
    case wkExternalPlaybackTypeTVOut:
        return MediaPlayer::TargetTypeTVOut;
    }

    // The switch above is exhaustive; any new wk type must be added there.
    ASSERT_NOT_REACHED();
    return MediaPlayer::TargetTypeNone;
}
1936
// Returns the user-visible name of the external playback device (e.g. an Apple TV),
// or the empty string when there is no player.
String MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName() const
{
    if (!m_avPlayer)
        return emptyString();

    String wirelessTargetName = wkExernalDeviceDisplayNameForPlayer(m_avPlayer.get());
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessPlaybackTargetName(%p) - returning %s", this, wirelessTargetName.utf8().data());

    return wirelessTargetName;
}
1947
// Reports whether wireless (external) video playback is disabled. The mutable cache
// m_allowsWirelessVideoPlayback mirrors AVPlayer.allowsExternalPlayback — the setter
// below stores `!disabled` into it — so it must be refreshed with the player's value
// directly, not its negation.
bool MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled() const
{
    // No player yet: answer from the cached value.
    if (!m_avPlayer)
        return !m_allowsWirelessVideoPlayback;

    m_allowsWirelessVideoPlayback = [m_avPlayer.get() allowsExternalPlayback];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::wirelessVideoPlaybackDisabled(%p) - returning %s", this, boolString(!m_allowsWirelessVideoPlayback));

    return !m_allowsWirelessVideoPlayback;
}
1958
// Enables/disables wireless (external) video playback. m_allowsWirelessVideoPlayback
// caches the "allowed" state so the value survives until m_avPlayer exists.
void MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(bool disabled)
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setWirelessVideoPlaybackDisabled(%p) - %s", this, boolString(disabled));
    m_allowsWirelessVideoPlayback = !disabled;
    if (!m_avPlayer)
        return;

    // allowsExternalPlayback is the *allowed* flag, i.e. the inverse of `disabled`;
    // this must agree with the m_allowsWirelessVideoPlayback assignment above.
    [m_avPlayer.get() setAllowsExternalPlayback:!disabled];
}
1968 #endif
1969
// KVO relay (main thread): caches the AVPlayerItem's new status and re-derives the
// player's ready/network states.
void MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange(int status)
{
    m_cachedItemStatus = status;

    updateStates();
}
1976
// Prior-notification KVO relay: a playbackLikelyToKeepUp change is in flight; defer
// state updates until the matching DidChange drains m_pendingStatusChanges.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange()
{
    m_pendingStatusChanges++;
}
1981
// KVO relay: caches the new likely-to-keep-up value; updates states only once every
// outstanding will-change has been paired with its did-change.
void MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange(bool likelyToKeepUp)
{
    m_cachedLikelyToKeepUp = likelyToKeepUp;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
1990
// Prior-notification KVO relay: a playbackBufferEmpty change is in flight.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange()
{
    m_pendingStatusChanges++;
}
1995
// KVO relay: caches the new buffer-empty value; updates states only when no other
// status change is still pending.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange(bool bufferEmpty)
{
    m_cachedBufferEmpty = bufferEmpty;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2004
// Prior-notification KVO relay: a playbackBufferFull change is in flight.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange()
{
    m_pendingStatusChanges++;
}
2009
// KVO relay: caches the new buffer-full value; updates states only when no other
// status change is still pending.
void MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange(bool bufferFull)
{
    m_cachedBufferFull = bufferFull;

    ASSERT(m_pendingStatusChanges);
    if (!--m_pendingStatusChanges)
        updateStates();
}
2018
// KVO relay: caches the item's new seekable ranges and notifies the base class.
void MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange(RetainPtr<NSArray> seekableRanges)
{
    m_cachedSeekableRanges = seekableRanges;

    seekableTimeRangesChanged();
    updateStates();
}
2026
// KVO relay: caches the item's new loaded (buffered) ranges and notifies the base class.
void MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange(RetainPtr<NSArray> loadedRanges)
{
    m_cachedLoadedRanges = loadedRanges;

    loadedTimeRangesChanged();
    updateStates();
}
2034
// KVO relay: caches the item's new AVPlayerItemTrack array and triggers track
// reprocessing.
void MediaPlayerPrivateAVFoundationObjC::tracksDidChange(RetainPtr<NSArray> tracks)
{
    m_cachedTracks = tracks;

    tracksChanged();
    updateStates();
}
2042
// KVO relay: caches whether the item has any enabled audio and triggers track
// reprocessing.
void MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange(bool hasEnabledAudio)
{
    m_cachedHasEnabledAudio = hasEnabledAudio;

    tracksChanged();
    updateStates();
}
2050
// KVO relay: caches the item's new presentation size and notifies the base class.
void MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange(FloatSize size)
{
    m_cachedPresentationSize = size;

    sizeChanged();
    updateStates();
}
2058
// KVO relay: caches the item's new duration (seconds) and invalidates the derived
// duration cache in the base class.
void MediaPlayerPrivateAVFoundationObjC::durationDidChange(double duration)
{
    m_cachedDuration = duration;

    invalidateCachedDuration();
}
2065
// KVO relay: caches the AVPlayer's new rate, then updates states before announcing
// the rate change.
void MediaPlayerPrivateAVFoundationObjC::rateDidChange(double rate)
{
    m_cachedRate = rate;

    updateStates();
    rateChanged();
}
2073     
2074 #if ENABLE(IOS_AIRPLAY)
// KVO relay for externalPlaybackActive: forwards to the base-class notification.
void MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange()
{
    playbackTargetIsWirelessChanged();
}
2079 #endif
2080
// Keys asynchronously loaded on the AVAsset before it is considered ready.
// NOTE(review): lazy init is not thread-safe — presumably only called on one thread;
// verify before adding new call sites.
NSArray* assetMetadataKeyNames()
{
    static NSArray* keys;
    if (!keys) {
        keys = [[NSArray alloc] initWithObjects:@"duration",
                    @"naturalSize",
                    @"preferredTransform",
                    @"preferredVolume",
                    @"preferredRate",
                    @"playable",
                    @"tracks",
                    @"availableMediaCharacteristicsWithMediaSelectionOptions",
                   nil];
    }
    return keys;
}
2097
// AVPlayerItem key paths observed via KVO (dispatched in WebCoreAVFMovieObserver's
// -observeValueForKeyPath:…). NOTE(review): lazy init is not thread-safe — presumably
// only called on one thread; verify before adding new call sites.
NSArray* itemKVOProperties()
{
    static NSArray* keys;
    if (!keys) {
        keys = [[NSArray alloc] initWithObjects:@"presentationSize",
                @"status",
                @"asset",
                @"tracks",
                @"seekableTimeRanges",
                @"loadedTimeRanges",
                @"playbackLikelyToKeepUp",
                @"playbackBufferFull",
                @"playbackBufferEmpty",
                @"duration",
                @"hasEnabledAudio",
                nil];
    }
    return keys;
}
2117
2118 } // namespace WebCore
2119
2120 @implementation WebCoreAVFMovieObserver
2121
2122 - (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
2123 {
2124     self = [super init];
2125     if (!self)
2126         return nil;
2127     m_callback = callback;
2128     return self;
2129 }
2130
2131 - (void)disconnect
2132 {
2133     [NSObject cancelPreviousPerformRequestsWithTarget:self];
2134     m_callback = 0;
2135 }
2136
2137 - (void)metadataLoaded
2138 {
2139     if (!m_callback)
2140         return;
2141
2142     m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
2143 }
2144
2145 - (void)playableKnown
2146 {
2147     if (!m_callback)
2148         return;
2149
2150     m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
2151 }
2152
2153 - (void)didEnd:(NSNotification *)unusedNotification
2154 {
2155     UNUSED_PARAM(unusedNotification);
2156     if (!m_callback)
2157         return;
2158     m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
2159 }
2160
2161 - (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
2162 {
2163     UNUSED_PARAM(object);
2164     id newValue = [change valueForKey:NSKeyValueChangeNewKey];
2165
2166     LOG(Media, "WebCoreAVFMovieObserver::observeValueForKeyPath(%p) - keyPath = %s", self, [keyPath UTF8String]);
2167
2168     if (!m_callback)
2169         return;
2170
2171     bool willChange = [[change valueForKey:NSKeyValueChangeNotificationIsPriorKey] boolValue];
2172
2173     WTF::Function<void ()> function;
2174
2175     if (context == MediaPlayerAVFoundationObservationContextPlayerItem && willChange) {
2176         if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
2177             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpWillChange, m_callback);
2178         else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
2179             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyWillChange, m_callback);
2180         else if ([keyPath isEqualToString:@"playbackBufferFull"])
2181             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullWillChange, m_callback);
2182     }
2183
2184     if (context == MediaPlayerAVFoundationObservationContextPlayerItem && !willChange) {
2185         // A value changed for an AVPlayerItem
2186         if ([keyPath isEqualToString:@"status"])
2187             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playerItemStatusDidChange, m_callback, [newValue intValue]);
2188         else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
2189             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackLikelyToKeepUpDidChange, m_callback, [newValue boolValue]);
2190         else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
2191             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferEmptyDidChange, m_callback, [newValue boolValue]);
2192         else if ([keyPath isEqualToString:@"playbackBufferFull"])
2193             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackBufferFullDidChange, m_callback, [newValue boolValue]);
2194         else if ([keyPath isEqualToString:@"asset"])
2195             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::setAsset, m_callback, RetainPtr<NSArray>(newValue));
2196         else if ([keyPath isEqualToString:@"loadedTimeRanges"])
2197             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::loadedTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
2198         else if ([keyPath isEqualToString:@"seekableTimeRanges"])
2199             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::seekableTimeRangesDidChange, m_callback, RetainPtr<NSArray>(newValue));
2200         else if ([keyPath isEqualToString:@"tracks"])
2201             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::tracksDidChange, m_callback, RetainPtr<NSArray>(newValue));
2202         else if ([keyPath isEqualToString:@"hasEnabledAudio"])
2203             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::hasEnabledAudioDidChange, m_callback, [newValue boolValue]);
2204         else if ([keyPath isEqualToString:@"presentationSize"])
2205             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::presentationSizeDidChange, m_callback, FloatSize([newValue sizeValue]));
2206         else if ([keyPath isEqualToString:@"duration"])
2207             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::durationDidChange, m_callback, CMTimeGetSeconds([newValue CMTimeValue]));
2208     }
2209
2210     if (context == MediaPlayerAVFoundationObservationContextPlayer && !willChange) {
2211         // A value changed for an AVPlayer.
2212         if ([keyPath isEqualToString:@"rate"])
2213             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::rateDidChange, m_callback, [newValue doubleValue]);
2214 #if ENABLE(IOS_AIRPLAY)
2215         else if ([keyPath isEqualToString:@"externalPlaybackActive"])
2216             function = WTF::bind(&MediaPlayerPrivateAVFoundationObjC::playbackTargetIsWirelessDidChange, m_callback);
2217 #endif
2218     }
2219     
2220     if (function.isNull())
2221         return;
2222
2223     auto weakThis = m_callback->createWeakPtr();
2224     m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification([weakThis, function]{
2225         // weakThis and function both refer to the same MediaPlayerPrivateAVFoundationObjC instance. If the WeakPtr has
2226         // been cleared, the underlying object has been destroyed, and it is unsafe to call function().
2227         if (!weakThis)
2228             return;
2229         function();
2230     }));
2231 }
2232
2233 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    // Keep the observer and the attributed strings alive until the main-thread hop completes.
    RetainPtr<WebCoreAVFMovieObserver> retainedSelf = self;
    RetainPtr<NSArray> retainedStrings = strings;
    callOnMainThread([retainedSelf, retainedStrings, itemTime] {
        // m_callback may have been cleared while this task was queued; re-check on the main thread.
        if (MediaPlayerPrivateAVFoundationObjC* player = retainedSelf->m_callback)
            player->processCue(retainedStrings.get(), CMTimeGetSeconds(itemTime));
    });
}
2251
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;

    // Retain self so m_callback can be safely re-read once we reach the main thread.
    RetainPtr<WebCoreAVFMovieObserver> retainedSelf = self;
    callOnMainThread([retainedSelf] {
        MediaPlayerPrivateAVFoundationObjC* player = retainedSelf->m_callback;
        if (!player)
            return;
        player->flushCues();
    });
}
2265 #endif
2266
2267 @end
2268
2269 #if HAVE(AVFOUNDATION_LOADER_DELEGATE)
@implementation WebCoreAVFLoaderDelegate

// Thin Objective-C adapter that forwards AVAssetResourceLoader delegate callbacks
// to the owning MediaPlayerPrivateAVFoundationObjC on the main thread.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (self)
        m_callback = callback;
    return self;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // The decision is made on the main thread; keep the delegate and the request
    // alive until that task runs.
    RetainPtr<WebCoreAVFLoaderDelegate> retainedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> retainedRequest = loadingRequest;
    callOnMainThread([retainedSelf, retainedRequest] {
        MediaPlayerPrivateAVFoundationObjC* player = retainedSelf->m_callback;
        // Fail the load if the player was torn down or declines to handle the resource.
        if (!player || !player->shouldWaitForLoadingOfResource(retainedRequest.get()))
            [retainedRequest finishLoadingWithError:nil];
    });

    return YES;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForResponseToAuthenticationChallenge:(NSURLAuthenticationChallenge *)challenge
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    // Server-trust challenges are left to AVFoundation's default handling.
    if ([[[challenge protectionSpace] authenticationMethod] isEqualToString:NSURLAuthenticationMethodServerTrust])
        return NO;

    RetainPtr<WebCoreAVFLoaderDelegate> retainedSelf = self;
    RetainPtr<NSURLAuthenticationChallenge> retainedChallenge = challenge;
    callOnMainThread([retainedSelf, retainedChallenge] {
        MediaPlayerPrivateAVFoundationObjC* player = retainedSelf->m_callback;
        // Cancel if the player is gone or refuses to answer the challenge.
        if (!player || !player->shouldWaitForResponseToAuthenticationChallenge(retainedChallenge.get()))
            [[retainedChallenge sender] cancelAuthenticationChallenge:retainedChallenge.get()];
    });

    return YES;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    RetainPtr<WebCoreAVFLoaderDelegate> retainedSelf = self;
    RetainPtr<AVAssetResourceLoadingRequest> retainedRequest = loadingRequest;
    callOnMainThread([retainedSelf, retainedRequest] {
        if (MediaPlayerPrivateAVFoundationObjC* player = retainedSelf->m_callback)
            player->didCancelLoadingRequest(retainedRequest.get());
    });
}

- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
2348 #endif
2349
2350 #if HAVE(AVFOUNDATION_VIDEO_OUTPUT)
@implementation WebCoreAVFPullDelegate

// Forwards AVPlayerItemVideoOutput pull notifications straight to the owning
// MediaPlayerPrivateAVFoundationObjC (no thread hop is performed here).
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    if (!(self = [super init]))
        return nil;
    m_callback = callback;
    return self;
}

- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC *)callback
{
    m_callback = callback;
}

- (void)outputMediaDataWillChange:(AVPlayerItemVideoOutput *)output
{
    if (!m_callback)
        return;
    m_callback->outputMediaDataWillChange(output);
}

- (void)outputSequenceWasFlushed:(AVPlayerItemVideoOutput *)output
{
    // Intentionally empty: flush notifications are not needed by the pull model.
    UNUSED_PARAM(output);
}
@end
2377 #endif
2378
2379 #endif