[Mac] Add support for VideoTrack to MediaPlayerPrivateAVFObjC
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE COMPUTER, INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29
30 #import "MediaPlayerPrivateAVFoundationObjC.h"
31
32 #import "AudioTrackPrivateAVFObjC.h"
33 #import "AVTrackPrivateAVFObjCImpl.h"
34 #import "BlockExceptions.h"
35 #import "ExceptionCodePlaceholder.h"
36 #import "FloatConversion.h"
37 #import "FloatConversion.h"
38 #import "FrameView.h"
39 #import "GraphicsContext.h"
40 #import "InbandTextTrackPrivateAVFObjC.h"
41 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
42 #import "URL.h"
43 #import "Logging.h"
44 #import "SecurityOrigin.h"
45 #import "SoftLinking.h"
46 #import "TimeRanges.h"
47 #import "UUID.h"
48 #import "VideoTrackPrivateAVFObjC.h"
49 #import "WebCoreAVFResourceLoader.h"
50 #import "WebCoreSystemInterface.h"
51 #import <objc/runtime.h>
52 #import <runtime/DataView.h>
53 #import <runtime/Operations.h>
54 #import <runtime/TypedArrayInlines.h>
55 #import <runtime/Uint16Array.h>
56 #import <runtime/Uint32Array.h>
57 #import <runtime/Uint8Array.h>
58 #import <wtf/CurrentTime.h>
59 #import <wtf/text/CString.h>
60
61 #import <AVFoundation/AVFoundation.h>
62 #import <CoreMedia/CoreMedia.h>
63
64 #if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
65 #import <CoreVideo/CoreVideo.h>
66 #import <VideoToolbox/VideoToolbox.h>
67 #endif
68
69 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
70 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
71
72 #if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
73 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
74 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
75 #endif
76
77 SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
78 SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
79 SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
80 SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))
81
82 #if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
83 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
84 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
85 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
86 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
87 #endif
88
89 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
90 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
91 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
92 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
93 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
94 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
95
96 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
97 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
98 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
99 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
100 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
101 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
102 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
103 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
104 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
105 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
106
107 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
108
109 #define AVPlayer getAVPlayerClass()
110 #define AVPlayerItem getAVPlayerItemClass()
111 #define AVPlayerItemVideoOutput getAVPlayerItemVideoOutputClass()
112 #define AVPlayerLayer getAVPlayerLayerClass()
113 #define AVURLAsset getAVURLAssetClass()
114 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
115
116 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
117 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
118 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
119 #define AVMediaTypeVideo getAVMediaTypeVideo()
120 #define AVMediaTypeAudio getAVMediaTypeAudio()
121 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
122 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
123 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
124 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
125 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
126
127 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
128 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
129 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
130
131 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
132 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
133 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
134
135 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
136 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
137 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
138 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
139
140 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
141 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
142 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
143 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
144 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
145 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
146 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
147 #endif
148
149 #define kCMTimeZero getkCMTimeZero()
150
151 using namespace WebCore;
152 using namespace std;
153
// Context values passed to -observeValueForKeyPath:ofObject:change:context: so the
// observer can tell whether a KVO notification came from the AVPlayerItem or the AVPlayer.
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayer
};
158
// Obj-C helper that forwards AVFoundation notifications, KVO change callbacks, and (when
// the legible-output API is available) caption callbacks to the owning
// MediaPlayerPrivateAVFoundationObjC.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Raw back-pointer to the owner; see -disconnect.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)playableKnown;
-(void)metadataLoaded;
-(void)seekCompleted:(BOOL)finished;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
180
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
// AVAssetResourceLoader delegate that routes resource-loading requests back into WebCore.
// The back-pointer is cleared with -setCallback:0 during player teardown.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback;
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
190
191 namespace WebCore {
192
193 static NSArray *assetMetadataKeyNames();
194 static NSArray *itemKVOProperties();
195
#if !LOG_DISABLED
// Renders a bool as "true"/"false" for LOG() messages.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
202
#if ENABLE(ENCRYPTED_MEDIA_V2)
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
// Process-wide map letting the encrypted-media code look up the AVFObjC private player
// for a given MediaPlayer. Entries are added in the constructor and removed in the destructor.
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
} // Stray trailing semicolon after the function body removed.
#endif
211
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Shared serial dispatch queue on which every player's AVAssetResourceLoader delegate is
// invoked; created once, lazily and thread-safely, via dispatch_once.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t globalQueue;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        globalQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return globalQueue;
}
#endif
223
// Factory registered with the MediaPlayer engine machinery; returns a fresh engine instance.
// The opening brace was missing from the function definition.
PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
{
    return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
}
228
// Registers this engine with the media engine registrar if AVFoundation is available.
// Encrypted-media builds register the extended supportsType entry point, which also
// understands key systems.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
        registrar(create, getSupportedTypes, extendedSupportsType, 0, 0, 0);
#else
        registrar(create, getSupportedTypes, supportsType, 0, 0, 0);
#endif
}
238
// Creates the observer (and, on 10.9+, the resource-loader delegate) up front; the
// AVPlayer/AVPlayerItem themselves are created lazily once loading starts.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTrack(0)
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Let encrypted-media code find this private player from its MediaPlayer.
    playerToPrivateMap().set(player, this);
#endif
}
253
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
    // Break the loader delegate's back-pointer first so any in-flight resource-loading
    // callback cannot reach this object after destruction.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];
#endif
    cancelLoad();
}
265
// Tears down all AVFoundation state: rendering, notifications, KVO registrations, outputs,
// the player item, and the player. Order matters — observers are detached before the
// objects they observe are released.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    // Stop listening before tearing anything down so late notifications can't call back.
    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping our reference.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Undo every KVO registration made in createAVPlayerItem() before releasing the item.
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    // Likewise drop the periodic time observer and the "rate" KVO registration on the player.
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
        m_avPlayer = nil;
    }
    setIgnoreLoadStateChanges(false);
}
306
// True when an AVPlayerLayer exists for direct layer compositing of the video.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return !!m_videoLayer;
}
311
// True when a non-layer rendering path is ready: an AVPlayerItemVideoOutput on 10.8+,
// otherwise an AVAssetImageGenerator.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    return m_videoOutput;
#else
    return m_imageGenerator;
#endif
}
320
// Creates the renderer used when painting into a GraphicsContext (as opposed to layer
// compositing); the mechanism depends on the deployment target.
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
329
#if __MAC_OS_X_VERSION_MIN_REQUIRED < 1080
// Lazily creates the AVAssetImageGenerator used to snapshot frames for painting on
// systems without AVPlayerItemVideoOutput. No-op until an asset exists.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance: request the frame at exactly the asked-for time.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
#endif
348
// Destroys whichever context renderer createContextVideoRenderer() made.
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    destroyVideoOutput();
#else
    destroyImageGenerator();
#endif
}
357
#if __MAC_OS_X_VERSION_MIN_REQUIRED < 1080
// Releases the AVAssetImageGenerator, if any.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    m_imageGenerator = 0;
}
#endif
369
// Lazily creates the AVPlayerLayer used when video is composited directly. Requires that
// the AVPlayer already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer)
        return;

    if (!m_videoLayer) {
        m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
        [m_videoLayer.get() setPlayer:m_avPlayer.get()];
        [m_videoLayer.get() setBackgroundColor:CGColorGetConstantColor(kCGColorBlack)];
#ifndef NDEBUG
        [m_videoLayer.get() setName:@"Video layer"];
#endif
        updateVideoLayerGravity();
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());
    }
}
386
// Detaches the AVPlayerLayer from the player and releases it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    // The format string previously had only one %p for two variadic arguments.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() setPlayer:nil];

    m_videoLayer = 0;
}
398
// Reports whether a video frame can be shown right now. When compositing to a layer the
// AVPlayerLayer knows; otherwise we rely on whether paint has ever produced a frame.
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    if (currentRenderingMode() != MediaRenderingToLayer)
        return m_videoFrameHasDrawn;

    return m_videoLayer && [m_videoLayer.get() isReadyForDisplay];
}
406
// Builds the AVURLAsset for |url|: applies reference restrictions, attaches
// Referer/User-Agent headers where the (private) header-fields option exists, and on
// 10.9+ installs the custom resource-loader delegate.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p)", this);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Forbid the asset from mixing local and remote references in either direction.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];
#endif

    NSURL *cocoaURL = URL(ParsedURLString, url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    // A new asset needs a fresh playability check.
    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
446
// Lazily creates the AVPlayer, starts observing its "rate" via KVO, opts into automatic
// media selection where supported, and attaches an already-created player item.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    // NSKeyValueObservingOptions is an unsigned bitmask; pass 0 (no options), not nil.
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:0 context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:YES];
#endif

    if (m_avPlayerItem)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

    setDelayCallbacks(false);
}
468
// Lazily creates the AVPlayerItem for the current asset: registers for the played-to-end
// notification and the item KVO properties, attaches the item to the player, and — where
// supported — adds the legible output used for in-band captions.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // NSKeyValueObservingOptions is an unsigned bitmask; pass 0 (no options), not nil.
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:0 context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    if (m_avPlayer)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:[NSArray array]]);
    // WebCore renders the cues itself, so stop AVFoundation from drawing them.
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

    setDelayCallbacks(false);
}
503
// Asynchronously loads the asset's "playable" key exactly once per asset; the observer is
// notified via -playableKnown when the answer is available.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        [m_objcObserver.get() playableKnown];
    }];
}
516
// Kicks off asynchronous loading of the metadata keys; the observer is told via
// -metadataLoaded when they are all resolved.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);
    // The previous unbalanced -retain on the keys array leaked one reference per call;
    // loadValuesAsynchronouslyForKeys: does not require the caller to retain its argument.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{
        [m_objcObserver.get() metadataLoaded];
    }];
}
524
// Maps the AVPlayerItem's state onto the engine-neutral ItemStatus enum. The checks are
// ordered: hard states (missing/unknown/failed) first, then buffering states from best
// (likely to keep up) to worst (buffer empty), defaulting to ready-to-play.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    AVPlayerItemStatus status = [m_avPlayerItem.get() status];
    if (status == AVPlayerItemStatusUnknown)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    if (status == AVPlayerItemStatusFailed)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    if ([m_avPlayerItem.get() isPlaybackLikelyToKeepUp])
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if ([m_avPlayerItem.get() isPlaybackBufferFull])
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if ([m_avPlayerItem.get() isPlaybackBufferEmpty])
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
544
// Exposes the underlying AVPlayer to callers that need the platform media object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia media;
    media.type = PlatformMedia::AVFoundationMediaPlayerType;
    media.media.avfMediaPlayer = m_avPlayer.get();
    return media;
}
553
// The AVPlayerLayer (if one has been created) is this player's compositing layer.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    return m_videoLayer.get();
}
558
// Shows or hides the video layer. Wrapped in a CATransaction with implicit actions
// disabled so the visibility change is not animated.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
567     
// Starts playback by applying the requested rate to the AVPlayer; callbacks are delayed
// around the change (see setDelayCallbacks) so resulting KVO events are deferred.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
578
// Pauses playback by setting the AVPlayer's rate to 0.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    // -setRate: takes a float; passing nil (a pointer) only worked by accident. 0 pauses.
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
589
// Returns the media duration in seconds, infinity for an indefinite (e.g. live) duration,
// or MediaPlayer::invalidTime() when the duration cannot yet be determined.
float MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
         return MediaPlayer::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration = [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return narrowPrecisionToFloat(CMTimeGetSeconds(cmDuration));

    // An indefinite duration (live streams report this) maps to infinity per HTML media spec.
    if (CMTIME_IS_INDEFINITE(cmDuration)) {
        return numeric_limits<float>::infinity();
    }

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %.0f", this, MediaPlayer::invalidTime());
    return MediaPlayer::invalidTime();
}
615
// Returns the current playback position in seconds, or 0 before the item/metadata exist
// or when AVFoundation reports a non-numeric time.
float MediaPlayerPrivateAVFoundationObjC::currentTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return 0;

    CMTime playerTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(playerTime))
        return 0;

    // Clamp any negative value to zero.
    return max(narrowPrecisionToFloat(CMTimeGetSeconds(playerTime)), 0.0f);
}
628
// Asynchronously seeks the item to |time| (600-unit timescale, zero tolerance); the
// observer receives -seekCompleted: when AVFoundation finishes the seek.
void MediaPlayerPrivateAVFoundationObjC::seekToTime(double time)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Capture the raw observer pointer so the completion block does not capture |this|.
    WebCoreAVFMovieObserver *observer = m_objcObserver.get();
    [m_avPlayerItem.get() seekToTime:CMTimeMakeWithSeconds(time, 600) toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:^(BOOL finished) {
        [observer seekCompleted:finished];
    }];

    setDelayCallbacks(false);
}
641
// Applies the requested volume to the AVPlayer; ignored until metadata is available.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
    if (metaDataAvailable())
        [m_avPlayer.get() setVolume:volume];
}
649
// Intentionally does nothing beyond logging in this implementation; the parameter is
// consumed only by the log statement.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
659
// Pushes the rate requested by the cross-platform layer to the AVPlayer, deferring
// callbacks triggered by the resulting rate-change KVO notification.
void MediaPlayerPrivateAVFoundationObjC::updateRate()
{
    setDelayCallbacks(true);
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
666
// Reports the AVPlayer's current rate, or 0 until metadata is available.
float MediaPlayerPrivateAVFoundationObjC::rate() const
{
    if (metaDataAvailable())
        return [m_avPlayer.get() rate];

    return 0;
}
674
// Converts the item's loadedTimeRanges into a WebCore TimeRanges object, skipping invalid
// or empty CMTimeRanges. Returns an empty TimeRanges when no player item exists.
PassRefPtr<TimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    RefPtr<TimeRanges> timeRanges = TimeRanges::create();

    if (!m_avPlayerItem)
        return timeRanges.release();

    NSArray *loadedRanges = [m_avPlayerItem.get() loadedTimeRanges];
    for (NSValue *thisRangeValue in loadedRanges) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange)) {
            float rangeStart = narrowPrecisionToFloat(CMTimeGetSeconds(timeRange.start));
            float rangeEnd = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange)));
            timeRanges->add(rangeStart, rangeEnd);
        }
    }
    return timeRanges.release();
}
693
// Returns the earliest seekable position in seconds, or 0 when no valid range exists.
double MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    NSArray *ranges = [m_avPlayerItem.get() seekableTimeRanges];
    if (![ranges count])
        return 0;

    bool foundRange = false;
    double earliestStart = std::numeric_limits<double>::infinity();
    for (NSValue *rangeValue in ranges) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundRange = true;
        earliestStart = std::min(earliestStart, CMTimeGetSeconds(range.start));
    }
    return foundRange ? earliestStart : 0;
}
714
// Returns the latest seekable position in seconds across all valid, non-empty ranges
// (0 when there are none; fast enumeration over a nil array iterates zero times).
double MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    double latestEnd = 0;
    for (NSValue *rangeValue in [m_avPlayerItem.get() seekableTimeRanges]) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        latestEnd = std::max(latestEnd, CMTimeGetSeconds(CMTimeRangeGetEnd(range)));
    }
    return latestEnd;
}
733
// Returns the furthest buffered position in seconds across all valid, non-empty loaded
// ranges (0 when there are none).
float MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    float furthestEnd = 0;
    for (NSValue *rangeValue in [m_avPlayerItem.get() loadedTimeRanges]) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        float end = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(range)));
        if (end > furthestEnd)
            furthestEnd = end;
    }
    return furthestEnd;
}
753
// Sums the sample data length of every track in the asset. The 64-bit sum is truncated to
// unsigned to match the MediaPlayerPrivateInterface contract.
unsigned MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    long long totalMediaSize = 0;
    NSArray *tracks = [m_avAsset.get() tracks];
    for (AVAssetTrack *thisTrack in tracks)
        totalMediaSize += [thisTrack totalSampleDataLength];

    return static_cast<unsigned>(totalMediaSize);
}
766
// Adopts |asset| as the player's current AVAsset; the RetainPtr member keeps it alive.
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
771
// Derives an aggregate load status from the per-key loading state of every metadata key
// requested in beginLoadingMetadata(), then from the asset's "playable" value.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:nil];

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
795
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    // Draw the current video frame into |context| at |rect|, using the video-output
    // path when building for 10.8+ and the image-generator path otherwise.
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // Delay callbacks around the Objective-C painting calls, and shield WebCore from
    // any Objective-C exceptions they might raise.
    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    paintWithVideoOutput(context, rect);
#else
    paintWithImageGenerator(context, rect);
#endif

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Record that at least one frame has been rendered.
    m_videoFrameHasDrawn = true;
}
815
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    // Software paint entry point. Skip the work when there is no metadata yet, when
    // painting is disabled, or when the compositor is already showing the video layer.
    bool canPaint = metaDataAvailable() && !context->paintingDisabled();
    if (!canPaint || currentRenderingMode() == MediaRenderingToLayer)
        return;

    paintCurrentFrameInContext(context, rect);
}
827
#if __MAC_OS_X_VERSION_MIN_REQUIRED < 1080
// Software paint path for systems without AVPlayerItemVideoOutput: snapshot the frame
// at the current time via AVAssetImageGenerator and draw it into the context.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
{
    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (image) {
        GraphicsContextStateSaver stateSaver(*context);
        // Translate to the bottom of the target rect and flip vertically so the
        // CGImage comes out upright in the y-down graphics context.
        context->translate(rect.x(), rect.y() + rect.height());
        context->scale(FloatSize(1.0f, -1.0f));
        context->setImageInterpolationQuality(InterpolationLow);
        IntRect paintRect(IntPoint(0, 0), IntSize(rect.width(), rect.height()));
        CGContextDrawImage(context->platformContext(), CGRectMake(0, 0, paintRect.width(), paintRect.height()), image.get());
        // Removed a redundant "image = 0;" dead store — the RetainPtr releases the
        // image when it goes out of scope immediately afterwards.
    }
}
#endif
843
// Lazily builds (once) and returns the set of MIME types AVFoundation reports as
// playable. Not thread-safe; matches the file's other DEFINE_STATIC_LOCAL caches.
static HashSet<String> mimeTypeCache()
{
    DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add(mimeType);

    return cache;
} // FIX: restored the function's missing closing brace.
#if __MAC_OS_X_VERSION_MIN_REQUIRED < 1080
// Snapshot the frame at |time| as a CGImage no larger than |rect|, creating the
// AVAssetImageGenerator on first use. Returns null on generator failure.
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Limit the decoded image to the paint rect before copying the frame.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    RetainPtr<CGImageRef> image = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
#endif
882
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    // Copy the lazily-built AVFoundation MIME type cache into the caller's set.
    supportedTypes = mimeTypeCache();
} // FIX: restored the function's missing closing brace.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const String& type, const String& codecs, const URL&)
{
    // Answer canPlayType() for this engine: unknown container -> not supported.
    if (!mimeTypeCache().contains(type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    // Ask AVFoundation about the full "type; codecs=..." string; a positive answer
    // upgrades the result to IsSupported. (FIX: dropped a stray duplicate semicolon.)
    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
901
902 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Only Apple's "lskd" key system (bare or explicitly versioned "1_0") is recognized.
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.lskd")
        || equalIgnoringCase(keySystem, "com.apple.lskd.1_0");
}
910
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::extendedSupportsType(const String& type, const String& codecs, const String& keySystem, const URL& url)
{
    // Implements the extra canPlayType() steps from the EME v0.1b proposal:
    // <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>

    // Step 1: with no key system specified, fall straight through to the ordinary
    // container/codec check.
    if (!keySystem.isNull() && !keySystem.isEmpty()) {
        // An unrecognized or unsupported key system maps to "not supported".
        if (!keySystemIsSupported(keySystem))
            return MediaPlayer::IsNotSupported;

        // Whether this key system can decrypt the given container/codec combination is
        // unanswerable here — AVFoundation provides no API for it — so that sub-step
        // is a no-op.
    }

    // Step 2: return "maybe" or "probably" per the existing canPlayType() specification.
    return supportsType(type, codecs, url);
}
931
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    // Decide whether WebKit will service this AVAssetResourceLoader request. "skd" key
    // requests are routed to the encrypted-media machinery; everything else is handed to
    // a WebCoreAVFResourceLoader. Returning true means we will answer the request later.
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Little-endian byte-size prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): dividing by sizeof(unsigned char) is a no-op (== 1); presumably the
        // intended element count is just keyURI.length() — confirm against Uint16Array::setRange().
        keyURIArray->setRange(keyURI.characters(), keyURI.length() / sizeof(unsigned char), 0);

        // If the client does not want a key for this URI, let AVFoundation proceed on its own.
#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so a later generateKeyRequest()/addKey() can answer it.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }
#endif

    // Ordinary resource: spin up a loader and track it so cancellation can stop it.
    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
967
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    // Stop the resource loader (if any) that was servicing this request.
    // FIX: removed an unused local that extracted the request URL's scheme.
    WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest);

    if (resourceLoader)
        resourceLoader->stopLoading();
}
977
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    // The loader finished on its own; just drop our record of it.
    m_resourceLoaderMap.remove(avRequest);
}
982 #endif
983
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    // This engine is usable only when both the AVFoundation and CoreMedia libraries
    // can be obtained at runtime.
    return AVFoundationLibrary() && CoreMediaLibrary();
}
988
float MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(float timeValue) const
{
    // Intended to map a presentation time onto the media timescale; currently an
    // identity mapping in all cases.
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
997
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    // Pick the gravity matching the aspect-ratio policy, then apply it inside a
    // transaction with implicit animations disabled.
    NSString* newGravity = shouldMaintainAspectRatio() ? AVLayerVideoGravityResizeAspect : AVLayerVideoGravityResize;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:newGravity];
    [CATransaction commit];
}
1009
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Remember the previous primary audio language so a change can be detected below.
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    // Batch characteristic notifications until the end of this update.
    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        setHasVideo([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual] count]);
        setHasAudio([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible] count]);
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
    } else {
        // With a player item, inspect only the enabled tracks.
        bool hasVideo = false;
        bool hasAudio = false;
        NSArray *tracks = [m_avPlayerItem.get() tracks];
        for (AVPlayerItemTrack *track in tracks) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                if ([[assetTrack mediaType] isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([[assetTrack mediaType] isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                }
            }
        }
        setHasVideo(hasVideo);
        setHasAudio(hasAudio);


#if ENABLE(VIDEO_TRACK)
        updateAudioTracks();
        updateVideoTracks();
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // Prefer media-selection-based captions when a legible group is available.
    if (AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia()) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    // FIX: the log tag previously said "WebCoreAVFMovieObserver:tracksChanged", which
    // misattributed these messages to the observer class.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}
1090
1091 #if ENABLE(VIDEO_TRACK)
// Diffs the AVPlayerItem's current tracks of |trackType| against |oldItems| (the track
// wrappers created on a previous pass), updates |oldItems| in place, and notifies
// |player| of each removal and addition via the supplied member-function pointers.
template <typename RefT, typename PassRefT>
void determineChangedTracksFromNewTracksAndOldItems(NSArray* tracks, NSString* trackType, Vector<RefT>& oldItems, RefT (*itemFactory)(AVPlayerItemTrack*), MediaPlayer* player, void (MediaPlayer::*addedFunction)(PassRefT), void (MediaPlayer::*removedFunction)(PassRefT))
{
    // Set of the item's current tracks whose asset track matches the requested media type.
    RetainPtr<NSSet> newTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
        return [[[track assetTrack] mediaType] isEqualToString:trackType];
    }]]]);
    RetainPtr<NSMutableSet> oldTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:oldItems.size()]);

    typedef Vector<RefT> ItemVector;
    for (auto i = oldItems.begin(); i != oldItems.end(); ++i)
        [oldTracks addObject:(*i)->playerItemTrack()];

    // removed = old - new; added = new - old.
    RetainPtr<NSMutableSet> removedTracks = adoptNS([oldTracks mutableCopy]);
    [removedTracks minusSet:newTracks.get()];

    RetainPtr<NSMutableSet> addedTracks = adoptNS([newTracks mutableCopy]);
    [addedTracks minusSet:oldTracks.get()];

    // Partition the old wrappers into surviving vs. removed, then wrap each added track.
    ItemVector replacementItems;
    ItemVector addedItems;
    ItemVector removedItems;
    for (auto i = oldItems.begin(); i != oldItems.end(); ++i) {
        if ([removedTracks containsObject:(*i)->playerItemTrack()])
            removedItems.append(*i);
        else
            replacementItems.append(*i);
    }

    for (AVPlayerItemTrack* track in addedTracks.get())
        addedItems.append(itemFactory(track));

    // Install the new wrapper list before telling the player about the changes.
    replacementItems.appendVector(addedItems);
    oldItems.swap(replacementItems);

    for (auto i = removedItems.begin(); i != removedItems.end(); ++i)
        (player->*removedFunction)(*i);

    for (auto i = addedItems.begin(); i != addedItems.end(); ++i)
        (player->*addedFunction)(*i);
}
1132
void MediaPlayerPrivateAVFoundationObjC::updateAudioTracks()
{
    // FIX: the added/removed callbacks were passed in swapped order. The template's
    // sixth parameter is addedFunction and the seventh is removedFunction, so added
    // audio tracks were being reported to MediaPlayer::removeAudioTrack and vice versa.
    determineChangedTracksFromNewTracksAndOldItems([m_avPlayerItem tracks], AVMediaTypeAudio, m_audioTracks, &AudioTrackPrivateAVFObjC::create, player(), &MediaPlayer::addAudioTrack, &MediaPlayer::removeAudioTrack);
}
1137
void MediaPlayerPrivateAVFoundationObjC::updateVideoTracks()
{
    // FIX: the added/removed callbacks were passed in swapped order (the template takes
    // addedFunction before removedFunction), so added video tracks were being reported
    // to MediaPlayer::removeVideoTrack and removed tracks to MediaPlayer::addVideoTrack.
    determineChangedTracksFromNewTracksAndOldItems([m_avPlayerItem tracks], AVMediaTypeVideo, m_videoTracks, &VideoTrackPrivateAVFObjC::create, player(), &MediaPlayer::addVideoTrack, &MediaPlayer::removeVideoTrack);
}
1142 #endif // ENABLE(VIDEO_TRACK)
1143
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    // Recompute and publish the movie's natural size from the asset's track geometry.
    if (!m_avAsset)
        return;

    NSArray *tracks = [m_avAsset.get() tracks];

    // Some assets don't report track properties until they are completely ready to play, but we
    // want to report a size as early as possible so use presentationSize when an asset has no tracks.
    if (m_avPlayerItem && ![tracks count]) {
        setNaturalSize(IntSize([m_avPlayerItem.get() presentationSize]));
        return;
    }

    // AVAsset's 'naturalSize' property only considers the movie's first video track, so we need to compute
    // the union of all visual track rects.
    CGRect trackUnionRect = CGRectZero;
    for (AVAssetTrack *track in tracks) {
        CGSize trackSize = [track naturalSize];
        CGRect trackRect = CGRectMake(0, 0, trackSize.width, trackSize.height);
        trackUnionRect = CGRectUnion(trackUnionRect, CGRectApplyAffineTransform(trackRect, [track preferredTransform]));
    }

    // The movie is always displayed at 0,0 so move the track rect to the origin before using width and height.
    // NOTE(review): offsetting by +origin doubles the origin instead of moving the rect
    // to (0,0); harmless today because only .size is consumed below — confirm intent.
    trackUnionRect = CGRectOffset(trackUnionRect, trackUnionRect.origin.x, trackUnionRect.origin.y);
    
    // Also look at the asset's preferred transform so we account for a movie matrix.
    CGSize naturalSize = CGSizeApplyAffineTransform(trackUnionRect.size, [m_avAsset.get() preferredTransform]);

    // Cache the natural size (setNaturalSize will notify the player if it has changed).
    setNaturalSize(IntSize(naturalSize));
}
1176
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    // Without an asset nothing can be proved about origins, so answer conservatively.
    if (!m_avAsset)
        return false;
    
    // Compare the origin we requested against the origin the asset actually resolved
    // to; they must share scheme, host, and port.
    RefPtr<SecurityOrigin> requestedOrigin = SecurityOrigin::createFromString(assetURL());
    RefPtr<SecurityOrigin> resolvedOrigin = SecurityOrigin::create(URL(wkAVAssetResolvedURL(m_avAsset.get())));
    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
1186
1187 #if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    // Attach an AVPlayerItemVideoOutput to the player item so frames can be pulled
    // for software painting. No-op until a player item exists or once created.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
    // On 10.9+ request 4:2:2 YCbCr buffers; createPixelBuffer() converts them to BGRA
    // via VideoToolbox. Earlier systems ask for 32-bit BGRA directly.
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_422YpCbCr8) };
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:k32BGRAPixelFormat], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
1208
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    // Detach the output from the item (when one still exists) before dropping our reference.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];

    m_videoOutput = 0;
}
1220
// Pulls the frame for the item's current time out of the AVPlayerItemVideoOutput as a
// CVPixelBuffer, creating the output on demand. Returns 0 when no new frame is
// available or the copy fails.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // FIX: initialize to 0 so a failed CVPixelBufferCreate() cannot leave us adopting
    // an uninitialized pointer.
    CVPixelBufferRef outputBuffer = 0;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), k32BGRAPixelFormat, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    // FIX: the format string was missing the "(%p)" conversion for 'this', leaving a
    // format/argument mismatch (two arguments for a single conversion).
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
1261
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& rect)
{
    // Paint the newest available frame into |context|, falling back to the previously
    // painted frame when the output has nothing new for the current time.
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (pixelBuffer)
        m_lastImage = pixelBuffer;

    if (m_lastImage) {
        GraphicsContextStateSaver stateSaver(*context);
        // Flip vertically so the frame is drawn upright in the y-down graphics context.
        context->translate(rect.x(), rect.y() + rect.height());
        context->scale(FloatSize(1.0f, -1.0f));
        RetainPtr<CIImage> image = adoptNS([[CIImage alloc] initWithCVImageBuffer:m_lastImage.get()]);

        // ciContext does not use a RetainPtr for results of contextWithCGContext:, as the returned value
        // is autoreleased, and there is no non-autoreleased version of that function.
        CIContext* ciContext = [CIContext contextWithCGContext:context->platformContext() options:nil];
        CGRect outputRect = { CGPointZero, rect.size() };
        CGRect imageRect = CGRectMake(0, 0, CVPixelBufferGetWidth(m_lastImage.get()), CVPixelBufferGetHeight(m_lastImage.get()));
        [ciContext drawImage:image.get() inRect:outputRect fromRect:imageRect];
    }
}
1286 #endif
1287
1288 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Parses EME initData into its three fields, writing them to the out-parameters.
// Returns false on any truncated or malformed field. Length prefixes are read
// little-endian; keyURI and keyID payloads are interpreted as UTF-16 code units.
bool MediaPlayerPrivateAVFoundationObjC::extractKeyURIKeyIDAndCertificateFromInitData(Uint8Array* initData, String& keyURI, String& keyID, RefPtr<Uint8Array>& certificate)
{
    // initData should have the following layout:
    // [4 bytes: keyURI length][N bytes: keyURI][4 bytes: contentID length], [N bytes: contentID], [4 bytes: certificate length][N bytes: certificate]
    if (initData->byteLength() < 4)
        return false;

    RefPtr<ArrayBuffer> initDataBuffer = initData->buffer();

    // Use a DataView to read uint32 values from the buffer, as Uint32Array requires the reads be aligned on 4-byte boundaries. 
    RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
    uint32_t offset = 0;
    bool status = true;

    // NOTE(review): "offset + length" sums are computed in uint32_t and could wrap for
    // hostile length values — confirm callers bound initData before relying on this.
    uint32_t keyURILength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || offset + keyURILength > initData->length())
        return false;

    RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, offset, keyURILength);
    if (!keyURIArray)
        return false;

    keyURI = String(keyURIArray->data(), keyURILength / sizeof(unsigned short));
    offset += keyURILength;

    uint32_t keyIDLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || offset + keyIDLength > initData->length())
        return false;

    RefPtr<Uint16Array> keyIDArray = Uint16Array::create(initDataBuffer, offset, keyIDLength);
    if (!keyIDArray)
        return false;

    keyID = String(keyIDArray->data(), keyIDLength / sizeof(unsigned short));
    offset += keyIDLength;

    uint32_t certificateLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || offset + certificateLength > initData->length())
        return false;

    certificate = Uint8Array::create(initDataBuffer, offset, certificateLength);
    if (!certificate)
        return false;

    return true;
}
1338 #endif
1339
1340 #if ENABLE(ENCRYPTED_MEDIA)
// Turns EME initData (keyURI + keyID + application certificate) into a streaming
// content key request for the pending resource-loading request that matches keyURI,
// then reports the key-request bytes (or a key error) back to the client.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // There must be a resource-loader request waiting on this key
    // (stashed by shouldWaitForLoadingOfResource).
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    // Ask AVFoundation to build the key request from the certificate and asset identifier.
    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Failure is reported through the keyError path, so the method itself
        // still returns NoError.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    // Hand the raw key-request bytes to the client as a key message.
    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyURIToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
1382
// Delivers the key bytes to the AVAssetResourceLoadingRequest associated with
// |sessionID| (stored by generateKeyRequest) and completes that request.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    // Answer the pending loading request with the key data and drop our record of it.
    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    // The initData parameters are not needed on this path.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
1403
// Abandons the key session: forgets the pending loading request for |sessionID|
// without answering it.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    m_sessionIDToRequestMap.remove(sessionID);
    return MediaPlayer::NoError;
}
1415 #endif
1416
1417 #if ENABLE(ENCRYPTED_MEDIA_V2)
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForPlayerAndKeyURI(MediaPlayer* player, const String& keyURI)
{
    // Locate the private implementation backing this player; bail when it is not one of ours.
    MediaPlayerPrivateAVFoundationObjC* privatePlayer = playerToPrivateMap().get(player);
    if (!privatePlayer)
        return nullptr;

    // take() both returns and removes the pending request for this key URI.
    return privatePlayer->m_keyURIToRequestMap.take(keyURI);
}
1427 #endif
1428
1429 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Synchronizes m_textTracks with the item's legacy (closed-caption media type) tracks:
// existing wrappers are matched up, disappeared ones are reported as removed, and new
// caption tracks get fresh wrappers.
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    // Assume every existing track was removed; tracks we can still match are taken
    // back out of this list below.
    Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks;
    NSArray *tracks = [m_avPlayerItem.get() tracks];
    for (AVPlayerItemTrack *playerItemTrack in tracks) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (!removedTextTracks[i - 1]->isLegacyClosedCaptionsTrack())
                continue;

            // FIX: index into removedTextTracks, not m_textTracks. The two vectors
            // diverge as entries are removed here and new tracks are appended to
            // m_textTracks, so indexing m_textTracks with a removedTextTracks index
            // could compare the wrong track. The parallel loop in
            // processMediaSelectionOptions() already does this correctly.
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;
        
        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
1465 #endif
1466
1467 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    // Only query the legible selection group once the asset exists and has finished
    // loading its media-selection metadata; otherwise answer nil.
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
}
1478
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    // Synchronize m_textTracks with the legible media-selection options: existing
    // wrappers are matched up, vanished options are reported as removed, and new
    // options get fresh wrappers.
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    // Assume every existing track was removed; options we can still match are taken
    // back out of this list below.
    Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
             if (removedTextTracks[i - 1]->isLegacyClosedCaptionsTrack())
                 continue;

            RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
            if ([track->mediaSelectionOption() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
1515
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, double time)
{
    // Incoming cues are ignored unless a text track is currently selected.
    if (!m_currentTrack)
        return;

    CFArrayRef strings = reinterpret_cast<CFArrayRef>(attributedStrings);
    m_currentTrack->processCue(strings, time);
}
1523
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    // Only the currently selected track holds cue state to reset.
    if (m_currentTrack)
        m_currentTrack->resetCueValues();
}
1533 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1534
void MediaPlayerPrivateAVFoundationObjC::setCurrentTrack(InbandTextTrackPrivateAVF *track)
{
    // Select (or deselect, when track is null) the in-band text track whose cues
    // should be delivered to processCue().
    if (m_currentTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");

    m_currentTrack = track;

    if (track) {
        // Legacy closed captions are rendered by the player itself; every other
        // in-band track is enabled by selecting its media selection option.
        if (track->isLegacyClosedCaptionsTrack())
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
        // Deselecting: clear the media selection option (nil, matching the idiom used in
        // processMediaSelectionOptions()) and turn off legacy caption display.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }
}
1559
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    // Returns the BCP-ish language identifier of the primary audio track, caching the
    // result in m_languageOfPrimaryAudioTrack (a null String means "not computed yet").
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        // -count returns NSUInteger, which does not match the "%i" conversion on LP64
        // builds; cast explicitly to keep the varargs call well-defined.
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %i audio tracks, returning emptyString()", this, static_cast<int>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AVTrackPrivateAVFObjCImpl::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack.isEmpty())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
1601
NSArray* assetMetadataKeyNames()
{
    // AVAsset keys that must be loaded before the asset's metadata is usable.
    // Built once and intentionally never released. dispatch_once replaces the previous
    // bare "if (!keys)" check, which was not safe if first reached from two threads.
    static NSArray* keys;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        keys = [[NSArray alloc] initWithObjects:@"duration",
                    @"naturalSize",
                    @"preferredTransform",
                    @"preferredVolume",
                    @"preferredRate",
                    @"playable",
                    @"tracks",
                    @"availableMediaCharacteristicsWithMediaSelectionOptions",
                   nil];
    });
    return keys;
}
1618
NSArray* itemKVOProperties()
{
    // AVPlayerItem key paths observed via KVO; must stay in sync with the key-path
    // dispatch in -[WebCoreAVFMovieObserver observeValueForKeyPath:...].
    // Built once and intentionally never released. dispatch_once replaces the previous
    // bare "if (!keys)" check, which was not safe if first reached from two threads.
    static NSArray* keys;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        keys = [[NSArray alloc] initWithObjects:@"presentationSize",
                @"status",
                @"asset",
                @"tracks",
                @"seekableTimeRanges",
                @"loadedTimeRanges",
                @"playbackLikelyToKeepUp",
                @"playbackBufferFull",
                @"playbackBufferEmpty",
                @"duration",
                @"hasEnabledAudio",
                nil];
    });
    return keys;
}
1638
1639 } // namespace WebCore
1640
@implementation WebCoreAVFMovieObserver

// m_callback is a raw pointer to the C++ player. The player is expected to call
// -disconnect before it is destroyed, so every method below nil-checks m_callback
// (and the async blocks re-check it on the main queue) before dereferencing.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// Severs the link to the C++ player and cancels any pending performSelector
// requests; subsequent callbacks into this observer become no-ops.
- (void)disconnect
{
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

// Forwards "asset metadata loaded" to the player as a main-thread notification.
- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

// Forwards "asset playability known" to the player as a main-thread notification.
- (void)playableKnown
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
}

// Forwards seek completion, carrying whether the seek finished (vs. was interrupted).
- (void)seekCompleted:(BOOL)finished
{
    if (!m_callback)
        return;
    
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::SeekCompleted, static_cast<bool>(finished));
}

// NSNotification handler for the item playing to its end time.
- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// Central KVO entry point: maps each observed key path to a player notification.
// The context value distinguishes whether the change came from the AVPlayerItem or
// the AVPlayer, since both register observers through this object.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(change);

    LOG(Media, "WebCoreAVFMovieObserver:observeValueForKeyPath(%p) - keyPath = %s", self, [keyPath UTF8String]);

    if (!m_callback)
        return;

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemStatusChanged);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackLikelyToKeepUpChanged);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferEmptyChanged);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferFullChanged);
        else if ([keyPath isEqualToString:@"asset"])
            m_callback->setAsset([object asset]);
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemLoadedTimeRangesChanged);
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemSeekableTimeRangesChanged);
        else if ([keyPath isEqualToString:@"tracks"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemTracksChanged);
        // "hasEnabledAudio" is mapped to the same tracks-changed notification as "tracks".
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemTracksChanged);
        else if ([keyPath isEqualToString:@"presentationSize"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemPresentationSizeChanged);
        else if ([keyPath isEqualToString:@"duration"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::DurationChanged);
        // NOTE(review): unrecognized key paths are silently ignored; super is not
        // called. Presumably only the key paths registered by this file arrive here.

        return;
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerRateChanged);
    }
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// AVPlayerItemLegibleOutput callback: delivers caption/subtitle attributed strings.
// Hops to the main queue and re-checks m_callback there, since -disconnect may have
// run between the dispatch and the block's execution.
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    dispatch_async(dispatch_get_main_queue(), ^{
        if (!m_callback)
            return;
        m_callback->processCue(strings, CMTimeGetSeconds(itemTime));
    });
}

// Legible output was flushed (e.g. after a seek); clears cue state on the main queue.
- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;
    
    dispatch_async(dispatch_get_main_queue(), ^{
        if (!m_callback)
            return;
        m_callback->flushCues();
    });
}
#endif

@end
1766
1767 #if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
@implementation WebCoreAVFLoaderDelegate

// m_callback is a raw pointer to the C++ player; it is cleared via -setCallback:
// when the player goes away, and the async blocks below re-check it on the main
// queue before use.
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

// AVAssetResourceLoader delegate: returning YES means we will service the request
// asynchronously. The decision is hopped to the main thread; if the player is gone
// by then, the request is finished immediately so AVFoundation does not hang.
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return NO;

    dispatch_async(dispatch_get_main_queue(), ^{
        if (!m_callback) {
            [loadingRequest finishLoadingWithError:nil];
            return;
        }

        // If the player declines the request, finish it so the loader is not left waiting.
        if (!m_callback->shouldWaitForLoadingOfResource(loadingRequest))
            [loadingRequest finishLoadingWithError:nil];
    });

    return YES;
}

// Forwards request cancellation to the player on the main thread.
- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);
    if (!m_callback)
        return;

    dispatch_async(dispatch_get_main_queue(), ^{
        if (m_callback)
            m_callback->didCancelLoadingRequest(loadingRequest);
    });
}

// Rebinds (or clears, with null) the C++ player this delegate forwards to.
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}
@end
1815 #endif
1816
1817 #endif