7762f07dd430f95a2e302ad7d58e57c6931418c9
[WebKit-https.git] / Source / WebCore / platform / graphics / avfoundation / objc / MediaPlayerPrivateAVFoundationObjC.mm
1 /*
2  * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * 1. Redistributions of source code must retain the above copyright
8  *    notice, this list of conditions and the following disclaimer.
9  * 2. Redistributions in binary form must reproduce the above copyright
10  *    notice, this list of conditions and the following disclaimer in the
11  *    documentation and/or other materials provided with the distribution.
12  *
13  * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
14  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE COMPUTER, INC. OR
17  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
24  */
25
26 #import "config.h"
27
28 #if ENABLE(VIDEO) && USE(AVFOUNDATION)
29
30 #import "MediaPlayerPrivateAVFoundationObjC.h"
31
32 #import "AudioTrackPrivateAVFObjC.h"
33 #import "BlockExceptions.h"
34 #import "ExceptionCodePlaceholder.h"
35 #import "FloatConversion.h"
36 #import "FloatConversion.h"
37 #import "FrameView.h"
38 #import "GraphicsContext.h"
39 #import "InbandTextTrackPrivateAVFObjC.h"
40 #import "InbandTextTrackPrivateLegacyAVFObjC.h"
41 #import "URL.h"
42 #import "Logging.h"
43 #import "SecurityOrigin.h"
44 #import "SoftLinking.h"
45 #import "TimeRanges.h"
46 #import "UUID.h"
47 #import "WebCoreAVFResourceLoader.h"
48 #import "WebCoreSystemInterface.h"
49 #import <objc/runtime.h>
50 #import <runtime/DataView.h>
51 #import <runtime/Operations.h>
52 #import <runtime/TypedArrayInlines.h>
53 #import <runtime/Uint16Array.h>
54 #import <runtime/Uint32Array.h>
55 #import <runtime/Uint8Array.h>
56 #import <wtf/CurrentTime.h>
57 #import <wtf/text/CString.h>
58
59 #import <AVFoundation/AVFoundation.h>
60 #import <CoreMedia/CoreMedia.h>
61
62 #if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
63 #import <CoreVideo/CoreVideo.h>
64 #import <VideoToolbox/VideoToolbox.h>
65 #endif
66
67 SOFT_LINK_FRAMEWORK_OPTIONAL(AVFoundation)
68 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreMedia)
69
70 #if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
71 SOFT_LINK_FRAMEWORK_OPTIONAL(CoreVideo)
72 SOFT_LINK_FRAMEWORK_OPTIONAL(VideoToolbox)
73 #endif
74
75 SOFT_LINK(CoreMedia, CMTimeCompare, int32_t, (CMTime time1, CMTime time2), (time1, time2))
76 SOFT_LINK(CoreMedia, CMTimeMakeWithSeconds, CMTime, (Float64 seconds, int32_t preferredTimeScale), (seconds, preferredTimeScale))
77 SOFT_LINK(CoreMedia, CMTimeGetSeconds, Float64, (CMTime time), (time))
78 SOFT_LINK(CoreMedia, CMTimeRangeGetEnd, CMTime, (CMTimeRange range), (range))
79
80 #if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
81 SOFT_LINK(CoreVideo, CVPixelBufferGetWidth, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
82 SOFT_LINK(CoreVideo, CVPixelBufferGetHeight, size_t, (CVPixelBufferRef pixelBuffer), (pixelBuffer))
83 SOFT_LINK(VideoToolbox, VTPixelTransferSessionCreate, OSStatus, (CFAllocatorRef allocator, VTPixelTransferSessionRef *pixelTransferSessionOut), (allocator, pixelTransferSessionOut))
84 SOFT_LINK(VideoToolbox, VTPixelTransferSessionTransferImage, OSStatus, (VTPixelTransferSessionRef session, CVPixelBufferRef sourceBuffer, CVPixelBufferRef destinationBuffer), (session, sourceBuffer, destinationBuffer))
85 #endif
86
87 SOFT_LINK_CLASS(AVFoundation, AVPlayer)
88 SOFT_LINK_CLASS(AVFoundation, AVPlayerItem)
89 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemVideoOutput)
90 SOFT_LINK_CLASS(AVFoundation, AVPlayerLayer)
91 SOFT_LINK_CLASS(AVFoundation, AVURLAsset)
92 SOFT_LINK_CLASS(AVFoundation, AVAssetImageGenerator)
93
94 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicVisual, NSString *)
95 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicAudible, NSString *)
96 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeClosedCaption, NSString *)
97 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeVideo, NSString *)
98 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeAudio, NSString *)
99 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemDidPlayToEndTimeNotification, NSString *)
100 SOFT_LINK_POINTER(AVFoundation, AVAssetImageGeneratorApertureModeCleanAperture, NSString *)
101 SOFT_LINK_POINTER(AVFoundation, AVURLAssetReferenceRestrictionsKey, NSString *)
102 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResizeAspect, NSString *)
103 SOFT_LINK_POINTER(AVFoundation, AVLayerVideoGravityResize, NSString *)
104
105 SOFT_LINK_CONSTANT(CoreMedia, kCMTimeZero, CMTime)
106
107 #define AVPlayer getAVPlayerClass()
108 #define AVPlayerItem getAVPlayerItemClass()
109 #define AVPlayerItemVideoOutput getAVPlayerItemVideoOutputClass()
110 #define AVPlayerLayer getAVPlayerLayerClass()
111 #define AVURLAsset getAVURLAssetClass()
112 #define AVAssetImageGenerator getAVAssetImageGeneratorClass()
113
114 #define AVMediaCharacteristicVisual getAVMediaCharacteristicVisual()
115 #define AVMediaCharacteristicAudible getAVMediaCharacteristicAudible()
116 #define AVMediaTypeClosedCaption getAVMediaTypeClosedCaption()
117 #define AVMediaTypeVideo getAVMediaTypeVideo()
118 #define AVMediaTypeAudio getAVMediaTypeAudio()
119 #define AVPlayerItemDidPlayToEndTimeNotification getAVPlayerItemDidPlayToEndTimeNotification()
120 #define AVAssetImageGeneratorApertureModeCleanAperture getAVAssetImageGeneratorApertureModeCleanAperture()
121 #define AVURLAssetReferenceRestrictionsKey getAVURLAssetReferenceRestrictionsKey()
122 #define AVLayerVideoGravityResizeAspect getAVLayerVideoGravityResizeAspect()
123 #define AVLayerVideoGravityResize getAVLayerVideoGravityResize()
124
125 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
126 typedef AVMediaSelectionGroup AVMediaSelectionGroupType;
127 typedef AVMediaSelectionOption AVMediaSelectionOptionType;
128
129 SOFT_LINK_CLASS(AVFoundation, AVPlayerItemLegibleOutput)
130 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionGroup)
131 SOFT_LINK_CLASS(AVFoundation, AVMediaSelectionOption)
132
133 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicLegible, NSString *)
134 SOFT_LINK_POINTER(AVFoundation, AVMediaTypeSubtitle, NSString *)
135 SOFT_LINK_POINTER(AVFoundation, AVMediaCharacteristicContainsOnlyForcedSubtitles, NSString *)
136 SOFT_LINK_POINTER(AVFoundation, AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly, NSString *)
137
138 #define AVPlayerItemLegibleOutput getAVPlayerItemLegibleOutputClass()
139 #define AVMediaSelectionGroup getAVMediaSelectionGroupClass()
140 #define AVMediaSelectionOption getAVMediaSelectionOptionClass()
141 #define AVMediaCharacteristicLegible getAVMediaCharacteristicLegible()
142 #define AVMediaTypeSubtitle getAVMediaTypeSubtitle()
143 #define AVMediaCharacteristicContainsOnlyForcedSubtitles getAVMediaCharacteristicContainsOnlyForcedSubtitles()
144 #define AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly getAVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly()
145 #endif
146
147 #define kCMTimeZero getkCMTimeZero()
148
149 using namespace WebCore;
150 using namespace std;
151
// KVO context values used to distinguish observations registered on the
// AVPlayerItem from those registered on the AVPlayer (see observeValueForKeyPath:).
enum MediaPlayerAVFoundationObservationContext {
    MediaPlayerAVFoundationObservationContextPlayerItem,
    MediaPlayerAVFoundationObservationContextPlayer
};
156
// Objective-C observer that forwards AVFoundation KVO notifications,
// end-of-playback notifications, and (where supported) legible-output caption
// callbacks to the owning C++ MediaPlayerPrivateAVFoundationObjC.
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
@interface WebCoreAVFMovieObserver : NSObject <AVPlayerItemLegibleOutputPushDelegate>
#else
@interface WebCoreAVFMovieObserver : NSObject
#endif
{
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Not owned; cleared via -disconnect during teardown.
    int m_delayCallbacks;
}
-(id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
-(void)disconnect;
-(void)playableKnown;
-(void)metadataLoaded;
-(void)seekCompleted:(BOOL)finished;
-(void)didEnd:(NSNotification *)notification;
-(void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context;
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime;
- (void)outputSequenceWasFlushed:(id)output;
#endif
@end
178
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
// Resource loader delegate that forwards AVAssetResourceLoader requests to the
// owning C++ player so WebCore can service custom-scheme (e.g. key/media) loads.
@interface WebCoreAVFLoaderDelegate : NSObject<AVAssetResourceLoaderDelegate> {
    MediaPlayerPrivateAVFoundationObjC* m_callback; // Not owned; cleared via -setCallback:0 in the player's destructor.
}
- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest;
- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback;
@end
#endif
188
189 namespace WebCore {
190
191 static NSArray *assetMetadataKeyNames();
192 static NSArray *itemKVOProperties();
193
#if !LOG_DISABLED
// Render a bool as a printable C string for LOG() output.
static const char *boolString(bool val)
{
    if (val)
        return "true";
    return "false";
}
#endif
200
#if ENABLE(ENCRYPTED_MEDIA_V2)
// Process-wide map from a MediaPlayer to its private AVFoundation implementation,
// so encrypted-media (CDM) code can locate the player backing a given element.
typedef HashMap<MediaPlayer*, MediaPlayerPrivateAVFoundationObjC*> PlayerToPrivateMapType;
static PlayerToPrivateMapType& playerToPrivateMap()
{
    DEFINE_STATIC_LOCAL(PlayerToPrivateMapType, map, ());
    return map;
};
#endif
209
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Shared serial dispatch queue on which AVAssetResourceLoader delegate callbacks
// are delivered for all player instances. Created once; never torn down.
// NOTE(review): this definition is guarded by ENCRYPTED_MEDIA(_V2) but the
// function is also called from the __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
// resource-loader path in createAVAssetForURL() - confirm the guards agree.
static dispatch_queue_t globalLoaderDelegateQueue()
{
    static dispatch_queue_t globalQueue;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        globalQueue = dispatch_queue_create("WebCoreAVFLoaderDelegate queue", DISPATCH_QUEUE_SERIAL);
    });
    return globalQueue;
}
#endif
221
// Factory used by registerMediaEngine(); ownership of the new player
// transfers to the caller via PassOwnPtr.
// Fix: the function body was missing its opening brace, a syntax error.
PassOwnPtr<MediaPlayerPrivateInterface> MediaPlayerPrivateAVFoundationObjC::create(MediaPlayer* player)
{
    return adoptPtr(new MediaPlayerPrivateAVFoundationObjC(player));
}
226
// Register this engine with WebCore's media engine registry. The encrypted-media
// builds register the extended supportsType variant; otherwise the basic one.
void MediaPlayerPrivateAVFoundationObjC::registerMediaEngine(MediaEngineRegistrar registrar)
{
    if (isAvailable())
#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
        registrar(create, getSupportedTypes, extendedSupportsType, 0, 0, 0);
#else
        registrar(create, getSupportedTypes, supportsType, 0, 0, 0);
#endif
}
236
// Construct the player: create the Obj-C observer (and, on 10.9+, the resource
// loader delegate) that bridge AVFoundation callbacks back into this object.
MediaPlayerPrivateAVFoundationObjC::MediaPlayerPrivateAVFoundationObjC(MediaPlayer* player)
    : MediaPlayerPrivateAVFoundation(player)
    , m_objcObserver(adoptNS([[WebCoreAVFMovieObserver alloc] initWithCallback:this]))
    , m_videoFrameHasDrawn(false)
    , m_haveCheckedPlayability(false)
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
    , m_loaderDelegate(adoptNS([[WebCoreAVFLoaderDelegate alloc] initWithCallback:this]))
#endif
    , m_currentTrack(0)
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    // Let encrypted-media code find this private player from its MediaPlayer.
    playerToPrivateMap().set(player, this);
#endif
}
251
MediaPlayerPrivateAVFoundationObjC::~MediaPlayerPrivateAVFoundationObjC()
{
#if ENABLE(ENCRYPTED_MEDIA_V2)
    playerToPrivateMap().remove(player());
#endif
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
    // Detach the loader delegate before tearing anything else down so no
    // resource-loading callback can reach a half-destroyed object. Messaging
    // a nil m_avAsset here is a harmless no-op.
    [m_loaderDelegate.get() setCallback:0];
    [[m_avAsset.get() resourceLoader] setDelegate:nil queue:0];
#endif
    cancelLoad();
}
263
// Stop all in-flight loading and release every AVFoundation object we own.
// Also invoked from the destructor; safe when partially initialized since
// messaging nil Obj-C objects is a no-op.
void MediaPlayerPrivateAVFoundationObjC::cancelLoad()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::cancelLoad(%p)", this);
    tearDownVideoRendering();

    [[NSNotificationCenter defaultCenter] removeObserver:m_objcObserver.get()];
    [m_objcObserver.get() disconnect];

    // Tell our observer to do nothing when our cancellation of pending loading calls its completion handler.
    setIgnoreLoadStateChanges(true);
    if (m_avAsset) {
        [m_avAsset.get() cancelLoading];
        m_avAsset = nil;
    }

    clearTextTracks();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // Detach the legible (caption) output from the item before dropping it.
    if (m_legibleOutput) {
        if (m_avPlayerItem)
            [m_avPlayerItem.get() removeOutput:m_legibleOutput.get()];
        m_legibleOutput = nil;
    }
#endif

    // Balance the KVO registrations made in createAVPlayerItem() and createAVPlayer().
    if (m_avPlayerItem) {
        for (NSString *keyName in itemKVOProperties())
            [m_avPlayerItem.get() removeObserver:m_objcObserver.get() forKeyPath:keyName];
        
        m_avPlayerItem = nil;
    }
    if (m_avPlayer) {
        if (m_timeObserver)
            [m_avPlayer.get() removeTimeObserver:m_timeObserver.get()];
        m_timeObserver = nil;
        [m_avPlayer.get() removeObserver:m_objcObserver.get() forKeyPath:@"rate"];
        m_avPlayer = nil;
    }
    setIgnoreLoadStateChanges(false);
}
304
// A layer-based renderer exists whenever the AVPlayerLayer has been created.
bool MediaPlayerPrivateAVFoundationObjC::hasLayerRenderer() const
{
    return m_videoLayer ? true : false;
}
309
// A context (software-paint) renderer exists when the AVPlayerItemVideoOutput
// has been created (10.8+) or, on older systems, the AVAssetImageGenerator.
bool MediaPlayerPrivateAVFoundationObjC::hasContextRenderer() const
{
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    return m_videoOutput;
#else
    return m_imageGenerator;
#endif
}
318
// Create whichever renderer this OS version uses for painting into a
// GraphicsContext (video output on 10.8+, image generator before that).
void MediaPlayerPrivateAVFoundationObjC::createContextVideoRenderer()
{
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    createVideoOutput();
#else
    createImageGenerator();
#endif
}
327
#if __MAC_OS_X_VERSION_MIN_REQUIRED < 1080
// Lazily create the AVAssetImageGenerator used for software painting on
// pre-10.8 systems. No-op when there is no asset yet or a generator exists.
void MediaPlayerPrivateAVFoundationObjC::createImageGenerator()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p)", this);

    if (!m_avAsset || m_imageGenerator)
        return;

    m_imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:m_avAsset.get()];

    [m_imageGenerator.get() setApertureMode:AVAssetImageGeneratorApertureModeCleanAperture];
    [m_imageGenerator.get() setAppliesPreferredTrackTransform:YES];
    // Zero tolerance so generated images correspond to the exact requested time.
    [m_imageGenerator.get() setRequestedTimeToleranceBefore:kCMTimeZero];
    [m_imageGenerator.get() setRequestedTimeToleranceAfter:kCMTimeZero];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageGenerator(%p) - returning %p", this, m_imageGenerator.get());
}
#endif
346
// Tear down the context renderer created by createContextVideoRenderer().
void MediaPlayerPrivateAVFoundationObjC::destroyContextVideoRenderer()
{
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    destroyVideoOutput();
#else
    destroyImageGenerator();
#endif
}
355
#if __MAC_OS_X_VERSION_MIN_REQUIRED < 1080
// Release the AVAssetImageGenerator; the RetainPtr assignment drops our reference.
void MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator()
{
    if (!m_imageGenerator)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyImageGenerator(%p) - destroying  %p", this, m_imageGenerator.get());

    m_imageGenerator = 0;
}
#endif
367
// Lazily build the AVPlayerLayer used for hardware-composited rendering.
// Requires an AVPlayer; does nothing if the layer already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoLayer()
{
    if (!m_avPlayer || m_videoLayer)
        return;

    m_videoLayer = adoptNS([[AVPlayerLayer alloc] init]);
    [m_videoLayer.get() setPlayer:m_avPlayer.get()];
    [m_videoLayer.get() setBackgroundColor:CGColorGetConstantColor(kCGColorBlack)];
#ifndef NDEBUG
    [m_videoLayer.get() setName:@"Video layer"];
#endif
    updateVideoLayerGravity();
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoLayer(%p) - returning %p", this, m_videoLayer.get());
}
384
// Detach the layer from the player and release it.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer()
{
    if (!m_videoLayer)
        return;

    // Fix: the format string had a single %p but two arguments were passed,
    // which is undefined behavior in the printf-style LOG macro.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoLayer(%p) - destroying %p", this, m_videoLayer.get());

    [m_videoLayer.get() setPlayer:nil];

    m_videoLayer = 0;
}
396
// A frame is available once the layer reports ready-for-display (layer mode),
// or after at least one frame has been painted (context mode).
bool MediaPlayerPrivateAVFoundationObjC::hasAvailableVideoFrame() const
{
    if (currentRenderingMode() != MediaRenderingToLayer)
        return m_videoFrameHasDrawn;

    return m_videoLayer && [m_videoLayer.get() isReadyForDisplay];
}
404
// Build the AVURLAsset for |url|, applying reference restrictions and, where
// supported, Referer/User-Agent request headers. Callbacks are delayed for the
// duration so KVO fired by setup cannot re-enter us.
void MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(const String& url)
{
    if (m_avAsset)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVAssetForURL(%p)", this);

    setDelayCallbacks(true);

    RetainPtr<NSMutableDictionary> options = adoptNS([[NSMutableDictionary alloc] init]);    

    // Disallow local<->remote cross references within the asset.
    [options.get() setObject:[NSNumber numberWithInt:AVAssetReferenceRestrictionForbidRemoteReferenceToLocal | AVAssetReferenceRestrictionForbidLocalReferenceToRemote] forKey:AVURLAssetReferenceRestrictionsKey];

#if PLATFORM(IOS) || __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    RetainPtr<NSMutableDictionary> headerFields = adoptNS([[NSMutableDictionary alloc] init]);

    String referrer = player()->referrer();
    if (!referrer.isEmpty())
        [headerFields.get() setObject:referrer forKey:@"Referer"];

    String userAgent = player()->userAgent();
    if (!userAgent.isEmpty())
        [headerFields.get() setObject:userAgent forKey:@"User-Agent"];

    // NOTE(review): "AVURLAssetHTTPHeaderFieldsKey" looks like a private
    // AVFoundation option key (a string literal, not the soft-linked constant)
    // - confirm it is still honored on supported OS versions.
    if ([headerFields.get() count])
        [options.get() setObject:headerFields.get() forKey:@"AVURLAssetHTTPHeaderFieldsKey"];
#endif

    NSURL *cocoaURL = URL(ParsedURLString, url);
    m_avAsset = adoptNS([[AVURLAsset alloc] initWithURL:cocoaURL options:options.get()]);

#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
    // Route resource-loading requests through our delegate on the shared queue.
    [[m_avAsset.get() resourceLoader] setDelegate:m_loaderDelegate.get() queue:globalLoaderDelegateQueue()];
#endif

    m_haveCheckedPlayability = false;

    setDelayCallbacks(false);
}
444
// Lazily create the AVPlayer, start observing its rate, and attach any
// already-created player item.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayer()
{
    if (m_avPlayer)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayer(%p)", this);

    setDelayCallbacks(true);

    m_avPlayer = adoptNS([[AVPlayer alloc] init]);
    // The context tags observations as player-level (vs. player-item-level).
    // NOTE(review): options:nil relies on nil converting to 0 (no KVO options) - confirm intended.
    [m_avPlayer.get() addObserver:m_objcObserver.get() forKeyPath:@"rate" options:nil context:(void *)MediaPlayerAVFoundationObservationContextPlayer];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    [m_avPlayer.get() setAppliesMediaSelectionCriteriaAutomatically:YES];
#endif

    if (m_avPlayerItem)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

    setDelayCallbacks(false);
}
466
// Lazily create the AVPlayerItem for the current asset, register for its
// end-of-playback notification and KVO properties, hand it to the player, and
// (where supported) attach a legible output for in-band captions.
void MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem()
{
    if (m_avPlayerItem)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createAVPlayerItem(%p)", this);

    setDelayCallbacks(true);

    // Create the player item so we can load media data. 
    m_avPlayerItem = adoptNS([[AVPlayerItem alloc] initWithAsset:m_avAsset.get()]);

    [[NSNotificationCenter defaultCenter] addObserver:m_objcObserver.get() selector:@selector(didEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:m_avPlayerItem.get()];

    // These registrations are balanced by removeObserver calls in cancelLoad().
    for (NSString *keyName in itemKVOProperties())
        [m_avPlayerItem.get() addObserver:m_objcObserver.get() forKeyPath:keyName options:nil context:(void *)MediaPlayerAVFoundationObservationContextPlayerItem];

    if (m_avPlayer)
        [m_avPlayer.get() replaceCurrentItemWithPlayerItem:m_avPlayerItem.get()];

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP) && HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    // WebCore renders captions itself, so suppress AVFoundation's rendering and
    // ask for styling resolved from source and rules only.
    const NSTimeInterval legibleOutputAdvanceInterval = 2;

    m_legibleOutput = adoptNS([[AVPlayerItemLegibleOutput alloc] initWithMediaSubtypesForNativeRepresentation:[NSArray array]]);
    [m_legibleOutput.get() setSuppressesPlayerRendering:YES];

    [m_legibleOutput.get() setDelegate:m_objcObserver.get() queue:dispatch_get_main_queue()];
    [m_legibleOutput.get() setAdvanceIntervalForDelegateInvocation:legibleOutputAdvanceInterval];
    [m_legibleOutput.get() setTextStylingResolution:AVPlayerItemLegibleOutputTextStylingResolutionSourceAndRulesOnly];
    [m_avPlayerItem.get() addOutput:m_legibleOutput.get()];
#endif

    setDelayCallbacks(false);
}
501
// Kick off an asynchronous "playable" query on the asset, at most once per
// asset (the flag is reset when a new asset is created); the observer is
// notified when the answer arrives.
void MediaPlayerPrivateAVFoundationObjC::checkPlayability()
{
    if (m_haveCheckedPlayability)
        return;
    m_haveCheckedPlayability = true;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::checkPlayability(%p)", this);

    [m_avAsset.get() loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"playable"] completionHandler:^{
        [m_objcObserver.get() playableKnown];
    }];
}
514
// Ask the asset to load its metadata keys asynchronously; the observer is
// notified when loading completes.
void MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::beginLoadingMetadata(%p) - requesting metadata loading", this);
    // Fix: the previous code sent an unbalanced -retain to the keys array with
    // no matching -release, leaking a reference on every call; the async API
    // keeps its arguments alive for the duration of the request itself.
    [m_avAsset.get() loadValuesAsynchronouslyForKeys:assetMetadataKeyNames() completionHandler:^{
        [m_objcObserver.get() metadataLoaded];
    }];
}
522
// Translate the AVPlayerItem's state into the engine-neutral ItemStatus enum.
MediaPlayerPrivateAVFoundation::ItemStatus MediaPlayerPrivateAVFoundationObjC::playerItemStatus() const
{
    if (!m_avPlayerItem)
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusDoesNotExist;

    switch ([m_avPlayerItem.get() status]) {
    case AVPlayerItemStatusUnknown:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusUnknown;
    case AVPlayerItemStatusFailed:
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusFailed;
    default:
        break;
    }

    // The item is usable; refine the answer with the buffering flags.
    if ([m_avPlayerItem.get() isPlaybackLikelyToKeepUp])
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackLikelyToKeepUp;
    if ([m_avPlayerItem.get() isPlaybackBufferFull])
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferFull;
    if ([m_avPlayerItem.get() isPlaybackBufferEmpty])
        return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusPlaybackBufferEmpty;

    return MediaPlayerPrivateAVFoundation::MediaPlayerAVPlayerItemStatusReadyToPlay;
}
542
// Expose the underlying AVPlayer to callers that need the native media object.
PlatformMedia MediaPlayerPrivateAVFoundationObjC::platformMedia() const
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformMedia(%p)", this);
    PlatformMedia media;
    media.type = PlatformMedia::AVFoundationMediaPlayerType;
    media.media.avfMediaPlayer = m_avPlayer.get();
    return media;
}
551
// The compositing layer used for accelerated rendering, or null when none exists.
PlatformLayer* MediaPlayerPrivateAVFoundationObjC::platformLayer() const
{
    return m_videoLayer.get();
}
556
// Show or hide the video layer inside a CATransaction with implicit animations
// disabled, so visibility changes take effect immediately.
void MediaPlayerPrivateAVFoundationObjC::platformSetVisible(bool isVisible)
{
    [CATransaction begin];
    [CATransaction setDisableActions:YES];    
    if (m_videoLayer)
        [m_videoLayer.get() setHidden:!isVisible];
    [CATransaction commit];
}
565     
// Start playback by setting the player's rate to the requested rate; AVPlayer
// plays whenever rate is non-zero. Callbacks are delayed across the change.
void MediaPlayerPrivateAVFoundationObjC::platformPlay()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPlay(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
576
// Pause playback by setting the player's rate to zero.
void MediaPlayerPrivateAVFoundationObjC::platformPause()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformPause(%p)", this);
    if (!metaDataAvailable())
        return;

    setDelayCallbacks(true);
    // Fix: -setRate: takes a float; pass 0 rather than nil (a pointer constant).
    [m_avPlayer.get() setRate:0];
    setDelayCallbacks(false);
}
587
// Duration in seconds, infinity for indefinite streams, or
// MediaPlayer::invalidTime() when unknown. Prefers the player item's duration
// (when ready) over the asset's, since some assets never report one.
float MediaPlayerPrivateAVFoundationObjC::platformDuration() const
{
    // Do not ask the asset for duration before it has been loaded or it will fetch the
    // answer synchronously.
    if (!m_avAsset || assetStatus() < MediaPlayerAVAssetStatusLoaded)
         return MediaPlayer::invalidTime();
    
    CMTime cmDuration;
    
    // Check the AVItem if we have one and it has loaded duration, some assets never report duration.
    if (m_avPlayerItem && playerItemStatus() >= MediaPlayerAVPlayerItemStatusReadyToPlay)
        cmDuration = [m_avPlayerItem.get() duration];
    else
        cmDuration= [m_avAsset.get() duration];

    if (CMTIME_IS_NUMERIC(cmDuration))
        return narrowPrecisionToFloat(CMTimeGetSeconds(cmDuration));

    if (CMTIME_IS_INDEFINITE(cmDuration)) {
        return numeric_limits<float>::infinity();
    }

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::platformDuration(%p) - invalid duration, returning %.0f", this, MediaPlayer::invalidTime());
    return MediaPlayer::invalidTime();
}
613
// Current playback position in seconds, clamped to be non-negative; 0 when no
// metadata/item is available or the item time is non-numeric.
float MediaPlayerPrivateAVFoundationObjC::currentTime() const
{
    if (!metaDataAvailable() || !m_avPlayerItem)
        return 0;

    CMTime itemTime = [m_avPlayerItem.get() currentTime];
    if (!CMTIME_IS_NUMERIC(itemTime))
        return 0;

    return max(narrowPrecisionToFloat(CMTimeGetSeconds(itemTime)), 0.0f);
}
626
// Seek the player item with zero tolerance (frame-accurate, 600-unit timescale).
void MediaPlayerPrivateAVFoundationObjC::seekToTime(double time)
{
    // setCurrentTime generates several event callbacks, update afterwards.
    setDelayCallbacks(true);

    // Capture the Obj-C observer locally so the completion block references it
    // rather than |this|; the observer is disconnected on teardown.
    WebCoreAVFMovieObserver *observer = m_objcObserver.get();
    [m_avPlayerItem.get() seekToTime:CMTimeMakeWithSeconds(time, 600) toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero completionHandler:^(BOOL finished) {
        [observer seekCompleted:finished];
    }];

    setDelayCallbacks(false);
}
639
// Forward the volume to the AVPlayer once metadata is available.
void MediaPlayerPrivateAVFoundationObjC::setVolume(float volume)
{
    if (metaDataAvailable())
        [m_avPlayer.get() setVolume:volume];
}
647
// This override only logs; no AVFoundation caption state is changed here.
// NOTE(review): UNUSED_PARAM(closedCaptionsVisible) is immediately followed by
// a use of the parameter in the LOG statement - confirm which is intended.
void MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(bool closedCaptionsVisible)
{
    UNUSED_PARAM(closedCaptionsVisible);

    if (!metaDataAvailable())
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setClosedCaptionsVisible(%p) - set to %s", this, boolString(closedCaptionsVisible));
}
657
// Push the currently requested rate to the AVPlayer, delaying callbacks so the
// resulting rate-change KVO does not re-enter us.
void MediaPlayerPrivateAVFoundationObjC::updateRate()
{
    setDelayCallbacks(true);
    [m_avPlayer.get() setRate:requestedRate()];
    setDelayCallbacks(false);
}
664
// Current playback rate reported by the AVPlayer; 0 before metadata is known.
float MediaPlayerPrivateAVFoundationObjC::rate() const
{
    if (metaDataAvailable())
        return [m_avPlayer.get() rate];

    return 0;
}
672
// Convert the player item's loadedTimeRanges into a WebCore TimeRanges object,
// skipping invalid or empty ranges. Returns an empty object when no item exists.
PassRefPtr<TimeRanges> MediaPlayerPrivateAVFoundationObjC::platformBufferedTimeRanges() const
{
    RefPtr<TimeRanges> timeRanges = TimeRanges::create();

    if (!m_avPlayerItem)
        return timeRanges.release();

    NSArray *loadedRanges = [m_avPlayerItem.get() loadedTimeRanges];
    for (NSValue *thisRangeValue in loadedRanges) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (CMTIMERANGE_IS_VALID(timeRange) && !CMTIMERANGE_IS_EMPTY(timeRange)) {
            float rangeStart = narrowPrecisionToFloat(CMTimeGetSeconds(timeRange.start));
            float rangeEnd = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange)));
            timeRanges->add(rangeStart, rangeEnd);
        }
    }
    return timeRanges.release();
}
691
// Earliest seekable position: the smallest start time across all valid,
// non-empty seekable ranges. Returns 0 when no usable range exists.
double MediaPlayerPrivateAVFoundationObjC::platformMinTimeSeekable() const
{
    NSArray *ranges = [m_avPlayerItem.get() seekableTimeRanges];
    if (![ranges count])
        return 0;

    bool foundRange = false;
    double earliest = std::numeric_limits<double>::infinity();
    for (NSValue *rangeValue in ranges) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        foundRange = true;
        double rangeStart = CMTimeGetSeconds(range.start);
        if (rangeStart < earliest)
            earliest = rangeStart;
    }
    return foundRange ? earliest : 0;
}
712
// Latest seekable position: the largest end time across all valid, non-empty
// seekable ranges; 0 when the item reports none.
double MediaPlayerPrivateAVFoundationObjC::platformMaxTimeSeekable() const
{
    NSArray *ranges = [m_avPlayerItem.get() seekableTimeRanges];
    if (!ranges)
        return 0;

    double latest = 0;
    for (NSValue *rangeValue in ranges) {
        CMTimeRange range = [rangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(range) || CMTIMERANGE_IS_EMPTY(range))
            continue;

        double rangeEnd = CMTimeGetSeconds(CMTimeRangeGetEnd(range));
        if (rangeEnd > latest)
            latest = rangeEnd;
    }
    return latest;
}
731
// Latest buffered position: the largest end time across all valid, non-empty
// loaded ranges; 0 when the item reports none.
float MediaPlayerPrivateAVFoundationObjC::platformMaxTimeLoaded() const
{
    NSArray *loadedRanges = [m_avPlayerItem.get() loadedTimeRanges];
    if (!loadedRanges)
        return 0;

    float maxTimeLoaded = 0;
    for (NSValue *thisRangeValue in loadedRanges) {
        CMTimeRange timeRange = [thisRangeValue CMTimeRangeValue];
        if (!CMTIMERANGE_IS_VALID(timeRange) || CMTIMERANGE_IS_EMPTY(timeRange))
            continue;
        
        float endOfRange = narrowPrecisionToFloat(CMTimeGetSeconds(CMTimeRangeGetEnd(timeRange)));
        if (maxTimeLoaded < endOfRange)
            maxTimeLoaded = endOfRange;
    }

    return maxTimeLoaded;   
}
751
// Sum of sample data lengths across every track of the asset, truncated to
// unsigned as required by the MediaPlayerPrivateInterface contract.
unsigned MediaPlayerPrivateAVFoundationObjC::totalBytes() const
{
    if (!metaDataAvailable())
        return 0;

    long long byteCount = 0;
    for (AVAssetTrack *track in [m_avAsset.get() tracks])
        byteCount += [track totalSampleDataLength];

    return static_cast<unsigned>(byteCount);
}
764
// Adopt an externally supplied asset; the RetainPtr assignment retains it.
void MediaPlayerPrivateAVFoundationObjC::setAsset(id asset)
{
    m_avAsset = asset;
}
769
// Aggregate the load status of every metadata key into a single AssetStatus:
// any key still loading / failed / cancelled dominates; once all keys are
// loaded, report Playable or merely Loaded based on the asset's "playable" value.
MediaPlayerPrivateAVFoundation::AssetStatus MediaPlayerPrivateAVFoundationObjC::assetStatus() const
{
    if (!m_avAsset)
        return MediaPlayerAVAssetStatusDoesNotExist;

    for (NSString *keyName in assetMetadataKeyNames()) {
        AVKeyValueStatus keyStatus = [m_avAsset.get() statusOfValueForKey:keyName error:nil];

        if (keyStatus < AVKeyValueStatusLoaded)
            return MediaPlayerAVAssetStatusLoading;// At least one key is not loaded yet.
        
        if (keyStatus == AVKeyValueStatusFailed)
            return MediaPlayerAVAssetStatusFailed; // At least one key could not be loaded.

        if (keyStatus == AVKeyValueStatusCancelled)
            return MediaPlayerAVAssetStatusCancelled; // Loading of at least one key was cancelled.
    }

    if ([[m_avAsset.get() valueForKey:@"playable"] boolValue])
        return MediaPlayerAVAssetStatusPlayable;

    return MediaPlayerAVAssetStatusLoaded;
}
793
// Paints the current video frame into a GraphicsContext. Callbacks from the
// media stack are delayed for the duration of the paint so re-entrant state
// changes cannot occur mid-draw, and any Objective-C exception raised by
// AVFoundation is swallowed by the BEGIN/END_BLOCK_OBJC_EXCEPTIONS pair.
void MediaPlayerPrivateAVFoundationObjC::paintCurrentFrameInContext(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    setDelayCallbacks(true);
    BEGIN_BLOCK_OBJC_EXCEPTIONS;

// On 10.8+ frames come from an AVPlayerItemVideoOutput; earlier systems use
// AVAssetImageGenerator snapshots.
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    paintWithVideoOutput(context, rect);
#else
    paintWithImageGenerator(context, rect);
#endif

    END_BLOCK_OBJC_EXCEPTIONS;
    setDelayCallbacks(false);

    // Remember that at least one frame reached the screen (used elsewhere to
    // decide whether a last-frame image exists).
    m_videoFrameHasDrawn = true;
}
813
// MediaPlayer entry point for software painting. Does nothing while the engine
// is rendering into a compositing layer, since the layer already displays the
// video frames.
void MediaPlayerPrivateAVFoundationObjC::paint(GraphicsContext* context, const IntRect& rect)
{
    if (!metaDataAvailable() || context->paintingDisabled())
        return;

    // We can ignore the request if we are already rendering to a layer.
    if (currentRenderingMode() == MediaRenderingToLayer)
        return;

    paintCurrentFrameInContext(context, rect);
}
825
#if __MAC_OS_X_VERSION_MIN_REQUIRED < 1080
// Pre-10.8 paint path: snapshot the frame at the current time with
// AVAssetImageGenerator and blit it into the destination rect. The context is
// flipped vertically because CGImages are bottom-up relative to WebCore's
// coordinate space.
void MediaPlayerPrivateAVFoundationObjC::paintWithImageGenerator(GraphicsContext* context, const IntRect& rect)
{
    RetainPtr<CGImageRef> image = createImageForTimeInRect(currentTime(), rect);
    if (!image)
        return;

    GraphicsContextStateSaver stateSaver(*context);

    // Flip so the bottom-up CGImage draws right side up.
    context->translate(rect.x(), rect.y() + rect.height());
    context->scale(FloatSize(1.0f, -1.0f));
    context->setImageInterpolationQuality(InterpolationLow);

    CGRect destinationRect = CGRectMake(0, 0, rect.width(), rect.height());
    CGContextDrawImage(context->platformContext(), destinationRect, image.get());
}
#endif
841
// Returns the set of MIME types AVFoundation reports as playable. Built lazily
// on first call from -[AVURLAsset audiovisualMIMETypes] and cached for the
// lifetime of the process. Not thread-safe; expected to run on one thread.
//
// Fix: the function body was missing its closing brace, leaving the following
// preprocessor block inside the function and breaking compilation.
static HashSet<String> mimeTypeCache()
{
    DEFINE_STATIC_LOCAL(HashSet<String>, cache, ());
    static bool typeListInitialized = false;

    if (typeListInitialized)
        return cache;
    typeListInitialized = true;

    NSArray *types = [AVURLAsset audiovisualMIMETypes];
    for (NSString *mimeType in types)
        cache.add(mimeType);

    return cache;
}

#if __MAC_OS_X_VERSION_MIN_REQUIRED < 1080
// Synchronously generates a CGImage of the frame at |time|, sized to fit
// |rect|. The image generator is created on demand and reused. May return a
// null RetainPtr if AVAssetImageGenerator fails (errors are discarded).
RetainPtr<CGImageRef> MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(float time, const IntRect& rect)
{
    if (!m_imageGenerator)
        createImageGenerator();
    ASSERT(m_imageGenerator);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    // Limit decode work to the size actually being painted.
    [m_imageGenerator.get() setMaximumSize:CGSize(rect.size())];
    // Timescale 600 is the conventional QuickTime timescale for second-based times.
    RetainPtr<CGImageRef> image = adoptCF([m_imageGenerator.get() copyCGImageAtTime:CMTimeMakeWithSeconds(time, 600) actualTime:nil error:nil]);

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createImageForTimeInRect(%p) - creating image took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return image;
}
#endif
880
// MediaPlayer factory callback: reports every MIME type this engine can play.
//
// Fix: the function body was missing its closing brace, which made the next
// function definition nest inside this one and broke compilation.
void MediaPlayerPrivateAVFoundationObjC::getSupportedTypes(HashSet<String>& supportedTypes)
{
    supportedTypes = mimeTypeCache();
}

// MediaPlayer factory callback implementing canPlayType() semantics for this
// engine. Unknown container types are rejected outright; a known container
// without codec information is only "maybe" supported, and with codecs we let
// AVFoundation decide via the extended MIME type check.
// (Also removes a stray double semicolon on the return statement.)
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::supportsType(const String& type, const String& codecs, const URL&)
{
    if (!mimeTypeCache().contains(type))
        return MediaPlayer::IsNotSupported;

    // The spec says:
    // "Implementors are encouraged to return "maybe" unless the type can be confidently established as being supported or not."
    if (codecs.isEmpty())
        return MediaPlayer::MayBeSupported;

    NSString *typeString = [NSString stringWithFormat:@"%@; codecs=\"%@\"", (NSString *)type, (NSString *)codecs];
    return [AVURLAsset isPlayableExtendedMIMEType:typeString] ? MediaPlayer::IsSupported : MediaPlayer::MayBeSupported;
}
899
900 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// True for the key-system identifiers this engine knows how to service
// (Apple's LSKD key delivery, with and without an explicit version suffix).
static bool keySystemIsSupported(const String& keySystem)
{
    return equalIgnoringCase(keySystem, "com.apple.lskd")
        || equalIgnoringCase(keySystem, "com.apple.lskd.1_0");
}
908
// EME-aware variant of supportsType(): validates the key system first, then
// defers to the plain canPlayType() logic for the container/codec answer.
MediaPlayer::SupportsType MediaPlayerPrivateAVFoundationObjC::extendedSupportsType(const String& type, const String& codecs, const String& keySystem, const URL& url)
{
    // From: <http://dvcs.w3.org/hg/html-media/raw-file/eme-v0.1b/encrypted-media/encrypted-media.html#dom-canplaytype>
    // In addition to the steps in the current specification, this method must run the following steps:

    // 1. Check whether the Key System is supported with the specified container and codec type(s) by following the steps for the first matching condition from the following list:
    //    If keySystem is null, continue to the next step.
    if (keySystem.isNull() || keySystem.isEmpty())
        return supportsType(type, codecs, url);

    // If keySystem contains an unrecognized or unsupported Key System, return the empty string
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::IsNotSupported;

    // If the Key System specified by keySystem does not support decrypting the container and/or codec specified in the rest of the type string.
    // (AVFoundation does not provide an API which would allow us to determine this, so this is a no-op)

    // 2. Return "maybe" or "probably" as appropriate per the existing specification of canPlayType().
    return supportsType(type, codecs, url);
}
929
// AVAssetResourceLoader delegate hook: decides whether WebCore will service
// |avRequest| itself. "skd" (key-delivery) URLs are routed through the EME
// keyNeeded path; everything else is handed to a WebCoreAVFResourceLoader.
// Returns true when we take responsibility for the request.
bool MediaPlayerPrivateAVFoundationObjC::shouldWaitForLoadingOfResource(AVAssetResourceLoadingRequest* avRequest)
{
    String scheme = [[[avRequest request] URL] scheme];
    String keyURI = [[[avRequest request] URL] absoluteString];

#if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
    if (scheme == "skd") {
        // Create an initData with the following layout:
        // [4 bytes: keyURI size], [keyURI size bytes: keyURI]
        unsigned keyURISize = keyURI.length() * sizeof(UChar);
        RefPtr<ArrayBuffer> initDataBuffer = ArrayBuffer::create(4 + keyURISize, 1);
        RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
        // Little-endian 32-bit size prefix.
        initDataView->set<uint32_t>(0, keyURISize, true);

        RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, 4, keyURI.length());
        // NOTE(review): dividing by sizeof(unsigned char) is a no-op (== 1);
        // the element count is just keyURI.length(). Behavior is unchanged, but
        // the expression looks like it meant a different unit — confirm intent.
        keyURIArray->setRange(keyURI.characters(), keyURI.length() / sizeof(unsigned char), 0);

// Both branches below feed the same indented "return false;" — only one of the
// two ENABLE() blocks is ever compiled in.
#if ENABLE(ENCRYPTED_MEDIA)
        if (!player()->keyNeeded("com.apple.lskd", emptyString(), static_cast<const unsigned char*>(initDataBuffer->data()), initDataBuffer->byteLength()))
#elif ENABLE(ENCRYPTED_MEDIA_V2)
        RefPtr<Uint8Array> initData = Uint8Array::create(initDataBuffer, 0, initDataBuffer->byteLength());
        if (!player()->keyNeeded(initData.get()))
#endif
            return false;

        // Remember the request so a later addKey()/takeRequest can answer it.
        m_keyURIToRequestMap.set(keyURI, avRequest);
        return true;
    }
#endif

    RefPtr<WebCoreAVFResourceLoader> resourceLoader = WebCoreAVFResourceLoader::create(this, avRequest);
    m_resourceLoaderMap.add(avRequest, resourceLoader);
    resourceLoader->startLoading();
    return true;
}
965
// AVAssetResourceLoader delegate hook: AVFoundation no longer wants this
// resource, so stop the in-flight loader servicing it (if any). The map entry
// itself is cleaned up in didStopLoadingRequest().
// Fix: removed the unused local |scheme| that was computed and never read.
void MediaPlayerPrivateAVFoundationObjC::didCancelLoadingRequest(AVAssetResourceLoadingRequest* avRequest)
{
    if (WebCoreAVFResourceLoader* resourceLoader = m_resourceLoaderMap.get(avRequest))
        resourceLoader->stopLoading();
}
975
// Final teardown for a resource request: drop the loader from the map; the
// WebCoreAVFResourceLoader is destroyed when its last RefPtr goes away.
void MediaPlayerPrivateAVFoundationObjC::didStopLoadingRequest(AVAssetResourceLoadingRequest *avRequest)
{
    m_resourceLoaderMap.remove(avRequest);
}
980 #endif
981
// Engine availability: true only when both the AVFoundation and CoreMedia
// frameworks were successfully soft-linked at runtime.
bool MediaPlayerPrivateAVFoundationObjC::isAvailable()
{
    return AVFoundationLibrary() && CoreMediaLibrary();
}
986
// Intended to snap a wall-clock time value to the nearest media sample time;
// currently an identity mapping (see FIXME below).
float MediaPlayerPrivateAVFoundationObjC::mediaTimeForTimeValue(float timeValue) const
{
    if (!metaDataAvailable())
        return timeValue;

    // FIXME - impossible to implement until rdar://8721510 is fixed.
    return timeValue;
}
995
// Pushes the current aspect-ratio preference down to the AVPlayerLayer. The
// change is wrapped in a CATransaction with implicit actions disabled so the
// gravity switch is not animated.
void MediaPlayerPrivateAVFoundationObjC::updateVideoLayerGravity()
{
    if (!m_videoLayer)
        return;

    NSString* videoGravity = AVLayerVideoGravityResize;
    if (shouldMaintainAspectRatio())
        videoGravity = AVLayerVideoGravityResizeAspect;

    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    [m_videoLayer.get() setVideoGravity:videoGravity];
    [CATransaction commit];
}
1007
// Invoked whenever the asset's or player item's track collection changes.
// Re-derives and caches hasVideo/hasAudio/hasClosedCaptions, diffs the audio
// track list against our AudioTrackPrivate wrappers, refreshes text-track
// state, and fires characteristicsChanged() if the primary audio language
// changed. Characteristic notifications are batched for the duration.
void MediaPlayerPrivateAVFoundationObjC::tracksChanged()
{
    // Invalidate the cached primary-audio language; it is recomputed lazily at
    // the end of this function via languageOfPrimaryAudioTrack().
    String primaryAudioTrackLanguage = m_languageOfPrimaryAudioTrack;
    m_languageOfPrimaryAudioTrack = String();

    if (!m_avAsset)
        return;

    setDelayCharacteristicsChangedNotification(true);

    bool haveCCTrack = false;
    bool hasCaptions = false;

    // This is called whenever the tracks collection changes so cache hasVideo and hasAudio since we are
    // asked about those fairly frequently.
    if (!m_avPlayerItem) {
        // We don't have a player item yet, so check with the asset because some assets support inspection
        // prior to becoming ready to play.
        setHasVideo([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicVisual] count]);
        setHasAudio([[m_avAsset.get() tracksWithMediaCharacteristic:AVMediaCharacteristicAudible] count]);
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        hasCaptions = [[m_avAsset.get() tracksWithMediaType:AVMediaTypeClosedCaption] count];
#endif
    } else {
        bool hasVideo = false;
        bool hasAudio = false;
        NSArray *tracks = [m_avPlayerItem.get() tracks];
        for (AVPlayerItemTrack *track in tracks) {
            if ([track isEnabled]) {
                AVAssetTrack *assetTrack = [track assetTrack];
                if ([[assetTrack mediaType] isEqualToString:AVMediaTypeVideo])
                    hasVideo = true;
                else if ([[assetTrack mediaType] isEqualToString:AVMediaTypeAudio])
                    hasAudio = true;
                else if ([[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption]) {
#if !HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
                    hasCaptions = true;
#endif
                    haveCCTrack = true;
                }
            }
        }
        setHasVideo(hasVideo);
        setHasAudio(hasAudio);


#if ENABLE(VIDEO_TRACK)
        // Set-diff the item's current audio AVPlayerItemTracks against the
        // tracks our AudioTrackPrivateAVFObjC wrappers were built from.
        RetainPtr<NSSet> audioTracks = adoptNS([[NSSet alloc] initWithArray:[tracks objectsAtIndexes:[tracks indexesOfObjectsPassingTest:^(id track, NSUInteger, BOOL*){
            return [[[track assetTrack] mediaType] isEqualToString:AVMediaTypeAudio];
        }]]]);
        RetainPtr<NSMutableSet> oldAudioTracks = adoptNS([[NSMutableSet alloc] initWithCapacity:m_audioTracks.size()]);

        typedef Vector<RefPtr<AudioTrackPrivateAVFObjC> > AudioTrackVector;
        for (AudioTrackVector::iterator i = m_audioTracks.begin(); i != m_audioTracks.end(); ++i)
            [oldAudioTracks.get() addObject:(*i)->playerItemTrack()];

        // removed = old - current; added = current - old.
        RetainPtr<NSMutableSet> removedAVAudioTracks = adoptNS([oldAudioTracks.get() mutableCopy]);
        [removedAVAudioTracks.get() minusSet:audioTracks.get()];

        RetainPtr<NSMutableSet> addedAVAudioTracks = adoptNS([audioTracks.get() mutableCopy]);
        [addedAVAudioTracks.get() minusSet:oldAudioTracks.get()];

        AudioTrackVector replacementAudioTracks;
        AudioTrackVector addedAudioTracks;
        AudioTrackVector removedAudioTracks;
        for (AudioTrackVector::iterator i = m_audioTracks.begin(); i != m_audioTracks.end(); ++i) {
            if ([removedAVAudioTracks containsObject:(*i)->playerItemTrack()])
                removedAudioTracks.append(*i);
            else
                replacementAudioTracks.append(*i);
        }

        for (AVPlayerItemTrack* playerItemTrack in addedAVAudioTracks.get())
            addedAudioTracks.append(AudioTrackPrivateAVFObjC::create(playerItemTrack));

        replacementAudioTracks.appendVector(addedAudioTracks);

        m_audioTracks.swap(replacementAudioTracks);

        // Notify the MediaPlayer after m_audioTracks reflects the new state.
        for (AudioTrackVector::iterator i = removedAudioTracks.begin(); i != removedAudioTracks.end(); ++i)
            player()->removeAudioTrack(*i);

        for (AudioTrackVector::iterator i = addedAudioTracks.begin(); i != addedAudioTracks.end(); ++i)
            player()->addAudioTrack(*i);
#endif
    }

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia()) {
        hasCaptions = [[AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]] count];
        if (hasCaptions)
            processMediaSelectionOptions();
    }
#endif

// Fall back to legacy CC tracks only when no selection-group captions exist.
#if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT) && HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    if (!hasCaptions && haveCCTrack)
        processLegacyClosedCaptionsTracks();
#elif !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
    if (haveCCTrack)
        processLegacyClosedCaptionsTracks();
#endif

    setHasClosedCaptions(hasCaptions);

    LOG(Media, "WebCoreAVFMovieObserver:tracksChanged(%p) - hasVideo = %s, hasAudio = %s, hasCaptions = %s",
        this, boolString(hasVideo()), boolString(hasAudio()), boolString(hasClosedCaptions()));

    sizeChanged();

    if (primaryAudioTrackLanguage != languageOfPrimaryAudioTrack())
        characteristicsChanged();

    setDelayCharacteristicsChangedNotification(false);
}
1123
// Recomputes the movie's natural size from its visual tracks and reports it
// via setNaturalSize() (which notifies the player only on change).
void MediaPlayerPrivateAVFoundationObjC::sizeChanged()
{
    if (!m_avAsset)
        return;

    NSArray *tracks = [m_avAsset.get() tracks];

    // Some assets don't report track properties until they are completely ready to play, but we
    // want to report a size as early as possible so use presentationSize when an asset has no tracks.
    if (m_avPlayerItem && ![tracks count]) {
        setNaturalSize(IntSize([m_avPlayerItem.get() presentationSize]));
        return;
    }

    // AVAsset's 'naturalSize' property only considers the movie's first video track, so we need to compute
    // the union of all visual track rects.
    CGRect trackUnionRect = CGRectZero;
    for (AVAssetTrack *track in tracks) {
        CGSize trackSize = [track naturalSize];
        CGRect trackRect = CGRectMake(0, 0, trackSize.width, trackSize.height);
        trackUnionRect = CGRectUnion(trackUnionRect, CGRectApplyAffineTransform(trackRect, [track preferredTransform]));
    }

    // The movie is always displayed at 0,0 so move the track rect to the origin before using width and height.
    // NOTE(review): CGRectOffset with +origin.x/+origin.y shifts the rect
    // further from the origin rather than onto it; offsetting by the negated
    // origin would match the comment. Harmless here because only .size is
    // consumed below — confirm before relying on the origin.
    trackUnionRect = CGRectOffset(trackUnionRect, trackUnionRect.origin.x, trackUnionRect.origin.y);
    
    // Also look at the asset's preferred transform so we account for a movie matrix.
    CGSize naturalSize = CGSizeApplyAffineTransform(trackUnionRect.size, [m_avAsset.get() preferredTransform]);

    // Cache the natural size (setNaturalSize will notify the player if it has changed).
    setNaturalSize(IntSize(naturalSize));
}
1156
// True when the asset's resolved URL (after any redirects, as reported by
// wkAVAssetResolvedURL) shares scheme/host/port with the URL that was
// originally requested — i.e. loading never crossed an origin boundary.
bool MediaPlayerPrivateAVFoundationObjC::hasSingleSecurityOrigin() const 
{
    if (!m_avAsset)
        return false;
    
    RefPtr<SecurityOrigin> resolvedOrigin = SecurityOrigin::create(URL(wkAVAssetResolvedURL(m_avAsset.get())));
    RefPtr<SecurityOrigin> requestedOrigin = SecurityOrigin::createFromString(assetURL());
    return resolvedOrigin->isSameSchemeHostPort(requestedOrigin.get());
}
1166
1167 #if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
// Lazily creates the AVPlayerItemVideoOutput used for software painting and
// attaches it to the player item. No-op when there is no item yet or an
// output already exists.
void MediaPlayerPrivateAVFoundationObjC::createVideoOutput()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p)", this);

    if (!m_avPlayerItem || m_videoOutput)
        return;

// On 10.9+ request 4:2:2 YpCbCr and convert later with VideoToolbox (see
// createPixelBuffer()); earlier systems request 32BGRA directly.
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
    NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_422YpCbCr8) };
#else
    NSDictionary* attributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:k32BGRAPixelFormat], kCVPixelBufferPixelFormatTypeKey,
                                nil];
#endif
    m_videoOutput = adoptNS([[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes]);
    ASSERT(m_videoOutput);

    [m_avPlayerItem.get() addOutput:m_videoOutput.get()];

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createVideoOutput(%p) - returning %p", this, m_videoOutput.get());
}
1188
// Detaches and releases the AVPlayerItemVideoOutput created by
// createVideoOutput(). Safe to call when no output exists.
void MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput()
{
    if (!m_videoOutput)
        return;

    // The player item may already have been destroyed independently.
    if (m_avPlayerItem)
        [m_avPlayerItem.get() removeOutput:m_videoOutput.get()];
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::destroyVideoOutput(%p) - destroying  %p", this, m_videoOutput.get());

    m_videoOutput = 0;
}
1200
// Pulls the video frame for the item's current time out of the
// AVPlayerItemVideoOutput as a CVPixelBuffer. Returns 0 when no new frame is
// available (callers keep displaying their last image) or when the copy
// fails. On 10.9+ the 4:2:2 buffer requested in createVideoOutput() is
// converted to 32BGRA through a cached VTPixelTransferSession.
RetainPtr<CVPixelBufferRef> MediaPlayerPrivateAVFoundationObjC::createPixelBuffer()
{
    if (!m_videoOutput)
        createVideoOutput();
    ASSERT(m_videoOutput);

#if !LOG_DISABLED
    double start = monotonicallyIncreasingTime();
#endif

    CMTime currentTime = [m_avPlayerItem.get() currentTime];

    if (![m_videoOutput.get() hasNewPixelBufferForItemTime:currentTime])
        return 0;

    RetainPtr<CVPixelBufferRef> buffer = adoptCF([m_videoOutput.get() copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nil]);
    if (!buffer)
        return 0;

#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
    // Create a VTPixelTransferSession, if necessary, as we cannot guarantee timely delivery of ARGB pixels.
    if (!m_pixelTransferSession) {
        VTPixelTransferSessionRef session = 0;
        VTPixelTransferSessionCreate(kCFAllocatorDefault, &session);
        m_pixelTransferSession = adoptCF(session);
    }

    // Zero-initialize so a CVPixelBufferCreate failure cannot hand an
    // uninitialized pointer to the transfer call below.
    CVPixelBufferRef outputBuffer = 0;
    CVPixelBufferCreate(kCFAllocatorDefault, CVPixelBufferGetWidth(buffer.get()), CVPixelBufferGetHeight(buffer.get()), k32BGRAPixelFormat, 0, &outputBuffer);
    VTPixelTransferSessionTransferImage(m_pixelTransferSession.get(), buffer.get(), outputBuffer);
    buffer = adoptCF(outputBuffer);
#endif

#if !LOG_DISABLED
    double duration = monotonicallyIncreasingTime() - start;
    // Fix: the format string was missing "(%p)", so |this| was being consumed
    // by the "%.4f" conversion — a format/argument mismatch.
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::createPixelBuffer(%p) - creating buffer took %.4f", this, narrowPrecisionToFloat(duration));
#endif

    return buffer;
}
1241
// 10.8+ paint path: fetch the newest frame from the video output and draw it
// (or, if no new frame exists, redraw the last one) into the context via
// CoreImage, flipping vertically for the bottom-up pixel buffer.
void MediaPlayerPrivateAVFoundationObjC::paintWithVideoOutput(GraphicsContext* context, const IntRect& rect)
{
    RetainPtr<CVPixelBufferRef> pixelBuffer = createPixelBuffer();

    // Calls to copyPixelBufferForItemTime:itemTimeForDisplay: may return nil if the pixel buffer
    // for the requested time has already been retrieved. In this case, the last valid image (if any)
    // should be displayed.
    if (pixelBuffer)
        m_lastImage = pixelBuffer;

    if (m_lastImage) {
        GraphicsContextStateSaver stateSaver(*context);
        context->translate(rect.x(), rect.y() + rect.height());
        context->scale(FloatSize(1.0f, -1.0f));
        RetainPtr<CIImage> image = adoptNS([[CIImage alloc] initWithCVImageBuffer:m_lastImage.get()]);

        // ciContext does not use a RetainPtr for results of contextWithCGContext:, as the returned value
        // is autoreleased, and there is no non-autoreleased version of that function.
        CIContext* ciContext = [CIContext contextWithCGContext:context->platformContext() options:nil];
        CGRect outputRect = { CGPointZero, rect.size() };
        CGRect imageRect = CGRectMake(0, 0, CVPixelBufferGetWidth(m_lastImage.get()), CVPixelBufferGetHeight(m_lastImage.get()));
        [ciContext drawImage:image.get() inRect:outputRect fromRect:imageRect];
    }
}
1266 #endif
1267
1268 #if ENABLE(ENCRYPTED_MEDIA) || ENABLE(ENCRYPTED_MEDIA_V2)
// Parses an EME initData blob into its keyURI, keyID (contentID) and
// certificate components. Returns false on any malformed/truncated input.
// Fix: the bounds checks were written as "offset + length > total", which can
// wrap around in uint32_t arithmetic on hostile length fields and accept
// out-of-range lengths; they now compare against the remaining byte count.
bool MediaPlayerPrivateAVFoundationObjC::extractKeyURIKeyIDAndCertificateFromInitData(Uint8Array* initData, String& keyURI, String& keyID, RefPtr<Uint8Array>& certificate)
{
    // initData should have the following layout:
    // [4 bytes: keyURI length][N bytes: keyURI][4 bytes: contentID length], [N bytes: contentID], [4 bytes: certificate length][N bytes: certificate]
    if (initData->byteLength() < 4)
        return false;

    RefPtr<ArrayBuffer> initDataBuffer = initData->buffer();

    // Use a DataView to read uint32 values from the buffer, as Uint32Array requires the reads be aligned on 4-byte boundaries. 
    RefPtr<JSC::DataView> initDataView = JSC::DataView::create(initDataBuffer, 0, initDataBuffer->byteLength());
    uint32_t offset = 0;
    bool status = true;

    uint32_t keyURILength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    // A successful get<> above guarantees offset <= length, so the subtraction
    // below cannot underflow; comparing lengths this way avoids overflow in
    // "offset + keyURILength".
    if (!status || keyURILength > initData->length() - offset)
        return false;

    RefPtr<Uint16Array> keyURIArray = Uint16Array::create(initDataBuffer, offset, keyURILength);
    if (!keyURIArray)
        return false;

    keyURI = String(keyURIArray->data(), keyURILength / sizeof(unsigned short));
    offset += keyURILength;

    uint32_t keyIDLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || keyIDLength > initData->length() - offset)
        return false;

    RefPtr<Uint16Array> keyIDArray = Uint16Array::create(initDataBuffer, offset, keyIDLength);
    if (!keyIDArray)
        return false;

    keyID = String(keyIDArray->data(), keyIDLength / sizeof(unsigned short));
    offset += keyIDLength;

    uint32_t certificateLength = initDataView->get<uint32_t>(offset, true, &status);
    offset += 4;
    if (!status || certificateLength > initData->length() - offset)
        return false;

    certificate = Uint8Array::create(initDataBuffer, offset, certificateLength);
    if (!certificate)
        return false;

    return true;
}
1318 #endif
1319
1320 #if ENABLE(ENCRYPTED_MEDIA)
// EME v1 generateKeyRequest(): unpacks the initData into keyURI/keyID/
// certificate, asks AVFoundation for a streaming content key request for the
// pending loading request matching that keyURI, and forwards the resulting
// bytes to the page via keyMessage() under a freshly minted session ID.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::generateKeyRequest(const String& keySystem, const unsigned char* initDataPtr, unsigned initDataLength)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    RefPtr<Uint8Array> initData = Uint8Array::create(initDataPtr, initDataLength);
    String keyURI;
    String keyID;
    RefPtr<Uint8Array> certificate;
    if (!extractKeyURIKeyIDAndCertificateFromInitData(initData.get(), keyURI, keyID, certificate))
        return MediaPlayer::InvalidPlayerState;

    // A matching request must have been stashed by shouldWaitForLoadingOfResource().
    if (!m_keyURIToRequestMap.contains(keyURI))
        return MediaPlayer::InvalidPlayerState;

    String sessionID = createCanonicalUUIDString();

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_keyURIToRequestMap.get(keyURI);

    RetainPtr<NSData> certificateData = adoptNS([[NSData alloc] initWithBytes:certificate->baseAddress() length:certificate->byteLength()]);
    NSString* assetStr = keyID;
    RetainPtr<NSData> assetID = [NSData dataWithBytes: [assetStr cStringUsingEncoding:NSUTF8StringEncoding] length:[assetStr lengthOfBytesUsingEncoding:NSUTF8StringEncoding]];
    NSError* error = 0;
    RetainPtr<NSData> keyRequest = [avRequest.get() streamingContentKeyRequestDataForApp:certificateData.get() contentIdentifier:assetID.get() options:nil error:&error];

    if (!keyRequest) {
        // Surface the underlying platform error code to the page; this is not
        // treated as a MediaKeyException.
        NSError* underlyingError = [[error userInfo] objectForKey:NSUnderlyingErrorKey];
        player()->keyError(keySystem, sessionID, MediaPlayerClient::DomainError, [underlyingError code]);
        return MediaPlayer::NoError;
    }

    RefPtr<ArrayBuffer> keyRequestBuffer = ArrayBuffer::create([keyRequest.get() bytes], [keyRequest.get() length]);
    RefPtr<Uint8Array> keyRequestArray = Uint8Array::create(keyRequestBuffer, 0, keyRequestBuffer->byteLength());
    player()->keyMessage(keySystem, sessionID, keyRequestArray->data(), keyRequestArray->byteLength(), URL());

    // Move ownership of the AVAssetResourceLoadingRequest from the keyIDToRequestMap to the sessionIDToRequestMap:
    m_sessionIDToRequestMap.set(sessionID, avRequest);
    m_keyURIToRequestMap.remove(keyURI);

    return MediaPlayer::NoError;
}
1362
// EME v1 addKey(): delivers the decryption key bytes to the pending
// AVAssetResourceLoadingRequest associated with |sessionID| and completes that
// request, then tells the page the key was added.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::addKey(const String& keySystem, const unsigned char* keyPtr, unsigned keyLength, const unsigned char* initDataPtr, unsigned initDataLength, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    // The session must have been created by a prior generateKeyRequest().
    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    RetainPtr<AVAssetResourceLoadingRequest> avRequest = m_sessionIDToRequestMap.get(sessionID);
    RetainPtr<NSData> keyData = adoptNS([[NSData alloc] initWithBytes:keyPtr length:keyLength]);
    [[avRequest.get() dataRequest] respondWithData:keyData.get()];
    [avRequest.get() finishLoading];
    m_sessionIDToRequestMap.remove(sessionID);

    player()->keyAdded(keySystem, sessionID);

    // initData is not needed on this platform; the key URI was captured earlier.
    UNUSED_PARAM(initDataPtr);
    UNUSED_PARAM(initDataLength);
    return MediaPlayer::NoError;
}
1383
// EME v1 cancelKeyRequest(): abandons the outstanding key request for
// |sessionID|. Dropping the map's RetainPtr releases the loading request.
MediaPlayer::MediaKeyException MediaPlayerPrivateAVFoundationObjC::cancelKeyRequest(const String& keySystem, const String& sessionID)
{
    if (!keySystemIsSupported(keySystem))
        return MediaPlayer::KeySystemNotSupported;

    if (!m_sessionIDToRequestMap.contains(sessionID))
        return MediaPlayer::InvalidPlayerState;

    m_sessionIDToRequestMap.remove(sessionID);
    return MediaPlayer::NoError;
}
1395 #endif
1396
1397 #if ENABLE(ENCRYPTED_MEDIA_V2)
// Class-level lookup used by EME v2 CDM code: finds the private engine for
// |player| and removes-and-returns its pending loading request for |keyURI|.
// Returns null when the player is unknown or no such request is pending.
RetainPtr<AVAssetResourceLoadingRequest> MediaPlayerPrivateAVFoundationObjC::takeRequestForPlayerAndKeyURI(MediaPlayer* player, const String& keyURI)
{
    MediaPlayerPrivateAVFoundationObjC* privatePlayer = playerToPrivateMap().get(player);
    if (!privatePlayer)
        return nullptr;

    return privatePlayer->m_keyURIToRequestMap.take(keyURI);
}
1407 #endif
1408
1409 #if !HAVE(AVFOUNDATION_LEGIBLE_OUTPUT_SUPPORT)
// Builds InbandTextTrackPrivateLegacyAVFObjC wrappers for the player item's
// closed-caption tracks on platforms without legible-output support. Existing
// wrappers whose AVPlayerItemTrack is still present are kept; the remainder
// are reported as removed via processNewAndRemovedTextTracks().
void MediaPlayerPrivateAVFoundationObjC::processLegacyClosedCaptionsTracks()
{
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif

    Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks;
    NSArray *tracks = [m_avPlayerItem.get() tracks];
    for (AVPlayerItemTrack *playerItemTrack in tracks) {

        AVAssetTrack *assetTrack = [playerItemTrack assetTrack];
        if (![[assetTrack mediaType] isEqualToString:AVMediaTypeClosedCaption])
            continue;

        bool newCCTrack = true;
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
            if (!removedTextTracks[i - 1]->isLegacyClosedCaptionsTrack())
                continue;

            // Fix: index into removedTextTracks, not m_textTracks. Once an
            // entry has been removed from removedTextTracks the two vectors no
            // longer share indices, so the old code could compare against the
            // wrong track (this mirrors processMediaSelectionOptions()).
            RefPtr<InbandTextTrackPrivateLegacyAVFObjC> track = static_cast<InbandTextTrackPrivateLegacyAVFObjC*>(removedTextTracks[i - 1].get());
            if (track->avPlayerItemTrack() == playerItemTrack) {
                removedTextTracks.remove(i - 1);
                newCCTrack = false;
                break;
            }
        }

        if (!newCCTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateLegacyAVFObjC::create(this, playerItemTrack));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
1445 #endif
1446
1447 #if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
// Returns the asset's legible media selection group, or nil when there is no
// asset or its availableMediaCharacteristicsWithMediaSelectionOptions key has
// not finished loading yet (querying the group before then is unsafe).
AVMediaSelectionGroupType* MediaPlayerPrivateAVFoundationObjC::safeMediaSelectionGroupForLegibleMedia()
{
    if (!m_avAsset || [m_avAsset.get() statusOfValueForKey:@"availableMediaCharacteristicsWithMediaSelectionOptions" error:NULL] != AVKeyValueStatusLoaded)
        return nil;

    return [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible];
}
1458
// Diffs the legible selection group's playable options against our in-band
// text track wrappers: options already wrapped are kept, vanished ones are
// reported as removed, and new options get fresh InbandTextTrackPrivateAVFObjC
// wrappers.
void MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions()
{
    AVMediaSelectionGroupType *legibleGroup = safeMediaSelectionGroupForLegibleMedia();
    if (!legibleGroup) {
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::processMediaSelectionOptions(%p) - nil mediaSelectionGroup", this);
        return;
    }

    // We enabled automatic media selection because we want alternate audio tracks to be enabled/disabled automatically,
    // but set the selected legible track to nil so text tracks will not be automatically configured.
    if (!m_textTracks.size())
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];

    Vector<RefPtr<InbandTextTrackPrivateAVF> > removedTextTracks = m_textTracks;
    NSArray *legibleOptions = [AVMediaSelectionGroup playableMediaSelectionOptionsFromArray:[legibleGroup options]];
    for (AVMediaSelectionOptionType *option in legibleOptions) {
        bool newTrack = true;
        // Iterate backwards so remove(i - 1) cannot skip entries.
        for (unsigned i = removedTextTracks.size(); i > 0; --i) {
             // Legacy CC tracks are managed by processLegacyClosedCaptionsTracks().
             if (removedTextTracks[i - 1]->isLegacyClosedCaptionsTrack())
                 continue;

            RefPtr<InbandTextTrackPrivateAVFObjC> track = static_cast<InbandTextTrackPrivateAVFObjC*>(removedTextTracks[i - 1].get());
            if ([track->mediaSelectionOption() isEqual:option]) {
                removedTextTracks.remove(i - 1);
                newTrack = false;
                break;
            }
        }
        if (!newTrack)
            continue;

        m_textTracks.append(InbandTextTrackPrivateAVFObjC::create(this, option));
    }

    processNewAndRemovedTextTracks(removedTextTracks);
}
1495
// Legible-output callback: forwards newly arrived attributed strings (cues)
// at |time| to the currently selected in-band text track, if any.
void MediaPlayerPrivateAVFoundationObjC::processCue(NSArray *attributedStrings, double time)
{
    if (!m_currentTrack)
        return;

    m_currentTrack->processCue(reinterpret_cast<CFArrayRef>(attributedStrings), time);
}
1503
void MediaPlayerPrivateAVFoundationObjC::flushCues()
{
    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::flushCues(%p)", this);

    // Reset the cue state of the selected in-band track, if any, after the
    // legible output flushed its sample sequence.
    if (m_currentTrack)
        m_currentTrack->resetCueValues();
}
1513 #endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
1514
// Selects (or, with a null track, deselects) the in-band text track whose cues
// should be delivered. Legacy closed-caption tracks are toggled through the
// AVPlayer closed-caption API; media-selection tracks through the legible
// AVMediaSelectionGroup.
void MediaPlayerPrivateAVFoundationObjC::setCurrentTrack(InbandTextTrackPrivateAVF *track)
{
    if (m_currentTrack == track)
        return;

    LOG(Media, "MediaPlayerPrivateAVFoundationObjC::setCurrentTrack(%p) - selecting track %p, language = %s", this, track, track ? track->language().string().utf8().data() : "");

    m_currentTrack = track;

    if (track) {
        if (track->isLegacyClosedCaptionsTrack())
            [m_avPlayer.get() setClosedCaptionDisplayEnabled:YES];
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        else
            [m_avPlayerItem.get() selectMediaOption:static_cast<InbandTextTrackPrivateAVFObjC*>(track)->mediaSelectionOption() inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
    } else {
#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
        // Pass nil (not the integer literal 0) for the option, matching the
        // deselection call in processMediaSelectionOptions().
        [m_avPlayerItem.get() selectMediaOption:nil inMediaSelectionGroup:safeMediaSelectionGroupForLegibleMedia()];
#endif
        [m_avPlayer.get() setClosedCaptionDisplayEnabled:NO];
    }
}
1539
// Returns the language of the primary audio track, caching the result in
// m_languageOfPrimaryAudioTrack until it is invalidated. Returns the empty
// string when no single language can be determined.
String MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack() const
{
    if (!m_languageOfPrimaryAudioTrack.isNull())
        return m_languageOfPrimaryAudioTrack;

    if (!m_avPlayerItem.get())
        return emptyString();

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
    // If AVFoundation has an audible group, return the language of the currently selected audible option.
    AVMediaSelectionGroupType *audibleGroup = [m_avAsset.get() mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible];
    AVMediaSelectionOptionType *currentlySelectedAudibleOption = [m_avPlayerItem.get() selectedMediaOptionInMediaSelectionGroup:audibleGroup];
    if (currentlySelectedAudibleOption) {
        m_languageOfPrimaryAudioTrack = [[currentlySelectedAudibleOption locale] localeIdentifier];
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of selected audible option: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());

        return m_languageOfPrimaryAudioTrack;
    }
#endif // HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)

    // AVFoundation synthesizes an audible group when there is only one ungrouped audio track if there is also a legible group (one or
    // more in-band text tracks). It doesn't know about out-of-band tracks, so if there is a single audio track return its language.
    NSArray *tracks = [m_avAsset.get() tracksWithMediaType:AVMediaTypeAudio];
    if (!tracks || [tracks count] != 1) {
        m_languageOfPrimaryAudioTrack = emptyString();
        // -count returns NSUInteger; cast explicitly so the format specifier is
        // correct on both 32- and 64-bit builds (the old "%i" was a mismatch).
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - %lu audio tracks, returning emptyString()", this, static_cast<unsigned long>(tracks ? [tracks count] : 0));
        return m_languageOfPrimaryAudioTrack;
    }

    AVAssetTrack *track = [tracks objectAtIndex:0];
    m_languageOfPrimaryAudioTrack = AudioTrackPrivateAVFObjC::languageForAVAssetTrack(track);

#if !LOG_DISABLED
    if (m_languageOfPrimaryAudioTrack.isEmpty())
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - single audio track has no language, returning emptyString()", this);
    else
        LOG(Media, "MediaPlayerPrivateAVFoundationObjC::languageOfPrimaryAudioTrack(%p) - returning language of single audio track: %s", this, m_languageOfPrimaryAudioTrack.utf8().data());
#endif

    return m_languageOfPrimaryAudioTrack;
}
1581
// Keys whose values must be loaded on the AVAsset before it is usable.
NSArray* assetMetadataKeyNames()
{
    // Created once and intentionally never released; lives for the process lifetime.
    static NSArray* keyNames;
    if (keyNames)
        return keyNames;

    keyNames = [[NSArray alloc] initWithObjects:
        @"duration",
        @"naturalSize",
        @"preferredTransform",
        @"preferredVolume",
        @"preferredRate",
        @"playable",
        @"tracks",
        @"availableMediaCharacteristicsWithMediaSelectionOptions",
        nil];
    return keyNames;
}
1598
// AVPlayerItem key paths observed via KVO by WebCoreAVFMovieObserver.
NSArray* itemKVOProperties()
{
    // Created once and intentionally never released; lives for the process lifetime.
    static NSArray* keyPaths;
    if (keyPaths)
        return keyPaths;

    keyPaths = [[NSArray alloc] initWithObjects:
        @"presentationSize",
        @"status",
        @"asset",
        @"tracks",
        @"seekableTimeRanges",
        @"loadedTimeRanges",
        @"playbackLikelyToKeepUp",
        @"playbackBufferFull",
        @"playbackBufferEmpty",
        @"duration",
        @"hasEnabledAudio",
        nil];
    return keyPaths;
}
1618
1619 } // namespace WebCore
1620
@implementation WebCoreAVFMovieObserver

// Bridges AVFoundation callbacks (KVO on the player/item, notifications, and
// AVPlayerItemLegibleOutput delivery) to the owning C++ player. m_callback is
// cleared via -disconnect when the owner goes away, so every entry point —
// including the asynchronously executed blocks below — re-checks it.

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    self = [super init];
    if (!self)
        return nil;
    m_callback = callback;
    return self;
}

- (void)disconnect
{
    // Cancel pending performSelector requests targeting us so nothing fires
    // after the owner is destroyed.
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    m_callback = 0;
}

- (void)metadataLoaded
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetMetadataLoaded);
}

- (void)playableKnown
{
    if (!m_callback)
        return;

    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::AssetPlayabilityKnown);
}

- (void)seekCompleted:(BOOL)finished
{
    if (!m_callback)
        return;
    
    // static_cast normalizes BOOL (signed char) to a C++ bool for the notification payload.
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::SeekCompleted, static_cast<bool>(finished));
}

- (void)didEnd:(NSNotification *)unusedNotification
{
    UNUSED_PARAM(unusedNotification);
    if (!m_callback)
        return;
    m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemDidPlayToEndTime);
}

// NOTE(review): keyPath is declared without an explicit type (implicitly id);
// it is used as an NSString below.
- (void)observeValueForKeyPath:keyPath ofObject:(id)object change:(NSDictionary *)change context:(MediaPlayerAVFoundationObservationContext)context
{
    UNUSED_PARAM(change);

    LOG(Media, "WebCoreAVFMovieObserver:observeValueForKeyPath(%p) - keyPath = %s", self, [keyPath UTF8String]);

    if (!m_callback)
        return;

    if (context == MediaPlayerAVFoundationObservationContextPlayerItem) {
        // A value changed for an AVPlayerItem
        if ([keyPath isEqualToString:@"status"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemStatusChanged);
        else if ([keyPath isEqualToString:@"playbackLikelyToKeepUp"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackLikelyToKeepUpChanged);
        else if ([keyPath isEqualToString:@"playbackBufferEmpty"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferEmptyChanged);
        else if ([keyPath isEqualToString:@"playbackBufferFull"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemIsPlaybackBufferFullChanged);
        else if ([keyPath isEqualToString:@"asset"])
            m_callback->setAsset([object asset]);
        else if ([keyPath isEqualToString:@"loadedTimeRanges"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemLoadedTimeRangesChanged);
        else if ([keyPath isEqualToString:@"seekableTimeRanges"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemSeekableTimeRangesChanged);
        else if ([keyPath isEqualToString:@"tracks"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemTracksChanged);
        else if ([keyPath isEqualToString:@"hasEnabledAudio"])
            // hasEnabledAudio deliberately maps onto the same tracks-changed notification.
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemTracksChanged);
        else if ([keyPath isEqualToString:@"presentationSize"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::ItemPresentationSizeChanged);
        else if ([keyPath isEqualToString:@"duration"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::DurationChanged);

        return;
    }

    if (context == MediaPlayerAVFoundationObservationContextPlayer) {
        // A value changed for an AVPlayer.
        if ([keyPath isEqualToString:@"rate"])
            m_callback->scheduleMainThreadNotification(MediaPlayerPrivateAVFoundation::Notification::PlayerRateChanged);
    }
}

#if HAVE(AVFOUNDATION_MEDIA_SELECTION_GROUP)
- (void)legibleOutput:(id)output didOutputAttributedStrings:(NSArray *)strings nativeSampleBuffers:(NSArray *)nativeSamples forItemTime:(CMTime)itemTime
{
    UNUSED_PARAM(output);
    UNUSED_PARAM(nativeSamples);

    if (!m_callback)
        return;

    // Hop to the main thread; re-check m_callback there because -disconnect
    // may run before the block does.
    dispatch_async(dispatch_get_main_queue(), ^{
        if (!m_callback)
            return;
        m_callback->processCue(strings, CMTimeGetSeconds(itemTime));
    });
}

- (void)outputSequenceWasFlushed:(id)output
{
    UNUSED_PARAM(output);

    if (!m_callback)
        return;
    
    // Same main-thread hop and re-check as above.
    dispatch_async(dispatch_get_main_queue(), ^{
        if (!m_callback)
            return;
        m_callback->flushCues();
    });
}
#endif

@end
1746
1747 #if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1090
@implementation WebCoreAVFLoaderDelegate

// Forwards AVAssetResourceLoader delegate callbacks (delivered on an
// AVFoundation-owned queue) to the C++ player on the main thread. m_callback
// is re-read inside each dispatched block because the owner can clear it (on
// the main thread) before the block executes.

- (id)initWithCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    if (!(self = [super init]))
        return nil;

    m_callback = callback;
    return self;
}

- (BOOL)resourceLoader:(AVAssetResourceLoader *)resourceLoader shouldWaitForLoadingOfRequestedResource:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);

    if (!m_callback)
        return NO;

    // Promise AVFoundation we will satisfy the request, then decide on the
    // main thread; finish with a nil error if the owner is gone or declines.
    dispatch_async(dispatch_get_main_queue(), ^{
        if (!m_callback || !m_callback->shouldWaitForLoadingOfResource(loadingRequest))
            [loadingRequest finishLoadingWithError:nil];
    });

    return YES;
}

- (void)resourceLoader:(AVAssetResourceLoader *)resourceLoader didCancelLoadingRequest:(AVAssetResourceLoadingRequest *)loadingRequest
{
    UNUSED_PARAM(resourceLoader);

    if (!m_callback)
        return;

    dispatch_async(dispatch_get_main_queue(), ^{
        if (m_callback)
            m_callback->didCancelLoadingRequest(loadingRequest);
    });
}

- (void)setCallback:(MediaPlayerPrivateAVFoundationObjC*)callback
{
    m_callback = callback;
}

@end
1795 #endif
1796
1797 #endif